Dataset columns and observed value ranges:

    blob_id               string, length 40
    directory_id          string, length 40
    path                  string, length 5 to 283
    content_id            string, length 40
    detected_licenses     sequence, length 0 to 41
    license_type          string, 2 classes
    repo_name             string, length 7 to 96
    snapshot_id           string, length 40
    revision_id           string, length 40
    branch_name           string, 58 classes
    visit_date            timestamp[us]
    revision_date         timestamp[us]
    committer_date        timestamp[us]
    github_id             int64, 12.7k to 662M
    star_events_count     int64, 0 to 35.5k
    fork_events_count     int64, 0 to 20.6k
    gha_license_id        string, 11 classes
    gha_event_created_at  timestamp[us]
    gha_created_at        timestamp[us]
    gha_language          string, 43 classes
    src_encoding          string, 9 classes
    language              string, 1 class
    is_vendor             bool, 2 classes
    is_generated          bool, 2 classes
    length_bytes          int64, 7 to 5.88M
    extension             string, 30 classes
    content               string, length 7 to 5.88M
    authors               sequence, length 1
    author                string, length 0 to 73
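Each record below carries one source file plus the repository metadata described by the columns above. As a rough illustration of how records with this schema might be consumed, the following minimal sketch loads a hypothetical Parquet shard into pandas and filters on a few of the listed columns; the shard name, the Parquet storage format, and the use of pandas are assumptions for illustration, not part of this dump.

import pandas as pd

# Hypothetical shard name; assumes the records are stored as Parquet
# with exactly the columns listed in the schema above.
df = pd.read_parquet("data-00000.parquet")

# Keep only non-vendored, non-generated Python files under ~10 kB.
mask = (
    (df["language"] == "Python")
    & (~df["is_vendor"])
    & (~df["is_generated"])
    & (df["length_bytes"] < 10_000)
)
small_python = df.loc[mask, ["repo_name", "path", "license_type", "content"]]

# Print a short summary of the first few matching files.
for _, row in small_python.head(3).iterrows():
    print(row["repo_name"], row["path"], len(row["content"]))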
5c128414d787d0002b8b156994c5ce715b7071c9
69e37937bca4dc0a2df384c62033e805433d799c
/finalProject651/finalProject651/urls.py
7891d9512b07fedf9177943a5742275772e2d919
[ "MIT" ]
permissive
o0baozi0o/Collaborative-Editor
5217db349ce682f2ef6f1954aed02eb96c311864
0f5abc10114bca507323ac14aa71a8fa96608bfa
refs/heads/master
2021-01-19T18:44:38.888907
2017-04-23T17:39:50
2017-04-23T17:39:50
88,377,817
0
0
null
null
null
null
UTF-8
Python
false
false
1,589
py
"""finalProject651 URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.8/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Add an import: from blog import urls as blog_urls 2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls)) 为了建立映射,我们用到了tuple.在Django里必须用urlpatterns来命名这个元组. 这个urlpatterns元组包含一些django.conf.urls.url()函数的调用,而每个函数里都有一个唯一的映射. 在上面的代码里,我们只用了url()一次,所以我们只映射了一个URL.django.conf.urls.url() 函数的第一个参数是正则表达式^$,指的是匹配一个空字符串.所有匹配这个模式的URL都会映射到views.index()这个视图. 用户的请求信息会包含在HttpRequest对象里作为参数传递给视图.我们给url()函数可选参数name赋值为index. """ from django.conf.urls import include, url from django.contrib import admin from django.conf import settings from django.conf.urls.static import static from Editor import views urlpatterns = [ url(r'^$', views.index, name='index'), url(r'^add_string/$', views.addString, name='add_string'), url(r'^admin/', admin.site.urls), ]
4cce8382c68d039180c6284d5c76f461eac33466
794bf9c9ca0db8d3503755525aad3c7a0f5a5bc6
/pyramid_tutorial/tests.py
199c0ff2c654c08ca52acad5e2564c01a23a8204
[]
no_license
kdowney-cars/Pyramid-Tutorial
86935a0b3fc104f949dc9a699782f97457a71610
bc4acd8fda573cfcf42d58a859120d07fe6a8013
refs/heads/master
2021-01-19T12:58:40.415796
2011-08-27T22:09:30
2011-08-27T22:09:30
2,280,998
0
0
null
null
null
null
UTF-8
Python
false
false
715
py
import unittest

from pyramid.config import Configurator
from pyramid import testing


def _initTestingDB():
    from sqlalchemy import create_engine
    from pyramid_tutorial.models import initialize_sql
    session = initialize_sql(create_engine('sqlite://'))
    return session


class TestMyView(unittest.TestCase):
    def setUp(self):
        self.config = testing.setUp()
        _initTestingDB()

    def tearDown(self):
        testing.tearDown()

    def test_it(self):
        from pyramid_tutorial.views import my_view
        request = testing.DummyRequest()
        info = my_view(request)
        self.assertEqual(info['root'].name, 'root')
        self.assertEqual(info['project'], 'pyramid_tutorial')
04a72843674890d185caccf422d3b6323efd2c35
2c0e45cfe1629f30ee8916c1266e0925e08f6e2f
/class_01/qstn.py
27f8114dc59fa64466eee3917c3c2fb6881f10d1
[]
no_license
shashankth7/Perceptron_Summer_2017
a801be9c1fe410a7f28b1d50b1b5dc11933f0aa7
b0dd343eef24c7fb5523d5f7bef90c2ef6030eea
refs/heads/master
2021-06-21T21:46:31.445086
2017-08-10T10:53:42
2017-08-10T10:53:42
null
0
0
null
null
null
null
UTF-8
Python
false
false
470
py
"""class A(): x = 1 a = A() print a.x b = [A() for ix in range(5)] # for ix in range(5): # b.append(A()) for ix in b: print ix.x b[2].x = 10 for ix in b: print ix.x""" class Obj: def __init__(self, x=0, y=10): self.x = x self.y = y objects = [Obj(1, 0), Obj(70, -1), Obj(11, -1), Obj(4, 2), Obj(-1, 10)] for ix in objects: print ix.x, ix.y print '-'*80 X = sorted(objects, key=lambda z: [z.y, z.x]) for ix in X: print ix.x, ix.y # s.split('a', 2)
809cc7a01d4fff06a0417eebcd46cb2e5e99dd93
585738b31db9676196d9a37a2e62d2e80234c8ce
/QDrillerPlugin/generatesection_dialog.py
23e329322de9c257dc2e359378ee1f3f3a74e0e0
[]
no_license
valheran/QDriller
9e943e835c7e7ed22cfa7ff2b141d8e2dc7be08f
d19882c08d9ce6abe321dd5072d782e902888755
refs/heads/master
2021-01-22T05:10:33.714921
2015-06-01T06:11:51
2015-06-01T06:11:51
33,072,071
1
0
null
2015-04-09T08:56:51
2015-03-29T12:31:18
Python
UTF-8
Python
false
false
1,792
py
# -*- coding: utf-8 -*- """ /*************************************************************************** generatesection_dialog A QGIS plugin Drillhole visualisation tools ------------------- begin : 2015-03-30 git sha : $Format:%H$ copyright : (C) 2015 by Alex Brown email : email ***************************************************************************/ /*************************************************************************** * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation; either version 2 of the License, or * * (at your option) any later version. * * * ***************************************************************************/ """ import os from PyQt4 import QtGui, uic FORM_CLASS, _ = uic.loadUiType(os.path.join( os.path.dirname(__file__), 'generatesection_dialog_base.ui')) class GenerateSection(QtGui.QDialog, FORM_CLASS): def __init__(self, parent=None): """Constructor.""" super(GenerateSection, self).__init__(parent) # Set up the user interface from Designer. # After setupUI you can access any designer object by doing # self.<objectname>, and you can use autoconnect slots - see # http://qt-project.org/doc/qt-4.8/designer-using-a-ui-file.html # #widgets-and-dialogs-with-auto-connect self.setupUi(self)
2ed3a5b98157652f32bdc847a800f0a7fa6321cf
984672ca80fafb806200c970e6bec72f4fca6866
/Exercise/Py47/Py47.py
db69e1c8d80730038c953317c224dfca8086b0a2
[]
no_license
shudongW/python
c3b8eaa8cf228348698ef2ce1a2e1ec3ff85e301
020213d40810aa01eabebbb48b91489c57c9d7f7
refs/heads/master
2020-03-15T05:39:12.056210
2018-05-17T09:34:38
2018-05-17T09:34:38
null
0
0
null
null
null
null
UTF-8
Python
false
false
351
py
#-*- coding:UTF-8 -*-
# Learn Python the Hard Way (py3) --- testing with nosetests


class Room(object):

    def __init__(self, name, description):
        self.name = name
        self.description = description
        self.paths = {}

    def go(self, direction):
        return self.paths.get(direction, None)

    def add_paths(self, paths):
        self.paths.update(paths)
6551610ecfb2b88f83584e0d8ec132ec441c3a32
41000f1d38394dde3534945567f45cc52ce2f096
/jackalope/handlers/taskrabbit.py
d695fa24897f6c3129c09fc9db9f668de2c27554
[]
no_license
sendjack/jackalope
b1187e7fededb95873b571058224d03aaf6767a0
6ba787d682fd3c28678207faf9fd75db645b5b3f
refs/heads/master
2021-01-21T12:39:35.371257
2013-04-08T22:02:27
2013-04-08T22:02:27
null
0
0
null
null
null
null
UTF-8
Python
false
false
867
py
""" taskrabbit ---------- Callback handlers for any notifications from Task Rabbit. """ from model.worker.transformer import FIELD from model.worker.task_rabbit_employee import TASK_RABBIT, TASK_RABBIT_FIELD from .vendor import TaskVendorHandler, CommentVendorHandler class TaskRabbitTaskHandler(TaskVendorHandler): vendor = TASK_RABBIT.VENDOR def _process_request(self): if self._id is not None: self._foreman.send_jack_for_task(self.vendor, self._id) else: body = self._get_request_body() items = body.get(TASK_RABBIT_FIELD.ITEMS) for item in items: id = item.get(TASK_RABBIT_FIELD.TASK).get(FIELD.ID) self._foreman.send_jack_for_task(self.vendor, id) class TaskRabbitCommentHandler(CommentVendorHandler): vendor = TASK_RABBIT.VENDOR
1f4cf2e86e44b06a13d2291d800440d4aed7ea8a
52a4d5042e64efb2a51fbcb1f1e2fc93dd13c7f8
/src/c_update.py
e8fed36127c59032c5b4d0ffb4b9f7966a90ad42
[]
no_license
arutunyan-gv/telegram_bot
fcb12e26c933a0ead6f8d428d341cf09295faefc
27b74ce9340e4457294c3ae76ede3f5fe9cf9405
refs/heads/master
2023-02-27T02:31:02.300848
2021-02-03T10:31:23
2021-02-03T10:31:23
335,587,328
0
0
null
null
null
null
UTF-8
Python
false
false
1,950
py
# -*- coding=utf-8 -*- from prefs.config import SEARCHES, SNAPSHOTS, SETTINGS, ADMINS, EXTENSIONS from src.telegram_helper import reply_func from src.db_helper import run_query_ro, run_query_rw from src.c_help import help_chapter def read_settings(dictionary, query): table = run_query_ro(query) commands = [] queries = {} if table: for row in table: dictionary[row[0]] = [row[1], row[2]] def chapter(header, content, prefix=''): result = '' elem = '' for element in content: try: elem = ': ' + str(content[element]) + ' ' except: elem = '' result += ' - `' + prefix + str(element) + elem + '`\n' return str('\n' + str(header) + '\n' + result) def update_settings(): read_settings(SEARCHES, 'select command, query, description from bot.commands_searches order by command') read_settings(SNAPSHOTS, 'select snapshot_name, query, description from bot.addons_snapshot_info order by snapshot_name') read_settings(SETTINGS, 'select name, value, comment from bot.prefs_main order by name') read_settings(ADMINS, 'select user_id, username, name from bot.prefs_admins order by user_id') def command_update(message): update_settings() reply = '🤖 Бот ЕГИС ОКНД 🤖\n' reply += help_chapter( '*Настройки:*', SETTINGS, suffix=': ', row_num=0) reply_func(message, reply) reply = '' reply += help_chapter( '*Админимтраторы:*', ADMINS, suffix=' — ') reply_func(message, reply) reply = '' reply += help_chapter( '*Поиск заданий:*', SEARCHES, suffix=' — ') reply_func(message, reply) reply = '' reply += help_chapter( '*Снапшоты:*', SNAPSHOTS, suffix=':', new_line=True) reply_func(message, reply) reply = ''
a9eb1713de5ea004dd314f3e1f8b5f1a2d0650ef
0acaea4afbb39c1767cc9290e4e8d2c292030f8d
/connections.py
79ddaf51b0e4368bf865d05de3d0be86ee4562f5
[]
no_license
Gokulapriya98/python
1a530558c33411c1dd7a64c8229d87b533dc0397
325d0f4e16a1f62e89cb517de069e1be057a68fd
refs/heads/master
2020-06-12T16:52:10.625703
2019-07-09T05:39:01
2019-07-09T05:39:01
194,363,920
0
0
null
null
null
null
UTF-8
Python
false
false
238
py
import mysql.connector

cnx = mysql.connector.connect(user='admin', password='admin123',
                              host='127.0.0.1', database='priya',
                              auth_plugin='mysql_native_password')
print(cnx)
5451fbf1abf096bcb5d1ed606afc1f7a274816ee
47ccad21a4157f380b0b5a08fd7b7d55a5327918
/identity-resolution/notebooks/identity-graph/nepytune/visualizations/pie_chart.py
85c13ceb2f78cfdcfabe5834504815422b6a696b
[ "MIT-0" ]
permissive
austinkline/aws-admartech-samples
c8c2c2d36a528a17a5797e31b47368717ffe79c3
de30dd21a9ffbbfce216bab789bb50b59540a953
refs/heads/master
2023-01-09T22:14:04.693588
2020-11-13T19:03:12
2020-11-13T19:03:12
312,659,135
0
0
NOASSERTION
2020-11-13T19:03:13
2020-11-13T18:55:05
null
UTF-8
Python
false
false
3,031
py
import colorlover as cl import plotly.graph_objects as go from plotly.subplots import make_subplots def show(data): type_labels, type_values = zip(*data["type"].items()) device_labels, device_values = zip(*data["device"].items()) browser_labels, browser_values = zip(*data["browser"].items()) fig = make_subplots(rows=3, cols=1, specs=[ [{"type": "pie"}], [{"type": "pie"}], [{"type": "pie"}] ]) fig.add_trace( go.Pie(labels=list(reversed(type_labels)), values=list(reversed(type_values)), hole=0, name="Type", marker={'colors': ['#7F7FFF', '#FF7F7F']}, textinfo='label+percent', hoverinfo="label+percent+value", textfont_size=20 ), row=2, col=1, ) fig.add_trace( go.Pie(labels=["device <br> type"], values=[data["type"]["device"]], hole=0, textinfo='label', hoverinfo="label+value", marker={'colors': ['#7F7FFF']}, textfont_size=20 ), row=1, col=1, ) fig.add_trace( go.Pie(labels=device_labels, values=device_values, hole=.8, opacity=1, textinfo='label', textposition='outside', hoverinfo="label+percent+value", marker={'colors': ['rgb(247,251,255)', 'rgb(222,235,247)', 'rgb(198,219,239)', 'rgb(158,202,225)', 'rgb(107,174,214)', 'rgb(66,146,198)', 'rgb(33,113,181)', 'rgb(8,81,156)', 'rgb(8,48,107)', 'rgb(9,32,66)', ] }, textfont_size=12), row=1, col=1, ) fig.add_trace( go.Pie(labels=["cookie <br> browser"], values=[data["type"]["cookie"]], hole=0, textinfo='label', hoverinfo="label+value", marker={'colors': ['#FF7F7F']}, textfont_size=20), row=3, col=1, ) fig.add_trace( go.Pie(labels=browser_labels, values=browser_values, hole=.8, textinfo='label', textposition='outside', hoverinfo="label+percent+value", marker={'colors': ['rgb(255,245,240)', 'rgb(254,224,210)', 'rgb(252,187,161)', 'rgb(252,146,114)', 'rgb(251,106,74)', 'rgb(239,59,44)', 'rgb(203,24,29)', 'rgb(165,15,21)', 'rgb(103,0,13)', 'rgb(51, 6,12)' ] }, textfont_size=12), row=3, col=1, ) fig.update_layout( showlegend=False, height=1000, ) fig.show()
f599806afc0260a92a675e2b5f1bb13700ba321f
705bdf06663023cfc05c7abf716f64bf23958519
/inventory/migrations/0005_product_stock.py
75d130398bc20586b368f6865268d99a2ac914b0
[]
no_license
dovjay/django-stock-chain
36e710fbcfb1427e170322ad1d5e8ee7c2747d77
8dbcc03a236544eb3141f2342264f2fa839d6e4a
refs/heads/development
2021-07-20T23:34:06.425258
2021-02-05T08:11:26
2021-02-05T08:11:26
249,880,414
0
0
null
2021-06-10T22:49:50
2020-03-25T03:42:53
HTML
UTF-8
Python
false
false
393
py
# Generated by Django 3.0.4 on 2020-03-26 03:42

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('inventory', '0004_auto_20200326_1038'),
    ]

    operations = [
        migrations.AddField(
            model_name='product',
            name='stock',
            field=models.PositiveIntegerField(default=0),
        ),
    ]
db60067a452bba8447a1a851b57f2b98726e5a3d
6fd5d30cf21716893388442eb0f9c16e13b91315
/others/educational_dp_contest/b.py
4e1ab3220f3f00816eaef66a7a858f7f6b4a9b32
[]
no_license
mgmk2/atcoder-python
23d45f3195977f1f5839f6a6315e19cac80da2be
beec5857a8df2957ff7b688f717d4253b4196e10
refs/heads/master
2021-06-09T20:00:22.500222
2021-05-04T15:36:39
2021-05-04T15:36:39
179,711,330
1
0
null
null
null
null
UTF-8
Python
false
false
270
py
n, k = map(int, input().split())
h = list(map(int, input().split()))

dp = [0 for _ in range(n)]
for i in range(1, n):
    hi = h[i]
    x = 2 * 10 ** 9
    for j in range(1, min(k, i) + 1):
        x = min(x, dp[i - j] + abs(hi - h[i - j]))
    dp[i] = x
print(dp[-1])
643046e25434e6f3f0c920cb0ea56cb7d5886afd
7e8fd85526fcdb2bc44e44506b43c4f3c478d720
/src/piazza/migrations/0017_auto_20210326_1432.py
2fe01757be56d4580bc44d6f30ddaec547a0a5ba
[]
no_license
artkuzmics/C
4d981702944d233052a67f1973b62bacec8683f6
2d5fc0f400f53fd56e0b6125035cffcc355bc092
refs/heads/main
2023-03-30T15:42:37.554513
2021-03-30T18:22:55
2021-03-30T18:22:55
null
0
0
null
null
null
null
UTF-8
Python
false
false
499
py
# Generated by Django 3.0 on 2021-03-26 14:32

import datetime
from django.db import migrations, models
from django.utils.timezone import utc


class Migration(migrations.Migration):

    dependencies = [
        ('piazza', '0016_auto_20210326_1405'),
    ]

    operations = [
        migrations.AlterField(
            model_name='post',
            name='timestamp',
            field=models.DateTimeField(default=datetime.datetime(2021, 3, 26, 14, 32, 51, 903967, tzinfo=utc)),
        ),
    ]
c6978cba244464649a7929e2966a6b9218cdb069
1f10ed9fd1f1ef56505619464ef0fc0a0507977c
/src/source/currencies.py
bd8c0d306e5213317046d800298362eb2e112da8
[]
no_license
marcelkawski/currency-pl
149fbd979c2254e5c7e459fe587873175d9ec921
8655bd60f0a06c9c686eff808f0dc1160888a406
refs/heads/master
2023-03-07T18:53:44.705646
2021-02-18T11:40:46
2021-02-18T11:40:46
318,784,245
0
0
null
null
null
null
UTF-8
Python
false
false
38
py
class Currencies:
    currencies = {}
9f6e55e39976e7abeb35f12ef07eb44d0a55f640
15568ba8df1a66e75c6166eb2b0e9837e95064b4
/ppp/member/migrations/0002_auto_20200720_2248.py
c36cdae40ed621a09146831b7113d5aeafff3947
[]
no_license
jalacardio/newppp
b8055a9883797b64c1f3f1ad62386de45a0f478d
28e37147c0c47e759fc740902d93834689cf9d21
refs/heads/master
2023-01-15T18:29:41.818081
2020-11-12T16:22:13
2020-11-12T16:22:13
299,981,398
0
0
null
null
null
null
UTF-8
Python
false
false
386
py
# Generated by Django 3.0 on 2020-07-20 22:48

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('member', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='user',
            name='last_login',
            field=models.DateTimeField(blank=True, null=True),
        ),
    ]
821f7029ef1f5d93e671b7a24b7561fb281208f6
9baa449f2084f42506932f253f69401d38633d1d
/main.py
8a07ef3466331638697569e1c9d52bbaa9492c7c
[]
no_license
zhengyong106/BrandSpider
cb78c3105f1e5d50632612ac2be5d5174ec95a1e
29471c702b18e48f57aa0c9c2b1e4c76e34e96f0
refs/heads/master
2020-04-01T11:16:10.374563
2018-10-21T16:38:55
2018-10-21T16:38:55
153,154,714
2
0
null
null
null
null
UTF-8
Python
false
false
191
py
# -*- coding:utf-8 -*-

from scrapy.cmdline import execute

import sys
import os

sys.path.append(os.path.dirname(os.path.abspath(__file__)))
execute("scrapy crawl leijichengjiao".split(" "))
cab0c9e28d93678ddafd2edcde1685b4ba839ad6
f9e8733ed87858b12bfee6b70ccdddd6a616b60a
/20.py
6badb135fd1461e5e92cc11509094f285c4ec15f
[]
no_license
MajestyLee/leetcode_TopInterview
c1c9c923d3bf42cd4777bb2a2ccd21654a7c6dbb
30b7d5acec716b7d754141835fc8bafe4411437e
refs/heads/master
2020-04-01T12:19:20.837383
2018-11-06T02:13:44
2018-11-06T02:13:44
153,200,785
0
0
null
null
null
null
UTF-8
Python
false
false
915
py
'''
Given a string containing just the characters '(', ')', '{', '}', '[' and ']', determine if the input string is valid.

An input string is valid if:

    Open brackets must be closed by the same type of brackets.
    Open brackets must be closed in the correct order.

Note that an empty string is also considered valid.

Example 1:
Input: "()"
Output: true

Example 2:
Input: "()[]{}"
Output: true

Example 3:
Input: "(]"
Output: false

Example 4:
Input: "([)]"
Output: false

Example 5:
Input: "{[]}"
Output: true
'''


class Solution(object):
    def isValid(self, s):
        stack = []
        dic = {"]": "[", "}": "{", ")": "("}
        for ch in s:
            if ch in dic.values():
                stack.append(ch)
            elif ch in dic.keys():
                if stack == [] or dic[ch] != stack.pop():
                    return False
            else:
                return False
        return stack == []
280291543587999631b2d70414fa5e69efd4fe65
5a26a820b34ed5bfb988d43446110f581e92f44e
/todo_pro/settings.py
ffbc6ae9af9dd41b03ac427de88caf0483910fc3
[]
no_license
haricudant/ToDo
b3ed3debc2ea9aa0200ed26081dc7642e84496f6
fbf31647f438d228c7fe546a8ec9adbfe02f8f42
refs/heads/master
2021-02-08T17:54:18.172782
2020-03-01T16:08:37
2020-03-01T16:08:37
244,180,827
0
0
null
null
null
null
UTF-8
Python
false
false
3,210
py
""" Django settings for todo_pro project. Generated by 'django-admin startproject' using Django 2.1.3. For more information on this file, see https://docs.djangoproject.com/en/2.1/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/2.1/ref/settings/ """ import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = '344+4en^vj_y=q(ds6(=4pmu)ccx@jftvx1#y+7t^lywr_kqa#' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'firstapp', 'crispy_forms', 'bootstrapform' ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'todo_pro.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [os.path.join('template')], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'todo_pro.wsgi.application' # Database # https://docs.djangoproject.com/en/2.1/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } # Password validation # https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/2.1/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/2.1/howto/static-files/ STATIC_URL = '/static/' CRISPY_TEMPLATE_PACK = 'bootstrap4'
f285dbaa62e68fd334122fe5c8f2c43ba9ff3801
5deb698b92fce783391a6dcb9fcd9d3df84bc239
/demo.py
839ad60719c5fb4e649958683cbf05f96ab884b4
[]
no_license
YONGBAAM/SpineChallenge
9a94acb51ba188dfadd989f0401fea80a6afd90f
89eb91e9a843990565452c63533ce9cfbd72b463
refs/heads/master
2022-05-31T23:26:41.141313
2022-03-20T06:03:24
2022-03-20T06:03:24
235,288,466
0
1
null
null
null
null
UTF-8
Python
false
false
6,204
py
import numpy as np import matplotlib.pyplot as plt import os from PIL import Image import scipy.io as spio path = './boostnet_labeldata' training_data_path = path + '/data/training' training_label_path = path + '/labels/training' plot_save_path = './label_plots' label_path_all = os.listdir(training_label_path) label_list = [] output_list = [] ap_num = 32; lat_num = 32; show_every = 5000 save_mode = True testMode = False def isS(p): H = p.shape[0] ll = np.zeros((H - 2, 1)) for i in range(H - 2): ll[i,0] = (p[i, 1] - p[H - 1, 1]) / (p[0, 1] - p[H - 1, 1]) - (p[i, 0] - p[H - 1, 0]) / (p[0, 0] - p[H - 1, 0]) S = np.matmul(ll, ll.T) flag = S < 0 return np.sum(flag) >= 1 for label_name in label_path_all: if '.mat' == label_name[-4:]: label_list.append(label_name) data_list = [label_name[:-4] for label_name in label_list] landmarks_ap = [] landmarks_lat = [] import pandas as pd label_path_training = './boostnet_labeldata/labels/training/angles.csv' true_label_df = pd.read_csv(label_path_training, index_col=None, header=None) true_label_df.rename(columns={0: 'true0', 1: 'true1', 2: 'true2'}, inplace=True) #####################@@@@@@@@@@@ for data_no in range(len(data_list)): #for data_no in range(1): image =Image.open(training_data_path + '/' + data_list[data_no]) image = np.asarray(image) lb = spio.loadmat(training_label_path + '/' + label_list[data_no]) coord = np.array(lb['p2']) H,W = image.shape ##Landmark : normalized된 좌표 concatenating if 'lateral' in label_list[data_no]: vnum = int(lat_num/4) -1 coord = coord[:lat_num*2] normalized_x_axis = coord[:, 0] / W normalized_y_axis = coord[:, 1] / H landmark = np.concatenate((normalized_x_axis, normalized_y_axis), axis = 0) landmarks_lat.append(landmark) else: vnum = int(ap_num/4) -1 coord = coord[:ap_num*2] normalized_x_axis = coord[:, 0] / W normalized_y_axis = coord[:, 1] / H landmark = np.concatenate((normalized_x_axis, normalized_y_axis), axis = 0) landmarks_ap.append(landmark) cobb_angles, pos, mid_points, mid_lines, vec_lines = calc_angle(coord = coord, image_size = (H,W), vnum = vnum) pos1, pos2, pos11, pos22 = pos true_angle = true_label_df.iloc[data_no] error = [0,0,0] for i in range(3): if true_angle[i] != 0: error[i] = np.abs(cobb_angles[i] - true_angle[i]) / true_angle[i] * 100 output_string_test = '%d , pos:%d %d, %.1f/%.1f/%.1f, error%% %.1f/%.1f/%.1f' % ( data_no, pos1, pos2, cobb_angles[0], cobb_angles[1], cobb_angles[2], error[0], error[1], error[2]) if testMode == True: print(output_string_test) continue # output_list.append({'no' : data_no, # 'cobb_angles' : cobb_angles, # 'pos1' : pos1, # 'pos2' : pos2}) output_list.append(np.array([cobb_angles[0], cobb_angles[1], cobb_angles[2], pos1, pos2])) save_path = plot_save_path + '/' + data_list[data_no] if save_mode == True else None fig = make_plot('angle : %.2f most tilted : %d, %d, u%d, l%d' % (cobb_angles[0], pos1, pos2, pos11, pos22), image, mid_lines, pos) # plt.figure() # plt.title('angle : %.2f most tilted : %d, %d' % (pt, pos2, pos1)) # plt.imshow(image, cmap='gray', vmin=0, vmax=255) # # plt.plot(mid_lines[pos1,0], mid_lines[pos1,1]) # # # Plotting the points # plt.scatter(mid_lines[:, 0], mid_lines[:, 1], s=1.2, c='yellow') # # # Calculating the cross point for two vectors. 
# # # v11 = mid_lines[pos1 * 2] # # v12 = mid_lines[pos1 * 2 + 1] # # v21 = mid_lines[pos2 * 2] # # v22 = mid_lines[pos2 * 2 + 1] # # s1 = v12 - v11 # slope # # s2 = v22 - v11 # # t2 = (v11[0] - v21[0]) * s1[1] - (v11[1] - v21[1]) * s1[0] # # t2 /= (s2[0] * s1[1] - s2[1] * s1[0]) # # t1 = (s2[1] * s1[1] * t2 - (v11[1] - v21[1]) * s1[0]) / (s1[0] * s1[1]) # # cross_point = v11 + t1 * (s1) # # plt.plot() # # # plt.plot(cross_point[0], cross_point[1], v11[0], v11[1], 'c.-', alpha = 0.5, linewidth = 2) # # plt.plot(cross_point[0], cross_point[1], v21[0], v21[1], 'c.-', alpha = 0.5, linewidth = 2) # plt.plot(mid_lines[pos1 * 2:pos1 * 2 + 2, 0], mid_lines[pos1 * 2:pos1 * 2 + 2, 1], 'c.-', alpha=0.5, linewidth=2) # plt.plot(mid_lines[pos2 * 2:pos2 * 2 + 2, 0], mid_lines[pos2 * 2:pos2 * 2 + 2, 1], 'c.-', alpha=0.5, linewidth=2) if data_no % show_every ==0: plt.show() output_string = "{} : (PT, MT, TL/L) are {}, {}, {}\n " \ "and the two most tilted vertebrae are {}, {}".format(data_no, cobb_angles[0], cobb_angles[1], cobb_angles[2], pos2, pos1) print(output_string) plt.close() #get label angle # calculated_df = pd.DataFrame(np.array(output_list), columns = ['calc0', 'calc1', 'calc2', 'pos1', 'pos2']) # # calculated_df.insert(column =['true0'], value = true_label_df['true0'] ) # calculated_df.insert(column =['true1'], value = true_label_df['true1'] ) # calculated_df.insert(column =['true2'], value = true_label_df['true2'] ) ##converting output list to pandas dataframe ''' fprintf(output); % fprintf('No. %d :The Cobb Angles(PT, MT, TL/L) are %3.1f, and the two most tilted vertebrae are %d and %d. ',... % k,CobbAn,pos2,pos1(pos2)); pause(200) close all CobbAn_ap = [] CobbAn_lat = [] %isempty(strfind(lower(fileNames{k}),'lateral')) %AP인지 LAT인지에 따라 다른듯!! CobbAn_ap = [CobbAn_ap ; cob_angles]; %cobb angles CobbAn_lat = [CobbAn_lat ; cob_angles]; %cobb angles end % write to csv file csvwrite('angles_ap.csv',CobbAn_ap); csvwrite('angles_lat.csv',CobbAn_lat); csvwrite('landmarks_ap.csv',landmarks_ap); csvwrite('landmarks_lat.csv',landmarks_lat); fid = fopen('filenames_aplat.csv','wt'); if fid>0 for k=1:N fprintf(fid,'%s\n',fileNames_im{k}); end fclose(fid); end '''
b9225503622b86e3ec228234c66b993523b55503
af7918556ee00c2a9fcc3ba1ccabe96fd97608d9
/shell/Outputs/logger.py
cc903b42aa7957571472c174295cb3c5c1e697f7
[ "MIT" ]
permissive
shipcod3/Shellsploit
ea3fbe9a863bad83a2033aac57dfeef4645a97ca
3df35f2542a3b66dd2c1576d37ccb9c5098dd075
refs/heads/master
2021-01-15T18:31:35.571866
2016-01-25T19:39:25
2016-01-25T19:39:25
50,404,888
1
0
null
2016-01-26T05:09:33
2016-01-26T05:09:33
null
UTF-8
Python
false
false
885
py
#------------------Bombermans Team---------------------------------#
#Author  : B3mB4m
#Concat  : [email protected]
#Project : https://github.com/b3mb4m/Shellsploit
#LICENSE : https://github.com/b3mb4m/Shellsploit/blob/master/LICENSE
#------------------------------------------------------------------#

from random import randint
import os


def logs( data=None, extension=None):
    while True:
        if extension == None:
            name = "%s" % str(randint(0, 999999999))
        else:
            name = "%s.%s" % (str(randint(0, 999999999)), extension)
        if not os.path.isfile(name):
            break

    if extension == None:
        logs = open(name, "wb")
    else:
        logs = open(name, "w")
    logs.write(data)
    logs.close()

    if extension != None:
        print ("\n\t[+]Script file : {0} saved !\n".format(os.getcwd()+os.sep+name))
    else:
        print ("\n\t[+]Executable file : {0} saved !\n".format(os.getcwd()+os.sep+name))
312ce01ed5508cc564e474d31419ca393398a83f
59f021cb84bbb3f05fc9058036aab8b5da740cfd
/estimation/kinematics.py
b13cd95de44a678daf26243980447652359675cf
[]
no_license
aerorbtx/samples
2317a2621a49487f6f55b64822b593e1ec09acc8
ef33033e81996f31982590acc0fc059d0653047f
refs/heads/master
2020-04-06T07:06:35.776857
2016-09-10T20:24:19
2016-09-10T20:24:19
65,229,780
0
0
null
null
null
null
UTF-8
Python
false
false
3,391
py
import numpy as np import math def ptDist(pt1, pt2): d = pt1 - pt2 return math.sqrt(np.dot(d.transpose(),d)) def ptImpact(m, b, r_candle, cdl_ctr, pt0): A = (1.0 + m**2) B = 2 * (m * (b - cdl_ctr[1]) - cdl_ctr[0]) C = ((b - cdl_ctr[1])**2 + cdl_ctr[0]**2 - r_candle**2) poi_x = np.roots(np.array([A,B,C]).reshape(3)) poi_y = m * poi_x + b pt_a = np.array([poi_x[0],poi_y[0]]).reshape(2,1) pt_b = np.array([poi_x[1],poi_y[1]]).reshape(2,1) dist1 = ptDist(pt0,pt_a) dist2 = ptDist(pt0,pt_b) if (dist1 < dist2): return pt_a else: return pt_b def ptLine(pt1, pt2): m = (pt2[1] - pt1[1]) / (pt2[0] - pt1[0]); # check for 1/0 b = pt1[1] - m * pt1[0]; return m, b def bounce(StateVect, LastPosn): # LastPosn = np.array([x_prev, y_prev]).reshape(2,1) # Estimated positions x_k1 = StateVect[0] # next predicted x y_k1 = StateVect[1] # next predicted y # Estimated velocities # If using velocity magnitude and heading, # Vx = V * cos(hdg) # Vy = V * sin(hdg) vel_x = StateVect[2] # next predicted Vx vel_y = StateVect[3] # next predicted Vy x_left = 81 x_right = 563 y_top = 325 y_bot = 36 r_candle = 38 cdl_ctr = np.array([330,178]).reshape(2,1) ## Check for edges and corners rho_p = 0.5 # Posn restitution coeff, bug-box collision rho_v = 0.9 # Vel restitution coeff, bug-box collision if (x_k1 < x_left): x_k1 = x_left + rho_p * (x_left - x_k1) vel_x *= -rho_v elif (x_k1 > x_right): x_k1 = x_right + rho_p * (x_right - x_k1) vel_x *= -rho_v if (y_k1 < y_bot): y_k1 = y_bot + rho_p * (y_bot - y_k1) vel_y *= -rho_v elif (y_k1 > y_top): y_k1 = y_top + rho_p * (y_top - y_k1) vel_y *= -rho_v ## Check for candle rho_p = 0.5 # Posn restitution coeff, bug-candle collision rho_v = 0.9 # Vel restitution coeff, bug-candle collision pt2 = np.array([x_k1,y_k1]).reshape(2,1) if (ptDist(pt2, cdl_ctr) < r_candle): m, b = ptLine(LastPosn, pt2) poi = ptImpact(m, b, r_candle, cdl_ctr, LastPosn) d_t = ptDist(poi, cdl_ctr) - ptDist(pt2, cdl_ctr) # always > 0 d_c = ptDist(poi, pt2) d_x = math.sqrt(d_c**2 - d_t**2) rad_new_pt = math.sqrt((r_candle + rho_p * d_t)**2 + (rho_p * d_x)**2) phi = math.atan2(poi[1]-cdl_ctr[1],poi[0]-cdl_ctr[0]) if (phi < 0.0): phi += 2.0 * math.pi phi2 = math.atan2(pt2[1]-cdl_ctr[1],pt2[0]-cdl_ctr[0]) if (phi2 < 0.0): phi2 += 2.0 * math.pi theta = math.atan2(rho_p * d_x, r_candle + rho_p * d_t) if (theta < 0.0): theta += 2.0 * math.pi if (phi > phi2): theta = -theta x_k1 = cdl_ctr[0] + rad_new_pt * math.cos(phi + theta) y_k1 = cdl_ctr[1] + rad_new_pt * math.sin(phi + theta) angl = 2.0 * theta - math.pi vel_x = StateVect[2] * math.cos(angl) + StateVect[3] * math.sin(angl) vel_y = -StateVect[2] * math.sin(angl) + StateVect[3] * math.cos(angl) vel_x *= -rho_v vel_y *= rho_v return np.array([x_k1, y_k1, vel_x, vel_y]).reshape(4,1)
79c4982c18484a42aa175dd54a7a35bb773e09bc
2d442324b7107f794033ec14b82f3a655e60a37f
/webdjango/ormModel/views.py
3015de586aed3a71c90795da398e7d85f9f1fb57
[]
no_license
elephantTo/python_learn
a4f261ee91ebeb7eb2842bb1dc66559304f73544
8c97f851fc30b97224cc9e1d812c91b42c3b6025
refs/heads/master
2021-04-06T10:38:08.786007
2018-08-07T00:52:03
2018-08-07T00:52:03
null
0
0
null
null
null
null
UTF-8
Python
false
false
3,050
py
from django.shortcuts import render from django.db import models from .models import Book,Category,Article,agg_Publisher,agg_Author,agg_Book,agg_BookOrder from djangoTemplate.models import FrontUser from django.http import HttpResponse from django.db.models import Avg from django.db import connection # Create your views here. def index(request): # # 1.使用ORM添加一条数据到数据库中 # book = Book(name="三国演义",author="罗贯中",price=200) # book.save() #2. 查询 #根据主键进行查找 book = Book.objects.get(pk=1) print(book) #根据其他条件查找 book = Book.objects.filter(name="三国演义") book_first = Book.objects.filter(name="三国演义").first() print(book) #3. 删除 book = Book.objects.get(pk=0) book.delete() #4. 修改 book = Book.objects.get(pk=2) book.price = 100 book.save() return HttpResponse("书籍修改成功") def foreignKey(request): article = Article(title='abc',content='111') category = Category(name='最新文章') category.save() article.category = category article.save() # article = Article.objects.first() # print(article.category.name) #如果想引用其他App里面models 就用app.model return HttpResponse("foreignKey success") def one_to_many_view(request): #一对多的关联操作 # article = Article(title="钢铁是怎样炼成的") # category = Category.objects.first() # author = FrontUser.objects.first() # # article.category = category # article.author = author # article.save() #获取分类下所有的文章 category = Category.objects.first() #relatedManager article_set会没有加入定义了releated_name #article = category.article_set.all() article = category.articles.all() print(article) article = Article(title="追风筝的人") article.author = FrontUser.objects.first() #假如定义了bulk,就可以执行article.save() #article.save() category.articles.add(article,bluk=False) category.save() return HttpResponse("一对多") def loopUp(request): #对大小写不敏感 linuxSystem可以编辑数据库设置排序规则为utf8_bin,这样子可以大小写敏感 但windows下设置没有用,因为window用的是unicode article = Article.objects.filter(title__iexact='追风筝的人') print(article) article = Article.objects.filter(title__icontains="追") print(article) #如下打印可以知道对应翻译成什么mysql语句 #仅限于filter print(type(article)) print(article.query) return HttpResponse("查询操作") def aggregate(request): #获取图书定价的平均价 avg是自己命名的别名 也可以不写Avg= result = agg_Book.objects.aggregate(avg=Avg("price")) print(result) #only querySet can use #print(result.query) print(connection.queries) return HttpResponse()
466302e21e152bea5aae3dd9eb896173e1b81c39
e9c88ec2b409df0d418d95a2c5b38de70161c1d5
/app/views.py
89764fde621e34d8cd211b66c0b26d0bcd2d4b5a
[ "MIT" ]
permissive
excal04/scanviz
49448d31f4498320a6de7132984fa221881ecf9d
a66ba1fdab475bf62bd6c0107b3ef4667bac4e4d
refs/heads/master
2023-01-24T00:56:12.018011
2019-10-26T04:25:50
2019-10-26T04:25:50
70,698,752
0
0
MIT
2022-12-26T20:15:09
2016-10-12T12:41:23
HTML
UTF-8
Python
false
false
3,035
py
""" views.py: views module for scanviz web app author: Jeff date: 10/2016 """ import json from datetime import datetime, date from flask import render_template, redirect, url_for, session, flash, request, g from config import ELASTIC_SCAN_INDEX, ELASTIC_SCAN_DOC_TYPE, UPLOAD_LOC from app import app, es from forms import ScanForm from werkzeug.utils import secure_filename import elasticsearch @app.route('/') def index(): return render_template('index.html', title="Home") # todo: check if correct format @app.route('/upload', methods=['GET', 'POST']) def upload(): form = ScanForm() if form.validate_on_submit(): # we want either text upload of file upload contents = [] if form.fileup.data: for f in request.files.getlist('fileup'): # handle multiple files contents.append(f.stream.read()) elif form.textup.data: contents.append(form.textup.data) if contents: for scan in contents: res = es.index(index=ELASTIC_SCAN_INDEX, doc_type=ELASTIC_SCAN_DOC_TYPE, body=json.loads(scan)) if res: flash('scan uploaded!') else: flash('error indexing data') return redirect(url_for('index')) return render_template('upload_scan.html', form=form) @app.route('/getsummary', methods=['POST']) def scan_summary(): # get date range from_t = datetime.min to_t = datetime.max if request.form['from'] and request.form['to']: from_t = datetime.strptime(request.form['from'], "%Y-%m-%dT%H:%M") to_t = datetime.strptime(request.form['to'], "%Y-%m-%dT%H:%M") ret = {"ports" : {}} # i'm sure there's a better way to do this... ugh. try: res = es.search(index=ELASTIC_SCAN_INDEX, q="tcp.\*.state:open") except elasticsearch.ElasticsearchException: # NotFoundError flash("no records yet") return json.dumps(ret) for hit in res['hits']['hits']: scan_t = datetime.strptime(hit["_source"]["datetime"][:-7], "%Y-%m-%d %H:%M:%S") # drops ms # only summarize data with scan time in range specified if from_t <= scan_t <= to_t: ports = hit["_source"]["tcp"].keys() for port in ports: if hit["_source"]["tcp"][port]["state"] == "open": if ret["ports"].get(port): ret["ports"][port]["count"] += 1 else: ret["ports"][port] = {"count" : 1, "name" : hit["_source"]["tcp"][port]["name"]} return json.dumps(ret) @app.route('/search', methods=['GET', 'POST']) def search(): if request.method == 'GET': return render_template('search.html') else: # POST res = es.search(index=ELASTIC_SCAN_INDEX, body={"query":{"match":{"hostnames.name":request.form['keyword']}}}) return json.dumps(res['hits']['hits'][0]) if res['hits']['hits'] else "null"
ac79ae50eecd5b13e6d228e2ad6c4ca41dbad6db
619eeaa9234d02f478c9985391322a1dbb19ba71
/models.py
1983ac31303f004ce8e10c5d6166eae1d24a2458
[]
no_license
ravg95/filmowa-anqeta
4ee894c3bc103195d5d439b395071c1790e3c5b8
ba1d2444f0b99f193becbc73961ebd42d66d86d7
refs/heads/master
2022-12-14T14:23:46.385333
2019-10-23T19:42:32
2019-10-23T19:42:32
213,427,118
0
0
null
2022-12-08T06:42:02
2019-10-07T16:01:48
Python
UTF-8
Python
false
false
1,828
py
from flask_appbuilder import Model
from sqlalchemy import Column, ForeignKey, Integer, String, DateTime, UniqueConstraint
import datetime
from main import db


class Rating(db.Model):
    __tablename__ = "rating"
    id = db.Column(db.Integer, primary_key=True)
    session_id = db.Column(db.Text(), db.ForeignKey('session.session_id'), nullable=False)
    movie_id = db.Column(db.Integer, db.ForeignKey('info.id'), nullable=False)
    rating = db.Column(db.Integer, nullable=True)
    __table_args__ = (UniqueConstraint('session_id', 'movie_id', name='unik'),)

    def __init__(self, session, mov_id, rating):
        self.session_id = session
        self.movie_id = mov_id
        self.rating = rating

    def __repr__(self):
        return "User %s rated film %s : %s" % (self.session_id, self.movie_id, self.rating)


class User(db.Model):
    __tablename__ = "session"
    session_id = db.Column(db.Text(), primary_key=True)
    created_date = Column(DateTime, default=datetime.datetime.utcnow)

    def __init__(self, id):
        self.session_id = id

    def __repr__(self):
        return "%s" % (self.session_id)


class MovieInfo(db.Model):
    __tablename__ = "info"
    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.Text())
    original_title = db.Column(db.Text())
    plot = db.Column(db.Text())
    director = db.Column(db.Text())
    actors = db.Column(db.Text())
    imdb_url = db.Column(db.Text())
    poster_url = db.Column(db.Text())

    def __init__(self, id, title, original_title, plot, director, actors, imdb_url, poster_url):
        self.id = id
        self.title = title
        self.original_title = original_title
        self.plot = plot
        self.director = director
        self.actors = actors
        self.imdb_url = imdb_url
        self.poster_url = poster_url
86d8e57cb4fca48342ea4c75873866cd12814068
e6d40a1789fd32bb190635c682acfd031fa200a8
/venv/Scripts/pip3-script.py
87360cb823975a1000ae6a1697b5f03b1eff8de8
[]
no_license
hemangibavasiya/AudioToSpeech
a46ebe877cb313311d90fdf05780d79024c7892e
a9643ed5286b8b73b5ed9ee7284cc5f05a3a1fbc
refs/heads/master
2022-12-17T20:55:48.699017
2020-09-21T05:03:01
2020-09-21T05:03:01
297,233,868
0
0
null
null
null
null
UTF-8
Python
false
false
430
py
#!C:\Users\Hemangi.Bavasiya\PycharmProjects\AudioToSpeech\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==10.0.1','console_scripts','pip3'
__requires__ = 'pip==10.0.1'
import re
import sys

from pkg_resources import load_entry_point

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('pip==10.0.1', 'console_scripts', 'pip3')()
    )
036aad658300bff6391d2817d4406e28cf1041db
f9a54e2c7d4b87ff494251f60e01235f8000d69c
/pluto_convert.py
c72763ef29d741cb076301074ae589d1552aa6c9
[ "MIT" ]
permissive
aszadzinski/BINA-simulation
8e61ab9134fab69e8b6d65215c0b0685da79e7ee
11c32e5abcd117a9d79d550d8d90028a0cc05835
refs/heads/master
2023-08-28T14:30:45.879957
2019-10-22T11:25:00
2019-10-22T11:25:00
null
0
0
null
null
null
null
UTF-8
Python
false
false
538
py
#!/usr/bin/env python3

from sys import argv

with open("output_raw.evt", 'r') as file:
    lines = file.readlines()

ignore = [i for i in range(len(lines))]
ignore = ignore[::4]

newlines = []

for i in ignore[:-3]:
    for j in range(1, 4):
        temp = lines[i+j].split()
        for vec in temp[:4]:
            newlines.append(vec+"\n")

try:
    newfile = open("output_pluto.txt", 'w')
    newfile.writelines(newlines)
    newfile.close()
except:
    print("error in writelines")
f0716619569b500fc2433ad837d3076436dad240
7259dbcc9e32502945d362caa43d4ad380cd04ea
/企业数据库爬虫/enterprise-info-spider-master/qichacha_spider.py
87ce667e7aedf4e08b47e7e9fa9afdd10b40ea9c
[ "MIT" ]
permissive
Doraying1230/Python-Study
daa143c133262f4305624d180b38205afe241163
8dccfa2108002d18251053147ccf36551d90c22b
refs/heads/master
2020-03-29T13:46:13.061373
2018-07-26T15:19:32
2018-07-26T15:19:32
null
0
0
null
null
null
null
GB18030
Python
false
false
7,207
py
# -*- coding: gbk -*- from bs4 import BeautifulSoup import requests import xlrd import xlwt from xlutils.copy import copy import time import winsound #企查查网站爬虫类 class EnterpriseInfoSpider: def __init__(self): #文件相关 self.excelPath = 'enterprise_data.xls' self.sheetName = 'details' self.workbook = None self.table = None self.beginRow = None # 目录页 self.catalogUrl = "http://www.qichacha.com/search_index" # 详情页(前缀+firmXXXX+后缀) self.detailsUrl = "http://www.qichacha.com/company_getinfos" self.cookie = raw_input("请输入cookie:").decode("gbk").encode("utf-8") self.host = "www.qichacha.com" self.userAgent = "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.106 Safari/537.36" self.headers = { "cookie" : self.cookie, "host" : self.host, "user-agent" : self.userAgent } #数据字段名17个 self.fields = ['公司名称','电话号码','邮箱','统一社会信用代码','注册号','组织机构代码','经营状态','公司类型','成立日期','法定代表人','注册资本', '营业期限','登记机关','发照日期','公司规模','所属行业','英文名','曾用名','企业地址','经营范围'] #爬虫开始前的一些预处理 def init(self): try: #试探是否有该excel文件,#获取行数:workbook.sheets()[0].nrows readWorkbook = xlrd.open_workbook(self.excelPath) self.beginRow = readWorkbook.sheets()[0].nrows #获取行数 self.workbook = copy(readWorkbook) self.table = self.workbook.get_sheet(0) except Exception,e: print e self.workbook = xlwt.Workbook(encoding='utf-8') self.table = self.workbook.add_sheet(self.sheetName) #创建表头字段 col = 0 for field in self.fields: self.table.write(0,col,field.decode('gbk').encode('utf-8')) col += 1 self.workbook.save(self.excelPath) self.beginRow = 1 print "已在当前目录下创建enterprise_data.xls数据表" #从keyword/1页 得到的html中获得总页码数 def getTotalPage(self,catalogPageCode): soup = BeautifulSoup(catalogPageCode,"html.parser") pagebar = soup.select("li #ajaxpage") if pagebar == None or pagebar == []: return -1 return int(soup.select("li #ajaxpage")[-1].string.strip(' .')) #从keyword/page页 得到html中的所有公司条目 def getFirmIdDoms(self,catalogPageCode): soup = BeautifulSoup(catalogPageCode,"html.parser") return soup.select("#searchlist .table-search-list .tp2 a") #爬虫开始 def start(self): keyword = raw_input("请输入关键字:").decode("gbk").encode("utf-8") while keyword != "end": #先获取keyword第一页内容的页码 totalPage = self.getTotalPage(self.getCatalogPageCode(keyword, 1)) if totalPage == -1: # 请求下一轮查询的关键字 keyword = raw_input("爬取结束,请输入关键字:").decode("gbk").encode("utf-8") continue #模拟翻页操作 for page in range(1,totalPage+1): print "正在爬取第",page,"页的数据,请稍等..." #获取第page页代码 catalogPageCode = self.getCatalogPageCode(keyword,page) firmIdDoms = self.getFirmIdDoms(catalogPageCode) for firmIdDom in firmIdDoms: firmId = firmIdDom['href'][6:-6] companyname = "" #公司名 for string in firmIdDom.strings: companyname += string tdDom = firmIdDom.find_parent().find_parent() phoneDom = tdDom.select('.i-phone3') emailDom = tdDom.select('.fa-envelope-o') phone = "" email = "" if phoneDom != None and phoneDom != []: phone = phoneDom[0].next_sibling.strip() #手机 if emailDom != None and emailDom != []: email = emailDom[0].next_sibling.strip() #邮箱 detailsPageCode = self.getDetailsPageCode(firmId,companyname) self.writeDetailsToExcel(detailsPageCode,companyname,phone,email) time.sleep(0.3) #0.5s后再爬防止反爬虫机制 #请求下一轮查询的关键字 keyword = raw_input("爬取结束,请输入关键字:").decode("gbk").encode("utf-8") print "爬虫已完全结束!" 
#根据keyword和page构造查询串 #其中keyword中的空格换成+ #返回查询字符串构成的字典 def getCatalogQueryString(self,keyword,page): keyword.replace(' ','+') return {"key": keyword, "index": "0", "p": page} def getDetailQueryString(self,firmId,companyname): return {"unique": firmId, "companyname":companyname,"tab": "base"} # 根据keyword关键字获取目录页代码 def getCatalogPageCode(self, keyword, page): queryString = self.getCatalogQueryString(keyword, page) response = requests.request("GET", self.catalogUrl, headers=self.headers, params=queryString) return response.text # 根据firmId获取公司的详情页代码 def getDetailsPageCode(self,firmId,companyname): queryString = self.getDetailQueryString(firmId,companyname) response = requests.request("GET", self.detailsUrl, headers=self.headers, params=queryString) return response.text #抓取detailsPageCode页上该企业所有信息,并存入excel def writeDetailsToExcel(self,detailsPageCode,companyname,phone,email): detailDoms = self.getDetailDoms(detailsPageCode) self.table.write(self.beginRow, 0, companyname) self.table.write(self.beginRow, 1, phone) self.table.write(self.beginRow, 2, email) col = 3 for detailDom in detailDoms: detailName = detailDom.label.string.strip()[:-1] detailValue = detailDom.label.next_sibling.string.strip() while col < len(self.fields): # 找到匹配的那列字段 if detailName == self.fields[col].decode('gbk'): self.table.write(self.beginRow, col, detailValue) #写入excel col += 1 break else: col += 1 self.workbook.save(self.excelPath) # 保存至文件 self.beginRow += 1 #根据detailsPageCode获得它的所有detailDoms元素 def getDetailDoms(self,detailsPageCode): soup = BeautifulSoup(detailsPageCode,"html.parser") return soup.select(".company-base li") ######## #爬虫入口 ######## spider = EnterpriseInfoSpider() spider.init() spider.start()
828a25e8e7bc96ed9dc323886aa1b47245e66420
50a4af2cc044fe9fb6b2dfb4f612bdefb18153b6
/src/seasons/models.py
7d19ce506a2fe325365b2a4a07377a403bbd7068
[ "MIT" ]
permissive
pawelad/nba-rank
45bbc2aa798c40ef811d9c617eb4768b7cebd90d
ff9f80609c25435307608002ee3b36951e337043
refs/heads/master
2022-07-15T15:53:01.385873
2016-08-14T16:48:20
2016-08-14T16:48:20
53,074,080
1
0
null
null
null
null
UTF-8
Python
false
false
2,823
py
from django.db import models from django.utils.translation import ugettext_lazy as _ from django_extensions.db.models import TimeStampedModel from trueskill import Rating, MU, SIGMA class Season(TimeStampedModel, models.Model): abbr = models.CharField( verbose_name=_("season"), max_length=16, ) class Meta: verbose_name = _("season") verbose_name_plural = _("seasons") ordering = ['-abbr'] def __str__(self): return self.abbr class PlayerSeason(TimeStampedModel, models.Model): player = models.ForeignKey( 'players.Player', related_name='seasons', verbose_name=_("player"), ) team = models.ForeignKey( 'teams.Team', related_name='players_seasons', verbose_name=_("team"), null=True, ) season = models.ForeignKey( 'seasons.Season', related_name='players_seasons', verbose_name=_("season"), ) # Mostly counting overall number of votes while # not creating another model just for it votes_win = models.PositiveIntegerField( verbose_name=_("votes win"), default=0, editable=False, ) votes_tie = models.PositiveIntegerField( verbose_name=_("votes tie"), default=0, editable=False, ) # trueskill rating_mu = models.FloatField( verbose_name=_("Rating MU"), default=MU, ) rating_sigma = models.FloatField( verbose_name=_("Rating SIGMA"), default=SIGMA, ) # Stats pts = models.FloatField(verbose_name="PTS") reb = models.FloatField(verbose_name="REB") ast = models.FloatField(verbose_name="AST") stl = models.FloatField(verbose_name="STL") blk = models.FloatField(verbose_name="BLK") fg_pct = models.FloatField(verbose_name="FG%") fg3_pct = models.FloatField(verbose_name="3P%") ft_pct = models.FloatField(verbose_name="FT%") # NBA API ROSTERSTATUS = models.PositiveSmallIntegerField( verbose_name="ROSTERSTATUS", ) GAMES_PLAYED_FLAG = models.CharField( verbose_name="GAMES_PLAYED_FLAG", max_length=8, ) class Meta: verbose_name = _("player season") verbose_name_plural = _("player seasons") ordering = ['-season', '-rating_mu', 'rating_sigma'] unique_together = ('player', 'season') def get_team_name(self): """Return player's season team name.""" if self.team: return '{0.city} {0.name}'.format(self.team) else: return _("No team") def get_rating(self): """Return current `Rating` instance with data from database.""" return Rating(mu=self.rating_mu, sigma=self.rating_sigma) def __str__(self): return '{0.player} ({0.season})'.format(self)
e3b6daf4afa3a6c1d997f41a17ccdc72323baac7
531c47c15b97cbcb263ec86821d7f258c81c0aaf
/sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_06_01/aio/operations_async/_vpn_site_links_operations_async.py
1d926c77f371ce3542dd2c6a7b9deb2b8a378567
[ "LicenseRef-scancode-generic-cla", "LGPL-2.1-or-later", "MIT" ]
permissive
YijunXieMS/azure-sdk-for-python
be364d3b88204fd3c7d223df23756386ff7a3361
f779de8e53dbec033f98f976284e6d9491fd60b3
refs/heads/master
2021-07-15T18:06:28.748507
2020-09-04T15:48:52
2020-09-04T15:48:52
205,457,088
1
2
MIT
2020-06-16T16:38:15
2019-08-30T21:08:55
Python
UTF-8
Python
false
false
8,418
py
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] class VpnSiteLinksOperations: """VpnSiteLinksOperations async operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. :type models: ~azure.mgmt.network.v2019_06_01.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. """ models = models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config async def get( self, resource_group_name: str, vpn_site_name: str, vpn_site_link_name: str, **kwargs ) -> "models.VpnSiteLink": """Retrieves the details of a VPN site link. :param resource_group_name: The resource group name of the VpnSite. :type resource_group_name: str :param vpn_site_name: The name of the VpnSite. :type vpn_site_name: str :param vpn_site_link_name: The name of the VpnSiteLink being retrieved. 
:type vpn_site_link_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: VpnSiteLink, or the result of cls(response) :rtype: ~azure.mgmt.network.v2019_06_01.models.VpnSiteLink :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.VpnSiteLink"] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01" # Construct URL url = self.get.metadata['url'] # type: ignore path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'vpnSiteName': self._serialize.url("vpn_site_name", vpn_site_name, 'str'), 'vpnSiteLinkName': self._serialize.url("vpn_site_link_name", vpn_site_link_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = 'application/json' # Construct and send request request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.Error, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('VpnSiteLink', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnSites/{vpnSiteName}/vpnSiteLinks/{vpnSiteLinkName}'} # type: ignore def list_by_vpn_site( self, resource_group_name: str, vpn_site_name: str, **kwargs ) -> AsyncIterable["models.ListVpnSiteLinksResult"]: """Lists all the vpnSiteLinks in a resource group for a vpn site. :param resource_group_name: The resource group name of the VpnSite. :type resource_group_name: str :param vpn_site_name: The name of the VpnSite. 
:type vpn_site_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either ListVpnSiteLinksResult or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2019_06_01.models.ListVpnSiteLinksResult] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ListVpnSiteLinksResult"] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01" def prepare_request(next_link=None): if not next_link: # Construct URL url = self.list_by_vpn_site.metadata['url'] # type: ignore path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'vpnSiteName': self._serialize.url("vpn_site_name", vpn_site_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') else: url = next_link query_parameters = {} # type: Dict[str, Any] # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = 'application/json' # Construct and send request request = self._client.get(url, query_parameters, header_parameters) return request async def extract_data(pipeline_response): deserialized = self._deserialize('ListVpnSiteLinksResult', pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: error = self._deserialize(models.Error, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response return AsyncItemPaged( get_next, extract_data ) list_by_vpn_site.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnSites/{vpnSiteName}/vpnSiteLinks'} # type: ignore
5121b71195a1b12b9128355e076bfc38b72f7453
09653acdd8ae79826b4fc7456d7e35322f0f27a8
/second.py
51d6bc212e026427c5454f3a47e6eb7fda4d0d53
[]
no_license
jannza/aima
ea2d7515b86089593cebef4c399016957433fcbd
99b7116257b463f82ad3de5ac8451815f2b052a1
refs/heads/master
2020-05-18T06:59:12.820969
2013-12-19T13:56:05
2013-12-19T13:56:05
null
0
0
null
null
null
null
UTF-8
Python
false
false
2,747
py
import logic import itertools KB = logic.PropKB() #initial state KB.tell(logic.expr("At_0(Flat, Axle) & ~At_0(Flat, Ground) & ~At_0(Flat, Trunk)")) KB.tell(logic.expr("~At_0(Spare, Axle) & ~At_0(Spare, Ground) & At_0(Spare, Trunk)")) # KB.tell(logic.expr("At_0(Flat, Axle) & At_0(Spare, Trunk)")) # #initial locations imply parts are not elsewhere at the same time # KB.tell(logic.expr("~At_0(Flat, Axle) | (~At_0(Flat, Trunk) & ~At_0(Flat, Ground))")) #first KB.tell(logic.expr("~Remove_0(Flat, Axle) | At_0(Flat, Axle)")) KB.tell(logic.expr("~PutOn_0(Flat, Axle) | At_0(Flat, Ground) ")) KB.tell(logic.expr("~PutOn_0(Flat, Axle) | ~At_0(Flat, Axle) ")) KB.tell(logic.expr("~Remove_0(Flat, Trunk) | At_0(Flat, Trunk)")) #second KB.tell(logic.expr("~Remove_0(Flat, Axle) | At_1(Flat, Ground)")) KB.tell(logic.expr("~PutOn_0(Flat, Axle) | At_1(Flat, Axle)")) KB.tell(logic.expr("~Remove_0(Flat, Trunk) | At_1(Flat, Ground)")) #third KB.tell(logic.expr("~Remove_0(Flat, Axle) | ~At_1(Flat, Axle)")) KB.tell(logic.expr("~PutOn_0(Flat, Axle) | ~At_1(Flat, Ground)")) KB.tell(logic.expr("~Remove_0(Flat, Trunk) | ~At_1(Flat, Trunk)")) #fourth KB.tell(logic.expr("At_0(Flat, Axle) | ~At_1(Flat, Axle) | PutOn_0(Flat, Axle)")) KB.tell(logic.expr("At_0(Flat, Ground) | ~At_1(Flat, Ground) | Remove_0(Flat, Axle) | Remove_0(Flat, Trunk)")) KB.tell(logic.expr("At_0(Flat, Trunk) | ~At_1(Flat, Trunk)")) #fifth KB.tell(logic.expr("~At_0(Flat, Axle) | At_1(Flat, Axle) | Remove_0(Flat, Axle)")) KB.tell(logic.expr("~At_0(Flat, Ground) | At_1(Flat, Ground) | PutOn_0(Flat, Axle)")) KB.tell(logic.expr("~At_0(Flat, Trunk) | At_1(Flat, Trunk) | Remove_0(Flat, Trunk)")) #list of all possible actions actions = ["Remove_0(Flat, Axle)", "PutOn_0(Flat, Axle)", "Remove_0(Flat, Trunk)"] #sixth, this can somewhat be automated for elem in itertools.combinations(actions, 2): KB.tell(logic.expr("~"+elem[0]+" | ~"+elem[1])) # KB.tell(logic.expr("~Remove_0(Flat, Axle) | ~PutOn_0(Flat, Axle)")) # KB.tell(logic.expr("~PutOn_0(Flat, Axle) | ~Remove_0(Flat, Axle)")) #seventh, can be also automated seventh = "" for elem in actions: seventh = seventh + elem + " | " seventh = seventh[:-2] KB.tell(logic.expr(seventh)) #add goal KB.tell(logic.expr("At_1(Flat, Ground)")) #some manual cnf string = "" for elem in KB.clauses: # print(elem) elem = logic.to_cnf(str(elem)) string = string + str(elem) + " & " string = string[:-2] # print(string) #print only true values answer = logic.dpll_satisfiable(string) if answer != False: for elem in answer: if answer[elem]: print(str(elem)+ " : " +str(answer[elem])) else: print(answer) # print(logic.dpll_satisfiable(string))
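A smaller sanity check of the same aima logic API (PropKB, expr, dpll_satisfiable), assuming that logic module is importable; the symbols P and Q are made up for the illustration.

import logic

kb = logic.PropKB()
kb.tell(logic.expr("P | Q"))
kb.tell(logic.expr("~P"))
# Join the stored clauses the same way the script above does, then hand them to DPLL.
model = logic.dpll_satisfiable(" & ".join(str(c) for c in kb.clauses))
print(model)   # expected assignment: Q -> True, P -> False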
cfa054562d3a40b9a8a30a44717fbb5aa551f138
4d0d0aec77a98e637f88f76f28b2349187a73232
/peterfilter.py
b0728e23e7273d28d5d79eaf0dc1c239700102a8
[]
no_license
wukong4430/UnderGraduatedPaper
7c27fdbee80b79622ad6a02028f919ed04309ce3
4d2bc662f4104f3351aa29da016639c936ab4f10
refs/heads/master
2020-03-08T12:52:14.951738
2018-04-30T01:38:01
2018-04-30T01:38:01
128,141,072
0
0
null
null
null
null
UTF-8
Python
false
false
3,854
py
# -*- coding: utf-8 -*- # @Author: Kicc Shen # @Date: 2018-04-26 12:39:35 # @Last Modified by: Kicc Shen # @Last Modified time: 2018-04-26 15:48:50 from Processing import Processing import numpy as np import math class peter_filter: def __init__(self, X_src, y_src, X_tar, y_tar): self.X_src = X_src self.y_src = y_src self.X_tar = X_tar self.y_tar = y_tar def CalculateD(self, row_i, row_j): size = row_i.shape[0] sum_distance = 0.0 for i in range(size): sum_distance += pow((row_i[i] - row_j[i]), 2) distance = math.sqrt(sum_distance) return distance def transform(self): X_train, Y_train = self.X_src, self.y_src test_X_array, test_Y_array = self.X_tar, self.y_tar X_first_train, Y_first_train = self.X_src, self.y_src # 至此我们得到了测试集和需要筛选的训练集,现在就是要遍历筛选出我们需要的和测试集相似的训练集 all_group_pair = [] # all_group用来保存测试集和训练集的簇,以便于下一步的筛选 for train_data_i in range(X_train.shape[0]): min_distance = float("inf") # 将初始距离设置为正无穷 save_test_x = [] save_test_y = [] each_group = [] each_x_group = [] each_y_group = [] each_x_group.append(list(X_first_train[train_data_i])) each_y_group.append(Y_first_train[train_data_i]) for test_data_j in range(test_X_array.shape[0]): # 计算训练集中每一个元素与每一个测试集之间的距离大小,选最小的,保存为改测试集的“粉丝” distance = self.CalculateD( X_first_train[train_data_i], test_X_array[test_data_j]) if distance < min_distance: save_test_x = list(test_X_array[test_data_j]) save_test_y = test_Y_array[test_data_j] min_distance = distance each_x_group.append(save_test_x) each_y_group.append(save_test_y) each_group.append(each_x_group) each_group.append(each_y_group) all_group_pair.append(each_group) # all_group_pair:[[[[1.0, 1.0], [2.0, 3.0]], [1.0, 0.0]]] 第一个是训练集,第二个是测试集,第三个是两个集对应的标签 # 至此,得到每一个彩色球与他最近的白球的组合对,接下来,反着求白球与这些组队中最近的彩色球,这些彩色球将作为训练集 second_train_X_data = [] second_train_Y_data = [] for i in range(test_X_array.shape[0]): min_distance = float("inf") # 将初始距离设置为正无穷 save_train_x = [] save_train_y = [] for j in range(len(all_group_pair)): if list(test_X_array[i]) in all_group_pair[j][0]: distance = self.CalculateD(np.array( all_group_pair[j][0][0]), np.array(all_group_pair[j][0][1])) if distance < min_distance: save_train_x = all_group_pair[j][0][0] save_train_y = all_group_pair[j][1][0] min_distance = distance if len(save_train_x) > 0: second_train_X_data.append(save_train_x) second_train_Y_data.append(save_train_y) second_train_X_data = np.array(second_train_X_data) second_train_Y_data = np.array(second_train_Y_data) # print(second_train_X_data.shape) # print(second_train_Y_data.shape) # print(test_X_array.shape) # print(test_Y_array.shape) return second_train_X_data, second_train_Y_data, test_X_array, test_Y_array
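A hypothetical smoke test for the filter above, using random arrays in place of real defect-prediction data; it only assumes the peter_filter class itself is importable.

import numpy as np

rng = np.random.RandomState(0)
X_src, y_src = rng.rand(30, 4), rng.randint(0, 2, 30)   # source-project instances
X_tar, y_tar = rng.rand(10, 4), rng.randint(0, 2, 10)   # target-project instances

pf = peter_filter(X_src, y_src, X_tar, y_tar)
X_train, y_train, X_test, y_test = pf.transform()
print(X_train.shape, y_train.shape, X_test.shape, y_test.shape)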
3f4646c2837a40a78c375c8b3891626f2fd21970
99b857cc7d419f01a2396e99558f3f68ddb0f806
/mdp/mdp_optimizer.py
1582cf91a620557ad163320e29f211e929ca7e6e
[]
no_license
kylebillemeyer/rl
5d288fa2ff9963ea87afba1f4ac0872c0674f42c
8b3aa93955389f858aaaaed7c391195d90048d7e
refs/heads/master
2021-01-20T08:06:33.357842
2017-05-22T13:42:34
2017-05-22T13:42:34
90,094,454
0
0
null
null
null
null
UTF-8
Python
false
false
6,095
py
import numpy as np """ mdp * [N x A] * int int -> [N x 1]: A model-free prediction algorithm that uses a first pass monte carlo solution to estimate the value function of an unknown. Note, this function cheats a bit and uses the underlying mdp structure to perform the operation on all states. In a real model-free scenario we would only know of states we've visited. The true purpose of this function would be to estimate the value of a given state. """ def first_pass_monte_carlo(mdp, policy, num_trials, learning_rate): vpi = np.zeros(mdp.num_states) for i in range(mdp.num_states): state = mdp.states[i] for trial in range(num_trials): vpi[i] = temporal_difference( mdp, policy, state, vpi[i], learning_rate, -1) return vpi """ mdp * [N x A] * int -> [N x 1]: A model-free prediction algorithm that uses a first pass monte carlo solution to estimate the value function of an unknown. Every pass differs from frist pass in that each visit to the state in question that occurs within a trials will weigh the average. This trades off the number of trials needed to be run in a mdp with cycles in order to have a useful estimation of the value function. However, doing this introduces some bias to the evaluation because every cyclical path implicitly has a non-cyclical subpath with will be weighed twice for every trial. Note, this function cheats a bit and uses the underlying mdp structure to perform the operation on all states. In a real model-free scenario we would only know of states we've visited. The true purpose of this function would be to estimate the value of a given state. """ def every_pass_monte_carlo(mdp, policy, numTrials): vpi = np.zeros(mdp.num_states) for i in range(mdp.num_states): state = mdp.states[i] visits = 0 running_average = 0 for trial in range(numTrials): totalRewards = 0 path = mdp.sample_path(state, policy) for node in reversed(path): totalRewards += node[1] if node[0] == state: running_average = ((running_average * visits) + totalRewards) / (visits + 1) visits += 1 vpi[i] = running_average return vpi """ mdp * [A x N] * state * float * float * int: Performs a single iteration temporal difference learning. Effectively, it provides a directional update of the value estimation for a given state. More specifically, the function estimates the the "target" value of the current state by running a trial with fixed look ahead (the number of actions to be taken is defined by learning_depth). It the determines the value error as the difference between the target value and the expected value. The expected value is updated in the direction of the error by a factor determined by learning_rate. Note, setting learning_depth to -1 will effectively make this a single trial monte-carlo simulation. Here, -1 means run until the look ahead terminates. """ def temporal_difference(mdp, policy, state, expected_value, learning_rate, learning_depth): td_target = mdp.sample(state, policy, learning_depth) td_error = td_target - expected_value return expected_value + (learning_rate * td_error) """ mdp * [N x A] * int -> [N x 1]: Calculate the value function for a given policy function using a dynamic programming methodology. The policy function is an N x A matrix where N is the number of states and A is the number of actions that can be taken in general. Each value for index postion (n, a) is the probablity of taking that action. If a given action a is not valid for a given state n, then index [n, a] should be 0. Each row vector n must add up to 1. 
The result is an N x 1 matrix where N is the number of states. Each value for index n is the expected reward for state n. """ def calc_value_func_dynamic(mdp, policy, iterations): num_states = len(mdp.states) num_actions = len(mdp.actions) v = np.zeros(num_states) for i in range(iterations): vi = np.zeros(num_states) for j in range(num_actions): vi += policy[j, :] * (mdp.rewards[j] + (mdp.discount * mdp.probabilityMat[j].dot(v))) v = vi return v """ mdp * [N x A] * int -> [N x 1]: Calculates the optimal policy function for a given mdp. It does this by doing a single dynamic update of the estimated value function for a starting policy function, then updates the policy function using a greedy policy, i.e. a policy that says to always choose the action with the highest estimated expected reward. It can be proven that applying this two step process iteratively is guaranteed to converge on the optimal policy function. """ def optimized_policy_func(mdp, policy, iterations): num_states = len(mdp.states) num_actions = len(mdp.actions) v = np.zeros(num_states) for i in range(iterations): vi = np.zeros(num_states) for j in range(num_actions): vi += policy[j, :] * (mdp.rewards[j] + (mdp.discount * mdp.probabilityMat[j].dot(v))) v = vi policy = greedy_policy(v, mdp.encoded, mdp.num_states, mdp.num_actions) #print v #print policy return (v, policy) """ [N x 1] * [Transition] * int:A * int:N -> [A x N]: Given a value function, builds the policy function that would always choose the action with the highest estimated reward. """ def greedy_policy(vpi, transitions, num_states, num_actions): best_actions = {} # {from_state, (action, expected_reward)} #print transitions # Go through each transition and update the cache if this action has a higher # expected reward. for t in transitions: from_state = t[0] to_state = t[2] action = t[1] expected_reward = vpi[to_state] best_action = best_actions.setdefault(from_state, (action, expected_reward)) #print "{}, {}, {}, {}".format(from_state, action, to_state, expected_reward) if best_action[1] <= expected_reward: best_actions[from_state] = (action, expected_reward) #print best_actions # build the policy matrix based on each states best action policy = np.zeros((num_actions, num_states)) for state in range(num_states): best_action = best_actions[state] #print best_action if best_action is None: policy[0, state] = 1 else: policy[best_action[0], state] = 1 return policy
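A standalone check of the greedy_policy helper above on a hand-made three-state, two-action example; the tuples follow the (from_state, action, to_state) layout that function indexes.

import numpy as np

vpi = np.array([0.0, 1.0, 5.0])   # state 2 carries the highest estimated value
transitions = [(0, 0, 1), (0, 1, 2), (1, 0, 0), (1, 1, 2), (2, 0, 1), (2, 1, 2)]
policy = greedy_policy(vpi, transitions, 3, 2)
print(policy)   # each column is one-hot on the action that reaches the highest-value successor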
5cddde05175442e01856c2095941d4bcca1dac66
2d19bc07e5d4b9fba2fe0b04a79ca36727cede52
/Project Euler/29/distinct-powers.py
c3a8e04adee7b5424bb250af2f6e68dfe2a6c5f8
[]
no_license
BobbyRobillard/Elixir
951ca3c2bfb8b828b35d3136e1198aed661f74d0
15e174213839b3890e867665f5fe44b0887d0115
refs/heads/master
2020-06-16T19:39:39.769706
2020-04-28T00:26:38
2020-04-28T00:26:38
195,681,600
0
0
null
null
null
null
UTF-8
Python
false
false
224
py
import math powers = [] for i in range(2, 101): for j in range(2, 101): result = math.pow(i, j) if result not in powers: powers.append(result) print("Length: {0}".format(str(len(powers))))
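The same count can be cross-checked with a set comprehension over exact integer powers, which sidesteps the float round-off risk of math.pow:

distinct = {a ** b for a in range(2, 101) for b in range(2, 101)}
print("Length: {0}".format(len(distinct)))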
8ab82917bd1e0a487afc6ae3b6072199632ea5d6
1bd83c1f26ed0e88565a5c5488f4385d0500e7d1
/runner2.py
5d45f8aaa29755e10a5b6610a86426f30e83cf3e
[]
no_license
Anthony9698/Cache_Sim
e5bd13aede296ec618d75c68a7b8f9cbb930496a
478b5f429c8989df68ef5f82160804ee26474f69
refs/heads/master
2022-01-08T11:21:54.796772
2019-04-24T00:39:56
2019-04-24T00:39:56
180,197,125
0
0
null
null
null
null
UTF-8
Python
false
false
509
py
#!/usr/bin/python3 import os import time cache_size = 1 block_size = 2 assoc = 1 while assoc != 16: if cache_size == 128 and block_size == 64: cache_size = 1 block_size = 4 assoc *= 2 elif cache_size == 128: block_size *= 2 cache_size = 1 command = 'wine CacheSim.exe -f Trace2A.trc -s ', str(cache_size), " -b ", str(block_size), ' -a ', str(assoc), ' -r RR' command = ''.join(command) os.system(command) cache_size *= 2 time.sleep(5)
ffd4868ef6adfa7d751b9fe0b724b9a431f08b62
dd3b8bd6c9f6f1d9f207678b101eff93b032b0f0
/basis/AbletonLive10.1_MIDIRemoteScripts/KeyLab_Essential/control_element_utils.py
1d3916431220d03ee69e2b372a11411263145ba9
[]
no_license
jhlax/les
62955f57c33299ebfc4fca8d0482b30ee97adfe7
d865478bf02778e509e61370174a450104d20a28
refs/heads/master
2023-08-17T17:24:44.297302
2019-12-15T08:13:29
2019-12-15T08:13:29
228,120,861
3
0
null
2023-08-03T16:40:44
2019-12-15T03:02:27
Python
UTF-8
Python
false
false
1,915
py
# uncompyle6 version 3.4.1 # Python bytecode 2.7 (62211) # Decompiled from: Python 2.7.16 (v2.7.16:413a49145e, Mar 2 2019, 14:32:10) # [GCC 4.2.1 Compatible Apple LLVM 6.0 (clang-600.0.57)] # Embedded file name: /Users/versonator/Jenkins/live/output/mac_64_static/Release/python-bundle/MIDI Remote Scripts/KeyLab_Essential/control_element_utils.py # Compiled at: 2019-04-23 14:43:03 from __future__ import absolute_import, print_function, unicode_literals import Live from ableton.v2.base import depends from ableton.v2.control_surface import InputControlElement, MIDI_CC_TYPE, MIDI_NOTE_TYPE from ableton.v2.control_surface.elements import ButtonElement, ColorSysexElement from . import sysex from .ringed_encoder import RingedEncoderElement @depends(skin=None) def create_button(identifier, name, channel=0, skin=None): return ButtonElement(True, MIDI_NOTE_TYPE, channel, identifier, name=name, skin=skin) @depends(skin=None) def create_pad_led(identifier, name, skin=None): def make_send_value_generator(id): def send_message_generator(v): assert isinstance(v, tuple) return sysex.LIGHT_PAD_LED_MSG_PREFIX + (id,) + v + (sysex.END_BYTE,) return send_message_generator return ColorSysexElement(skin=skin, send_message_generator=make_send_value_generator(identifier), default_value=(0, 0, 0), optimized=True, name=name) def create_ringed_encoder(identifier, ring_element_identifier, name): return RingedEncoderElement(MIDI_CC_TYPE, 0, identifier, map_mode=Live.MidiMap.MapMode.relative_signed_bit, ring_element=InputControlElement(MIDI_CC_TYPE, 0, ring_element_identifier, name=name + '_Ring_Element'), name=name)
a73c541166f512552c31b4b2d31993274081860d
698512c01048fcefcc14583089ef2e8c7962923a
/Python_Projects/Data_Visualization/Chap16_downloading_data/death_valley_highs_lows.py
66910b06e49634b5849c6f4d9d8d8527b304dbd1
[]
no_license
Miguel-Tirado/Python
c76cb9846c9a2b9c6b3c4827cdb95042f4e5d447
227def380c64095c3040c848aa035ac46d26d079
refs/heads/main
2023-04-16T12:15:03.301275
2021-04-30T16:39:48
2021-04-30T16:39:48
346,443,032
0
0
null
null
null
null
UTF-8
Python
false
false
1,257
py
import csv from datetime import datetime import matplotlib.pyplot as plt filename = 'data/death_valley_2018_simple.csv' with open(filename) as f: reader = csv.reader(f) header_row = next(reader) # Get dates and high and low temperatures from this file. dates, highs, lows = [], [], [] for row in reader: current_date = datetime.strptime(row[2], '%Y-%m-%d') try: high = int(row[4]) low = int(row[5]) except ValueError: print(f"Missing data for {current_date}") else: dates.append(current_date) highs.append(high) lows.append(low) # plot the high temperatures plt.style.use('seaborn') fig, ax = plt.subplots() ax.plot(dates, highs, c='red', alpha=0.5) ax.plot(dates, lows, c='blue', alpha=0.5) # takes one x-argument (dates) and two y-arguments (highs,lows) to fill in between ax.fill_between(dates, highs, lows, facecolor='blue', alpha = 0.1) # format plot title = "Daily high and low temperatures - 2018\nDeath Valley, CA" ax.set_title(title, fontsize=20) ax.set_xlabel('', fontsize=16) fig.autofmt_xdate() ax.set_ylabel("Temperature (F)", fontsize=16) ax.tick_params(axis='both', which='major', labelsize=16) plt.show()
0fa530a2384a6164dd95472a6f131da270803772
81053b39d353ee640af7c2f4daf61f454c1dd7d4
/Day4/day4_1.py
a13e8ba38e543b5a566cb687bd1f26ec4c600967
[]
no_license
KristiDalipaj/AdventCalendar
06a17a3a2559cf5d90c871057dede069d8a93468
da51e905c6b81cf86c49b92c9407d0d043121bfd
refs/heads/master
2021-08-31T06:03:51.666589
2017-12-20T13:43:56
2017-12-20T13:43:56
114,452,310
0
0
null
null
null
null
UTF-8
Python
false
false
421
py
f = open("a.txt", "r") sum = 0 i = 0 for s in f: i+=1 #s = f.readline() s = s[:-1] lst = s.split(" ") dict = {} for index in range(len(lst)): val = 1; if lst[index] in dict : val = dict[lst[index]] +1 dict[lst[index]] = val else : dict[lst[index]] = 1 print (dict) for key in dict: if (dict[key] > 1) : sum+=1 print (key,dict[key]) break print (i-sum,i,sum)
ee7b1b708cd89f4ead76a1c1b9e618d5b534ce4a
84b7b6ac061825ba5acad50f4b572387ae1877e2
/swaggerconformance/response.py
1c44a163fa016081f513052c5910d14eac485813
[ "MIT" ]
permissive
crunchr/swagger-conformance
8f4cb2ac28b10d600a9d4b28263916901afbad0c
e1d2288117d5c35dba9203a02d5a2a72c56403ca
refs/heads/master
2021-05-07T21:06:46.215568
2017-04-25T19:28:48
2017-04-25T19:28:48
108,993,697
0
0
null
2017-10-31T12:39:28
2017-10-31T12:39:27
null
UTF-8
Python
false
false
1,067
py
""" A response received to a Swagger API operation. """ import logging __all__ = ["Response"] log = logging.getLogger(__name__) class Response: """A response received to a Swagger API operation. :param raw_response: The raw response. :type raw_response: pyswagger.io.Response """ def __init__(self, raw_response): self._raw_response = raw_response @property def status(self): """HTTP status code of the response. :rtype: int """ return self._raw_response.status @property def body(self): """Parsed response body converted to objects via the codec in use.""" return self._raw_response.data @property def raw(self): """Raw response body. :rtype: bytes """ return self._raw_response.raw @property def headers(self): """HTTP headers received on the response. Example format is ``{'Content-Type': [xxx, xxx]}`` :rtype: dict(str, list(str)) """ return self._raw_response.header
5d4039762c1eee0e166cb3b34c6d6e2041476e07
931164b1792dfca30180f6479ef8dc9868710080
/app.py
173fd7e380f6882b31c9b6b812ad48baccf8f6b3
[]
no_license
2510lucky/WEB_APP
d0ad20569fce1287984309441cff7461df24a706
70f95cfef28653b5b857e62addfd74a25043a076
refs/heads/master
2022-12-26T18:58:53.938730
2020-09-30T20:37:23
2020-09-30T20:37:23
300,055,789
0
0
null
null
null
null
UTF-8
Python
false
false
278
py
from flask import Flask from flask_sqlalchemy import SQLAlchemy app = Flask(__name__) app.config['SECRET_KEY'] ='secret-key' app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///data.db' db = SQLAlchemy(app) from routes import * if __name__=='__main__': app.run(debug=True)
4d188660f15633cc041221dc5eaf3701df426fe0
87870da8c8a954603f8d9a7f014a07c91c2851a9
/Exercicios/ex_088.py
03992f4b602b4eb6cab2927e6e5baae1d413b1bb
[ "MIT" ]
permissive
antoniosereno95/Python_Curso_em_Video
ac587d6ab3024f4c7a6cc761e07a685cfea2ad41
d706cba9ca7e5670881e5e06bceb5538971e99c0
refs/heads/main
2023-08-01T21:43:43.621458
2021-10-06T23:16:42
2021-10-06T23:16:42
414,398,859
0
0
null
null
null
null
UTF-8
Python
false
false
565
py
import random q = int(input('quantos jogos devo fazer? ')) for i in range(q): jogo = [0,0,0,0,0,0] for j in range(6): flag = True while flag ==True: r = random.randint(1, 61) for k in range(len(jogo)): if int(r)==int(jogo[k]): flag = True pass else: flag = False jogo.append(r) jogo.sort() jogo1 = jogo[6:12] print(jogo1) print('Esses sao seus jogos.\nBoa sorte!')
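A compact duplicate-free variant of the same six-number draw, using random.sample over the same 1-61 span that randint(1, 61) covers; the English prompt text here is my own wording.

import random

games = int(input('How many games should I generate? '))
for _ in range(games):
    print(sorted(random.sample(range(1, 62), 6)))   # six distinct numbers per game
print('Good luck!')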
d8b0ec7d669c5c836b829c5a55a8e132ebb34fc6
0911c412ae28a19cc30fc92aa73121d4545c3476
/atividade_avaliativa/exerc_06.py
6ed8eebf747141bcc7de7021f5a987b9d87949a4
[]
no_license
niviacampos/CursoBlue_Mod1
6a69bed39e4c7d2f7a1c04e23af19bbe6dab13e4
db647fa02167722c571672d488ff70526a6bcd08
refs/heads/main
2023-05-02T11:48:38.452854
2021-05-24T21:58:53
2021-05-24T21:58:53
365,411,829
0
0
null
null
null
null
UTF-8
Python
false
false
1,497
py
# 06 - Utilizando listas faça um programa que faça 5 perguntas para uma pessoa sobre um crime. As perguntas são: # "Telefonou para a vítima?" # "Esteve no local do crime?" # "Mora perto da vítima?" # "Devia para a vítima?" # "Já trabalhou com a vítima?"  # O programa deve no final emitir uma classificação sobre a participação da pessoa no crime.  # Se a pessoa responder positivamente a 2 questões ela deve ser classificada como "Suspeita", # Entre 3 e 4 como "Cúmplice" e 5 como "Assassino".  # Caso contrário, ele será classificado como "Inocente". perguntas = ["Telefonou para a vítima?", "Esteve no local do crime?", "Mora perto da vítima?", "Devia para a vítima?", "Já trabalhou com a vítima?"] s = n = 0 resp_invalida = [] for item in range(len(perguntas)): respostas = input(f'{perguntas[item]} Sim ou Não? ').title()[0] if respostas == 'S': s += 1 elif respostas == 'N': n += 1 else: resp_invalida.append(perguntas[item]) print('Resposta inválida') print(resp_invalida) for i in range(len(resp_invalida)): while respostas != 'S' or 'N': respostas = input( f'Responda novamente. {resp_invalida[i]} Sim ou Não? ').title()[0] break if s == 5: print('Assassino.') elif s == 3 or s == 4: print('Cúmplice.') elif s == 2: print('Suspeito.') else: print('Inocente.')
acca18b4a8a032cadd293e0ff0784e8c030def0c
d5dca98583fcc96d18bb01e67cc25b24312a837c
/pygame/words.py
01c885db05c2e5440b9c52ad726947fbc9b1653f
[]
no_license
shushmitadas99/Devsnest
b97e518e1763bdf086fc7e09a99de2cb0a5fb2bc
5210503e3fe53861d08f6795ce6804821e44ceca
refs/heads/main
2023-06-05T05:15:55.520382
2021-06-25T01:55:58
2021-06-25T01:55:58
368,608,198
0
0
null
null
null
null
UTF-8
Python
false
false
377
py
words_list = [ 'devsnest', 'coding', 'developer', 'code', 'computer', 'python', 'java', 'programming', 'development', 'moderator', 'discord', 'gaming', 'software', 'hardware', 'games', 'react', 'pygame', 'technology', 'it', 'streaming', 'computing', 'messenger', 'ai', 'tracker' ]
416d4394033392ded2eb456e57afc384e280d8f0
60ffbaba15276ffebe4346e66e5cf07ab2ed96e1
/home/forms.py
4c5fc0ecf82fb6d5ae2fa64d69e230a3fb45b8ab
[]
no_license
jboyy/superboy
d7fcda734423d8f5dd0546a4e594f8f04571749e
c801689a27fb3c304ebfd53d1ce5d66661a5eed8
refs/heads/main
2023-06-26T06:09:21.889995
2021-07-24T15:37:20
2021-07-24T15:37:20
389,129,722
0
0
null
null
null
null
UTF-8
Python
false
false
177
py
from blogcomment import models from django import forms class CommentForm(forms.ModelForm): class Meta: model= models.BlogComment fields = ['name','content']
1bdb41223af879cb9d0b43af9b812aa66dba8d2c
5faadd12f76cc0341b4ff04502605edfeb8c6542
/lesson3/calc_eigen_values.py
2534f96f5a7a62e68ca639d8a235cd8aaebecfbf
[]
no_license
redpoint13/IntroToDataScience
e6de65be578f85c32f44a15131438089b9ae9033
9c825212450471500c00368a909b210dac128ad4
refs/heads/master
2021-01-10T05:06:33.552806
2015-12-30T18:25:39
2015-12-30T18:25:39
46,679,850
0
0
null
null
null
null
UTF-8
Python
false
false
227
py
import numpy as np import pandas as pd from scipy import linalg as LA A = pd.read_csv('turnstile_data_master_with_weather.csv') #A = np.random.randint(0, 10, 25).reshape(5, 5) #print A e_vals, e_vecs = LA.eig(A) print e_vals
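scipy's eig needs a square numeric array, so a self-contained variant along the lines of the commented-out random-matrix path (written with Python 3 prints) looks like this:

import numpy as np
from scipy import linalg as LA

A = np.random.randint(0, 10, 25).reshape(5, 5)
e_vals, e_vecs = LA.eig(A)
print(e_vals)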
1599974338578869309b2c2edc0ddfd986a22d5e
0b106c3c614e6f8fc7df4d8349ef80de38ae9d43
/expressionParenthesisPermutations.py
b87fc3cde3050eed1570a7651c54517dcd3ebda8
[]
no_license
shengng325/LeetCode.py
b28142421544ea6f04a84785429e79da46d4a1d6
ab8f72fbec259773f1b2ddadadec52f3132908ab
refs/heads/master
2023-02-21T08:11:56.930960
2021-01-17T04:00:40
2021-01-17T04:00:40
317,896,294
0
0
null
null
null
null
UTF-8
Python
false
false
709
py
def expressionParenthesisPermutations(string): result = [] if '+' not in string and '-' not in string and '*' not in string: result.append(int(string)) else: for i in range(0, len(string)): char = string[i] if not char.isdigit(): leftParts = expressionParenthesisPermutations(string[0:i]) rightParts = expressionParenthesisPermutations(string[i+1:]) for part1 in leftParts: for part2 in rightParts: if char == '+': result.append(part1 + part2) elif char == '-': result.append(part1 - part2) elif char == '*': result.append(part1 * part2) return result string = "2*3-4-5" results = expressionParenthesisPermutations(string) print(results)
3ef11cf4535bfd2dd7e8800eadf31777a454cce7
b1679911a25ac874e67469c9dee5a4a926a7378b
/qtile/utils.py
93abdf452723250eec937c5304bd339b78b11227
[]
no_license
ClintonKildepstein/qtile.org
4264bbf3725d052295f00ff3724e9f884cdbf884
5b5f5327e5a12218c58162b22cadc31f1b78e2e5
refs/heads/master
2023-08-01T00:47:47.650294
2021-03-18T20:12:21
2021-03-18T20:16:43
null
0
0
null
null
null
null
UTF-8
Python
false
false
253
py
import os import yaml from django.conf import settings def load_data(name): ''' Load a YAML fixture and return the data ''' path = os.path.join(settings.BASE_DIR, 'data/{0}.yaml'.format(name)) return yaml.safe_load(open(path, 'r').read())
3795cd5e4432d0e60900990422a8f1c2321fdf11
850ed2ee4aaf5c10644955673a9fd1c3ddafaf33
/posweb/models/SaleItem.py
851fc854490fcc8183fcf4b31ca6a261d2c888f8
[]
no_license
amente/posweb
aff564f0e74374386ad708c996b2bab63fbb5081
065ffe3d07e2ba28174d32a49899023320e74f22
refs/heads/master
2020-07-09T20:18:04.615990
2014-10-10T01:40:39
2014-10-10T01:40:39
null
0
0
null
null
null
null
UTF-8
Python
false
false
1,026
py
from sqlalchemy import ( Column, Index, Integer, Text, ) from shared import DBSession, Base class SaleItemCache(object): def __init__(self): self.dCache = {} def __getitem__(self, key): pass class SaleItem(Base): __tablename__ = "saleItems" id = Column(Integer, primary_key=True) category = Column(Text) plainName = Column(Text) value = Column(Integer) stockCount = Column(Integer) def __init__(self, category, name, val, stockCount): self.category = category self.plainName = name self.value = val self.stockCount = stockCount def __init__(self, sku, category, name, val, stockCount): self.id = sku self.category = category self.plainName = name self.value = val self.stockCount = stockCount def __json__(self, request): return {'id': self.id, 'category': self.category, 'plainName': self.plainName, 'value': self.value, 'stockCount': self.stockCount}
e6f9f6137f902eb1ee7c68acf7c996c98ad8a9f9
0a0bc9b3cf05b7fd421d9c0d7238ff739bf72753
/StudentV4BE/urls.py
f09fdbc30d0ba16ed966cf307c7fceed37aed7bf
[]
no_license
RobotZQ/StudentV4BE
fadd4c4e340d22217e75aa3265a6d75a81d4adf7
874db9f4aa1b465d14ae94211ce13b2da824f9ee
refs/heads/master
2023-04-26T15:19:19.357006
2021-05-21T14:01:03
2021-05-21T14:01:03
357,087,754
1
1
null
null
null
null
UTF-8
Python
false
false
1,803
py
"""StudentV4BE URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/2.1/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: path('', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.urls import include, path 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) """ from django.contrib import admin from django.urls import path from student import views from django.conf import settings from django.conf.urls.static import static urlpatterns = [ path('admin/', admin.site.urls), path('students/', views.get_students), # 获取所有学生信息的接口 path('students/query/', views.query_students), # 查询学生信息的接口 path('sno/check/', views.is_exists_sno), # 校验学号是否存在 path('student/add/', views.add_student), # 添加学生信息的接口 path('student/update/', views.update_student), # 修改学生信息的接口 path('student/delete/', views.delete_student), # 删除学生信息的接口 path('students/delete/', views.delete_students), # 删除学生信息的接口 path('upload/', views.upload), # 上传文件的接口 path('excel/import/', views.import_students_excel), # 导入Excel文件 path('excel/export/', views.export_student_excel), # 导出Excel文件 path('data/get_data', views.query_hotpic), ] #添加这行--- 允许所有的media文件被访问 urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
237267518c9648d4058ed146b2ab8ed15ae6e932
2183b81fdbbd8df6aae6f10d2b7fb36c2eb82be6
/student_accomodation/views/leadgenration.py
30f13daa51986ea661e5da6a59e1c60ec4d4838e
[]
no_license
sumittarwey/homeinabroad
e3ac40112ba1f9a38e8994ef4cb89538e908d588
fcf887837b43f4fa79ef495fb8ba4da227d270ee
refs/heads/master
2022-11-19T15:59:07.057382
2020-07-02T12:27:46
2020-07-02T12:27:46
257,499,216
0
0
null
null
null
null
UTF-8
Python
false
false
493
py
from student_accomodation.serializers.leadgenrationserializers import LeadGenerationSerializer from rest_framework import generics from student_accomodation.models import LeadGeneration from django.db.models import F class list(generics.ListCreateAPIView): queryset = LeadGeneration.objects.all() serializer_class = LeadGenerationSerializer class show(generics.RetrieveUpdateDestroyAPIView): queryset =LeadGeneration.objects.all() serializer_class = LeadGenerationSerializer
0ff2859360e9fabe9831161c47e4249cdedd0f7c
413e6908754e0c5c49f288a304ab904a77dfa0f4
/cfehome/settings.py
16d8543d4f04489b7b91f1df1bdcd3c598a04f28
[]
no_license
matgeo-2008/rest-api-2
8a8402acc3cb27e73ab1e04dd8ddb39ac3141649
a84b07237b9519b4161c647ac816b3b659eeb5c6
refs/heads/master
2020-03-09T10:57:44.301985
2018-04-10T08:02:48
2018-04-10T08:02:48
128,749,594
0
0
null
null
null
null
UTF-8
Python
false
false
3,507
py
""" Django settings for cfehome project. Generated by 'django-admin startproject' using Django 1.11. For more information on this file, see https://docs.djangoproject.com/en/1.11/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.11/ref/settings/ """ import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = '&75&h_35y-g)w=!85&ee!s6j=1nlfsb+_+$zxl4607(18dvvul' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'rest_framework', 'postings', ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'cfehome.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'cfehome.wsgi.application' # Database # https://docs.djangoproject.com/en/1.11/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } # Password validation # https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/1.11/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.11/howto/static-files/ REST_FRAMEWORK = { 'DEFAULT_PERMISSION_CLASSES': ( 'rest_framework.permissions.IsAuthenticatedOrReadOnly', ), 'DEFAULT_AUTHENTICATION_CLASSES': ( 'rest_framework_jwt.authentication.JSONWebTokenAuthentication', 'rest_framework.authentication.SessionAuthentication', 'rest_framework.authentication.BasicAuthentication', ), } STATIC_URL = '/static/'
656c6d683b6a8f42253b65cb68903a33c5722d97
1cd7a7416d4525c4f39479d54928b8d26ae48177
/infixtopostfix.py
84d9ff1aaf5a4014e12a25a2d483fa9488fa40c1
[]
no_license
abianusha19/CompetitiveProgramming
a2727c5ed772b026cae9916a28b200c27a0fd862
904497c612c3a830c3723fd49f510c59e98222b5
refs/heads/master
2021-01-05T05:37:09.262996
2020-02-23T10:33:56
2020-02-23T10:33:56
240,899,689
0
1
null
null
null
null
UTF-8
Python
false
false
1,844
py
class infixtopostfix: def __init__(self): self.stack=[] self.rev=[] self.result=[] def push(self,x): self.rev.append(x) def priority(self,i): p1=['+','-'] p2=['*','/'] p3=['^'] if i in p1: return 1 elif i in p2: return 2 elif i in p3: return 3 else: return 0 def convertExp(self): tempstack=[] for x in self.rev: if x=='(': tempstack.append(x) elif x.isalpha() or x.isdigit(): self.result.append(x) elif x==')': c=tempstack.pop() while c!='(' and len(tempstack)!=0: self.result.append(c) if len(tempstack)!=0: c=tempstack.pop() else: p=self.priority(x) if len(tempstack)!=0: q=self.priority(tempstack[-1]) if q==0: tempstack.append(x) elif p>q: tempstack.append(x) elif p==q and x==tempstack[-1]: tempstack.append(x) else: k=tempstack.pop() while(p<=q and k!='('): self.result.append(k) if len(tempstack)!=0: r=tempstack[-1] q=self.priority(r) if q!=0: k=tempstack.pop() else: break tempstack.append(x) else: tempstack.append(x) while len(tempstack)!=0: self.result.append(tempstack.pop()) def display(self): str="" for x in range(len(self.result)): str+=self.result[x] print(str) s=input() inpre=infixtopostfix() for x in s: inpre.push(x) inpre.convertExp() inpre.display()
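A non-interactive check of the converter above, assuming the class can be imported without running the input() block at the bottom of the file; for this expression the expected postfix output is abcd-*+.

conv = infixtopostfix()
for ch in "a+b*(c-d)":
    conv.push(ch)
conv.convertExp()
conv.display()   # prints: abcd-*+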
c6bbac994b78f8cd29b3fe3fc976543d69366759
330b667c023494525a4a7ad202c010c66e0213c5
/setup.py
889b6967ab39c2408ea7e4fd751fc0b6252ca58a
[ "MIT" ]
permissive
terry07/py-wellrng
2ac601b8ddf1f803596394b230333f7f2e97b913
c70820fba8677bd5e8ddec33976a4fd3c3694b8f
refs/heads/master
2021-01-17T06:27:56.826715
2014-06-12T08:23:22
2014-06-12T08:23:22
null
0
0
null
null
null
null
UTF-8
Python
false
false
498
py
from distutils.core import setup setup( name = 'py-wellrng', packages = ['py-wellrng'], # this must be the same as the name above version = '0.1', description = 'A replacment for the default random lib using WELL1024a RNG', author = 'Raphael Stefanini', author_email = '[email protected]', url = 'https://github.com/rphlo/py-wellrng', download_url = 'https://github.com/rphlo/py-wellrng/tarball/0.1', keywords = ['random', 'well1024a', 'PRNG', 'RNG'], classifiers = [], )
633c5c59bb595a872b055023be218df006fd679a
00e58944bb5e1f14e427887abffe9f8a7f63da0b
/Неделя 2/минимальный_делитель.py
672bf77eb8d554f9ff2908ff32190082f204a357
[]
no_license
homosociologicus/coursera_py_basics
323367e0097a06491030080992c06d6565230568
795965b83b509697d7e74cb701aba92f3fb13de6
refs/heads/master
2022-11-21T19:53:06.665399
2020-07-26T20:55:01
2020-07-26T20:55:01
262,435,643
0
0
null
null
null
null
UTF-8
Python
false
false
63
py
N = int(input()) i = 2 while (N % i) != 0: i += 1 print(i)
d7487a0ad67a3cc0d9eabfe0bbfeb284c3a18c3f
80c8d3bc0298fec7e46b748a11f6a63ebf56d2f5
/experiments/data/k-mer14.py
e94738661eed56068685749f14ad2961f6c89bfd
[]
no_license
laureanorp/HPC_Lab3_singularity
74c015acc5c35451f2f2147af29fa5bf0bf0fdd2
34fb4bf08aa825cdb9cb70442d64a5e1264675c9
refs/heads/master
2023-02-23T23:32:41.215265
2021-01-27T14:41:11
2021-01-27T14:41:11
333,169,631
0
0
null
2021-01-27T14:41:12
2021-01-26T17:55:53
C++
UTF-8
Python
false
false
759
py
def convert(c): if (c == 'A'): return 'C' if (c == 'C'): return 'G' if (c == 'G'): return 'T' if (c == 'T'): return 'A' print("Start") opt = "ACGT" s = "" s_last = "" len_str = 14 for i in range(len_str): s += opt[0] for i in range(len_str): s_last += opt[-1] pos = 0 counter = 1 while (s != s_last): counter += 1 # You can uncomment the next line to see all k-mers. # print(s) change_next = True for i in range(len_str): if (change_next): if (s[i] == opt[-1]): s = s[:i] + convert(s[i]) + s[i+1:] change_next = True else: s = s[:i] + convert(s[i]) + s[i+1:] break # You can uncomment the next line to see all k-mers.
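The same enumeration can be cross-checked with itertools.product; k is dropped to 3 here (my choice, not the script's) so the full 4**k walk stays small enough to print.

import itertools

k = 3
kmers = [''.join(p) for p in itertools.product("ACGT", repeat=k)]
print(len(kmers))            # 64 == 4**3
print(kmers[0], kmers[-1])   # AAA TTT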
52fa03519391b192963ded7ab87c7e8fe05a7fab
7654aa47e4b91c0077cc0c8040fe35bde91d08aa
/flaskk/venv/bin/pytest-benchmark
c772ed6ec111dac32f586fbfc9876e1bd04bb356
[]
no_license
nithinjith40/Django
c3c05c9d9f8eecdeb98e891323d3cc00dd451e27
cf7ab1d49aac64d69daa0425f79e226cd9d5bd82
refs/heads/master
2022-11-07T19:15:48.986523
2020-01-08T14:40:27
2020-01-08T14:40:27
231,874,993
0
1
null
2022-10-12T07:27:04
2020-01-05T06:02:42
Python
UTF-8
Python
false
false
242
#!/home/expert/flaskk/venv/bin/python # -*- coding: utf-8 -*- import re import sys from pytest_benchmark.cli import main if __name__ == '__main__': sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) sys.exit(main())
56d7c4773e79b81c1cfd6de92045d50f3ee78864
b7c8f15a87925595353161c6c3fc0d1b5c51193f
/hdf5_analysis_example.py
3cbce5d8a0ba84869e02fc682d9f59c3ece86fea
[ "MIT" ]
permissive
laofei177/vpic_reconnection_tutorial
a58821bde2bb1e5f02eb09a99ba1a502bf17d99a
7ba13578fe34615a118f3d98446ea94ad2b3d188
refs/heads/main
2023-03-26T06:13:37.652159
2021-03-28T03:59:54
2021-03-28T03:59:54
353,336,627
0
1
null
null
null
null
UTF-8
Python
false
false
4,117
py
import math import h5py import matplotlib.pylab as plt import numpy as np from matplotlib import rc # rc('font',**{'family':'sans-serif','sans-serif':['Helvetica']}) # for Palatino and other serif fonts use: rc('font',**{'family':'serif','serif':['Palatino']}) rc('text', usetex=True) plt.rcParams['figure.dpi'] = 100 tableau_colors=['tab:blue', 'tab:orange', 'tab:green', 'tab:red', 'tab:purple', 'tab:brown', 'tab:pink', 'tab:gray', 'tab:olive', 'tab:cyan'] pic_run = "driving_test_dipole" pic_run_dir = "/global/cscratch1/sd/xiaocan/tail_problem/" + pic_run + "/" vpic_info = get_vpic_info(pic_run_dir) def get_vpic_info(pic_run_dir): """Get information of the VPIC simulation """ with open(pic_run_dir + '/info') as f: content = f.readlines() f.close() vpic_info = {} for line in content[1:]: if "=" in line: line_splits = line.split("=") elif ":" in line: line_splits = line.split(":") tail = line_splits[1].split("\n") vpic_info[line_splits[0].strip()] = float(tail[0]) return vpic_info def plot_jy(tframe, yslice=0, show_plot=True): """Plot the y-component of the current density """ fields_interval = int(vpic_info["fields_interval"]) tindex = fields_interval * tframe smime = math.sqrt(vpic_info["mi/me"]) lx_de = vpic_info["Lx/di"] * smime lz_de = vpic_info["Lz/di"] * smime xmin, xmax = 0, lx_de zmin, zmax = -0.5 * lz_de, 0.5 * lz_de fname = (pic_run_dir + "hydro_hdf5/T." + str(tindex) + "/hydro_electron_" + str(tindex) + ".h5") with h5py.File(fname, 'r') as fh: group = fh["Timestep_" + str(tindex)] dset = group["jy"] jey = dset[:, yslice, :] fname = (pic_run_dir + "hydro_hdf5/T." + str(tindex) + "/hydro_ion_" + str(tindex) + ".h5") with h5py.File(fname, 'r') as fh: group = fh["Timestep_" + str(tindex)] dset = group["jy"] jiy = dset[:, yslice, :] nx = int(vpic_info["nx"]) nz = int(vpic_info["nz"]) xgrid = np.linspace(xmin, xmax, nx) zgrid = np.linspace(zmin, zmax, nz) len0 = 10 fig = plt.figure(figsize=[len0, len0*lz_de/lx_de]) rect = [0.12, 0.14, 0.78, 0.78] ax = fig.add_axes(rect) jy = np.squeeze(jey + jiy) im1 = ax.imshow(jy.T, extent=[xmin, xmax, zmin, zmax], vmin=-0.06, vmax=0.06, cmap=plt.cm.coolwarm, aspect='auto', origin='lower', interpolation='bicubic') # Magnetic field lines fname = (pic_run_dir + "field_hdf5/T." + str(tindex) + "/fields_" + str(tindex) + ".h5") bvec = {} with h5py.File(fname, 'r') as fh: group = fh["Timestep_" + str(tindex)] for var in ["cbx", "cbz"]: dset = group[var] bvec[var] = dset[:, 0, :] xmesh, zmesh = np.meshgrid(xgrid, zgrid) xmesh_r, zmesh_r = np.meshgrid(xgrid[::16], zgrid[::16]) start_points = np.vstack([xmesh_r.flatten(), zmesh_r.flatten()]).T ax.streamplot(xmesh, zmesh, np.squeeze(bvec["cbx"]).T, np.squeeze(bvec["cbz"]).T, color='k', linewidth=0.5, density=2) ax.set_xlim([xmin, xmax]) ax.set_ylim([zmin, zmax]) ax.set_xlabel(r'$x/d_e$', fontsize=20) ax.set_ylabel(r'$z/d_e$', fontsize=20) ax.tick_params(labelsize=16) rect_cbar = np.copy(rect) rect_cbar[0] += rect[2] + 0.01 rect_cbar[2] = 0.02 rect_cbar[1] += rect[3] * 0.25 rect_cbar[3] = rect[3] * 0.5 cbar_ax = fig.add_axes(rect_cbar) cbar = fig.colorbar(im1, cax=cbar_ax, extend='both') cbar_ax.set_title(r'$j_y$', fontsize=20) cbar.ax.tick_params(labelsize=12) twpe = math.ceil(tindex * vpic_info["dt*wpe"] / 0.1) * 0.1 text1 = r'$t\omega_{pe}=' + ("{%0.0f}" % twpe) + '$' fig.suptitle(text1, fontsize=20) # img_dir = '../img/rate_problem/absj/' + pic_run + '/' # mkdir_p(img_dir) # fname = img_dir + "absj_" + str(tframe) + ".jpg" # fig.savefig(fname, dpi=200) plot_jy(tframe, yslice=0)
2490cd4e851810ee2c2c8142647f0b3513ed71e5
43a0cfdae89d13f94943c238b78b96d8187baf3c
/thread-count-wait1.py
c0d76756fbeb1cd0c98919fa9f60c713a584f980
[]
no_license
reallybigmistake/hello-world
cfc0ba3fcce6ba31adc2f2df8983a89d37381c41
8cd97d2fbd4e72991c73fe266625489900add685
refs/heads/master
2021-01-19T02:19:50.992761
2017-04-05T07:48:15
2017-04-05T07:48:15
87,271,664
0
0
null
2017-04-05T07:48:15
2017-04-05T05:57:19
Python
UTF-8
Python
false
false
504
py
import _thread, time stdoutmutex = _thread.allocate_lock() numthreads = 5 exitmutexes = [_thread.allocate_lock() for i in range(numthreads)] def counter(myId, count, mutex): for i in range(count): time.sleep(1/(myId+1)) with mutex: print('[%s] => %s' % (myId, i)) exitmutexes[myId].acquire() for i in range(numthreads): _thread.start_new_thread(counter, (i, 5, stdoutmutex)) while not all(mutex.locked() for mutex in exitmutexes): time.sleep(0.25) print('Main thread exiting')
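The same countdown can be written with the higher-level threading module, where join() replaces the exit-mutex polling and a Lock still serializes the prints; this is an alternative sketch, not the _thread version above.

import threading, time

stdout_lock = threading.Lock()

def counter(my_id, count):
    for i in range(count):
        time.sleep(1 / (my_id + 1))
        with stdout_lock:
            print('[%s] => %s' % (my_id, i))

threads = [threading.Thread(target=counter, args=(i, 5)) for i in range(5)]
for t in threads: t.start()
for t in threads: t.join()
print('Main thread exiting.')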
b6d0ee1afef4261e4dbba8942af00b85a1c0d5e8
2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae
/python/python_269.py
8befbf4df1bb76a09bb487be7dd1b88cba7aa3d5
[]
no_license
AK-1121/code_extraction
cc812b6832b112e3ffcc2bb7eb4237fd85c88c01
5297a4a3aab3bb37efa24a89636935da04a1f8b6
refs/heads/master
2020-05-23T08:04:11.789141
2015-10-22T19:19:40
2015-10-22T19:19:40
null
0
0
null
null
null
null
UTF-8
Python
false
false
140
py
# Setting up Django on an internal server (os.environ() not working as expected?) import sys sys.path.append('/path/to/flup/egg/flup-1.0.1-py2.5.egg')
2bd592fbef1d115f7081ec97b4f9ad7f1bb0f2dd
afa8259e6be4e76af4e84f8bd30d65cf9f193dfa
/scripts/list_numbers.py
28723f026be84a66ee71b0e3fb0972668862d26b
[]
no_license
Xiaofei-git/pfb2017
a7e3a6e8fc35e02f1e0d4afc689c1daa5887673b
5304fef843628d4faccaf33d1317cc8681656038
refs/heads/master
2021-07-16T05:19:23.448948
2017-10-16T19:58:38
2017-10-16T19:58:38
null
0
0
null
null
null
null
UTF-8
Python
false
false
75
py
#!/usr/bin/python3 numbers = [0,1,2,3,4] for num in numbers: print(num)
301614a7fce2108fa79f2dce31b4c051240ef640
5b86a0a86666e16b8468d6e0fe41667b91a06798
/1807/08day/02-5050.py
b864bf41f4a662218a83351ea3ced8b319ae218c
[]
no_license
nijunge/1807-2
17e58a3528e30189e306baf32f24c1119eb54d22
738731c12954c2c25b9c45f20a44e9b8bbdca1f4
refs/heads/master
2021-09-22T07:06:08.425165
2018-09-06T08:16:39
2018-09-06T08:16:39
143,671,063
0
0
null
null
null
null
UTF-8
Python
false
false
199
py
""" i = 1 a = 0 while i <101: print(i) a+=i i+=1 print(a) """ i = float(input("请输入起始值")) a = float(input("请输入终止值")) x = 0 while i < a+1: print(i) x+=i i+=1 print(x)
2f891888f7a4c5c413d0e63f574494f3fd7eccff
5fd41fac9a4bcd605b665bb9fd10f0b4f3a6f6b5
/graph/S_1_2.py
02ca84dbf59a432ce107d6faa1836c93f22fa3ab
[]
no_license
dabeiT3T/Aha-Algorithms-in-Python
b26ba6bff0f51179873bfe07d37c884391557ad6
62f7578e2a0deaf3d44dc135e8c9a5a0e28dc4be
refs/heads/master
2020-03-16T21:00:55.310918
2018-11-29T07:40:02
2018-11-29T07:40:02
132,981,203
0
0
null
null
null
null
UTF-8
Python
false
false
743
py
#!/usr/bin/env python3 ''' 5 5 <= V, E 1 2 <= G 1 3 1 5 2 4 3 5 1 2 3 5 4 <= print answer ''' # read _v, _e = map(int, input().split()) # init G ''' 0 inf inf inf 0 inf inf inf 0 ''' G = [ [0 if i == j else float('inf') for j in range(_v)] for i in range(_v) ] for i in range(_e): x, y = map(lambda n: int(n)-1, input().split()) G[x][y] = 1 G[y][x] = 1 # v has passed L = [0 for i in range(_v)] # function def bfs() -> None: global Q while Q: vertex, *Q = Q print(vertex, end=' ') for keyV, edge in enumerate(G[vertex-1]): if edge == 1 and not L[keyV]: L[keyV] = 1 Q.append(keyV+1) # init L[0] = 1 Q = [1] bfs() print()
7d153d08075565a364cc834abe9dc572466338c7
ca7aa979e7059467e158830b76673f5b77a0f5a3
/Python_codes/p02860/s048515681.py
334230f5adc64b662ea1dc6547263c681aabddfc
[]
no_license
Aasthaengg/IBMdataset
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
refs/heads/main
2023-04-22T10:22:44.763102
2021-05-13T17:27:22
2021-05-13T17:27:22
367,112,348
0
0
null
null
null
null
UTF-8
Python
false
false
675
py
import sys import math import itertools import bisect from copy import copy from collections import deque,Counter from decimal import Decimal def s(): return input() def i(): return int(input()) def S(): return input().split() def I(): return map(int,input().split()) def X(): return list(input()) def L(): return list(input().split()) def l(): return list(map(int,input().split())) def lcm(a,b): return a*b//math.gcd(a,b) def gcd(*numbers): reduce(math.gcd, numbers) sys.setrecursionlimit(10 ** 9) mod = 10**9+7 count = 0 ans = 0 N = i() S = s() if N % 2 != 0: print("No") else: a = N // 2 if S[:a] == S[a:N]: print("Yes") else: print("No")
6cc23b0fdf16aa3ccec0ce8552c82f0d5b343307
088906c28ae301f1ba2e0cab92150db34c5fc934
/ckeditor_uploader/urls.py
4c14a4bc6b6f63721b4cdcf1704f0ebb756bbe84
[]
no_license
bytesun/sunorth
0f57d1871371dfdcab7320dbaba3f9409f757fc4
0a3a6aeb10967b4bc1be452a93dc2b5677261dad
refs/heads/master
2021-07-17T01:53:45.447240
2017-10-22T14:57:37
2017-10-22T14:57:37
103,062,096
0
0
null
null
null
null
UTF-8
Python
false
false
712
py
from __future__ import absolute_import import django from django.conf.urls import url from django.contrib.admin.views.decorators import staff_member_required from django.views.decorators.cache import never_cache from . import views if django.VERSION >= (1, 8): urlpatterns = [ url(r'^upload/', views.upload, name='ckeditor_upload'), url(r'^browse/', never_cache(views.browse), name='ckeditor_browse'), ] else: from django.conf.urls import patterns urlpatterns = patterns( '', url(r'^upload/', staff_member_required(views.upload), name='ckeditor_upload'), url(r'^browse/', never_cache(staff_member_required(views.browse)), name='ckeditor_browse'), )
d75890733daf5c92f160599e47e239a60b42b67f
2b76680a289e4e336d34eafb2c75b355ca44dc13
/bindings/python/setup.py
a3d86e73270dc589957976e11d962727a26ef27c
[ "Apache-2.0" ]
permissive
RobertBeckebans/audaspace
c638a0a544da20b86610c7cc46e91d26b5088e61
1bdd6d347eb1d08f04b1629b3552963227e48d8d
refs/heads/master
2020-05-29T11:40:22.724639
2014-11-12T11:41:16
2014-11-12T11:41:16
null
0
0
null
null
null
null
UTF-8
Python
false
false
1,148
py
from setuptools import setup, Extension # python setup.py build # LD_LIBRARY_PATH=../../../build python # import sys # sys.path.append('/data/Work/Computer/Programming/Audaspace/audaspace/bindings/python/build/lib.linux-x86_64-3.3') audaspace = Extension('aud', include_dirs = ['@PYTHON_SOURCE_DIRECTORY@/../../include'], libraries = ['audaspace'], library_dirs = ['.'], language = 'c++', extra_compile_args = ['-std=c++11'], sources = ['@PYTHON_SOURCE_DIRECTORY@/' + file for file in ['PyAPI.cpp', 'PyDevice.cpp', 'PyHandle.cpp', 'PySound.cpp', 'PySequenceEntry.cpp', 'PySequence.cpp']]) setup (name = 'audaspace', version = '0.1', description = 'Audaspace is a high level audio library.', author = 'Jörg Müller', author_email = '[email protected]', url = 'https://github.com/neXyon/audaspace', ext_modules = [audaspace], headers = ['@PYTHON_SOURCE_DIRECTORY@/' + file for file in ['PyAPI.h', 'PyDevice.h', 'PyHandle.h', 'PySound.h', 'PySequenceEntry.h', 'PySequence.h']])
2cd364b8fad091451b59eb00b29f9d1dc121bf75
3965b85eb40009dd7c8718cfc1ebb304dc64f416
/Opencv3/data2/6/demo4.py
7fe7129641e84e98d23ba99f0c19e69698d39831
[]
no_license
ljwwwiop/python_share
2ce8cb9617cad0cac9750b06c7c9a5992809a61b
ab4474b7028f363075c8650d8323e5a89e28f0b9
refs/heads/master
2022-04-22T23:47:45.459810
2020-04-26T15:13:22
2020-04-26T15:13:22
null
0
0
null
null
null
null
UTF-8
Python
false
false
1,734
py
''' 图像分割 分水岭算法(有噪声一定先降噪) 流程 1输入图像 2灰度 3二值化 4距离变换 5寻找种子 6生成Marker 7分水岭变换 8输出 ''' import cv2 as cv import numpy as np # 分水岭算法 def watered_demo(img): # 高斯模糊 cv.pyrMeanShiftFiltering(img,10,100) dst = cv.medianBlur(img, 15) gray = cv.cvtColor(dst,cv.COLOR_BGR2GRAY) ret,binary = cv.threshold(gray,0,255,cv.THRESH_BINARY|cv.THRESH_OTSU) cv.imshow("binary",binary) # morphology operation 填充上边缘 kernel = cv.getStructuringElement(cv.MORPH_RECT, (4, 4)) # iterations*2 两次开操作 nb = cv.morphologyEx(binary,cv.MORPH_OPEN,kernel,iterations=2) sure_bg = cv.dilate(nb,kernel,iterations=3) cv.imshow("sure_bg",sure_bg) # 距离计算 卷积中的3 dist = cv.distanceTransform(nb,cv.DIST_L2,3) # 距离变换结果 0-1.0程度 dist_output = cv.normalize(dist ,0,1.0,cv.NORM_MINMAX) cv.imshow("dist_output",dist_output*50) ret,surface = cv.threshold(dist,dist.max()*0.6,255,cv.THRESH_BINARY) cv.imshow("surface",surface) # markers surface_fg = np.uint8(surface) # unknown 为着色准备 unknown = cv.subtract(sure_bg,surface_fg) # 找到markers ret,markers = cv.connectedComponents(surface_fg) print(ret) # 分水岭变换 markers = markers + 1 # unknown 象素操作 markers[unknown==255] =0 markers[unknown==255] = 0 markers = cv.watershed(img,markers=markers) img[markers==-1] = [255,255,0] cv.imshow("img",img) print(img.shape) img = cv.imread("yuan.jpg") print('创建成功') cv.imshow('ljw',img) watered_demo(img) cv.waitKey(0) cv.destroyAllWindows()
d2c2c29c2a242f7d640d41d8769008222b0e4cc0
9949737f588a1edfb0bda43316f136ec9c24df10
/245/ani.py
dc91771a90674757d9c643a31d4140ebffad60f6
[]
no_license
crheacofc/UndergradMath
05c856ca817d0509fac47a322dcf3842c688f4d6
ee8d3dada0a3f7447dfa6c07f144c22d57f47b53
refs/heads/master
2021-07-04T22:06:43.609109
2017-09-26T16:29:30
2017-09-26T16:29:30
104,909,560
0
0
null
null
null
null
UTF-8
Python
false
false
3,862
py
# -*- coding: utf-8 -*- """ Created on Fri Nov 20 10:37:24 2015 @author: crhea """ # -*- coding: utf-8 -*- """ Created on Fri Nov 20 09:59:21 2015 @author: crhea """ import numpy as np from matplotlib import pyplot as plt from mpl_toolkits.mplot3d import Axes3D from matplotlib.colors import cnames from matplotlib import animation from Orbits import ydot_orbit_3d,trapezium_orbit, trapstep # Choose random starting points, uniformly distributed from -15 to 15 jupiter = {'xp':-3.5023653,'xv':0.00565429,'yp':-3.8169847,'yv':-0.00412490,'zp':-1.5507963,'zv':-0.00190589,'m':0.000954786104043} jupiter_traj = trapezium_orbit(ydot_orbit_3d,[jupiter['xp'],jupiter['xv'],jupiter['yp'],jupiter['yv'],jupiter['zp'],jupiter['zv']],jupiter['m'],0,100000,1000,3) saturn = {'xp':9.0755314,'xv':0.00168318,'yp':-3.0458353,'yv':0.00483525,'zp':-1.6483708,'zv':0.00192462,'m':0.000285583733151} uranus = {'xp':8.3101420,'xv':0.00354178,'yp':-16.2901086,'yv':0.00127102,'zp':-7.2521278,'zv':0.00055029,'m':0.0000437273164546} neptune = {'xp':11.4707666,'xv':0.00288930,'yp':-25.7294829,'yv':0.00114527,'zp':-10.8169456,'zv':0.00039677,'m':0.0000517759138449} pluto = {'xp':-15.5387357,'xv':0.00276725,'yp':-25.2225594,'yv':-0.00170702,'zp':-3.1902382,'zv':-0.00136504,'m':1/(1.3*10**(8))} saturn_traj = trapezium_orbit(ydot_orbit_3d,[saturn['xp'],saturn['xv'],saturn['yp'],saturn['yv'],saturn['zp'],saturn['zv']],saturn['m'],0,100000,1000,3) uranus_traj = trapezium_orbit(ydot_orbit_3d,[uranus['xp'],uranus['xv'],uranus['yp'],uranus['yv'],uranus['zp'],uranus['zv']],uranus['m'],0,100000,1000,3) neptune_traj = trapezium_orbit(ydot_orbit_3d,[neptune['xp'],neptune['xv'],neptune['yp'],neptune['yv'],neptune['zp'],neptune['zv']],neptune['m'],0,1000000,1000,3) pluto_traj = trapezium_orbit(ydot_orbit_3d,[pluto['xp'],pluto['xv'],pluto['yp'],pluto['yv'],pluto['zp'],pluto['zv']],pluto['m'],0,100000,1000,3) #for planet, in [[jupiter_traj],[saturn_traj],[uranus_traj],[neptune_traj],[pluto_traj]]: # x_t = np.array([planet[:,0],planet[:,2],planet[:,4]]) x_t = np.matrix(np.zeros((1000,3))) x_t[:,0] = np.asarray(jupiter_traj[:,0]) x_t[:,1] = np.asarray(jupiter_traj[:,2]) x_t[:,2] = np.asarray(jupiter_traj[:,4]) # Set up figure & 3D axis for anim]ation fig = plt.figure() ax = fig.add_axes([0, 0, 1, 1], projection='3d') ax.axis('off') # choose a different color for each trajectory colors = plt.cm.jet(np.linspace(0, 1, 1)) # set up lines and points lines = sum([ax.plot([], [], [], '-', c=c) for c in colors], []) pts = sum([ax.plot([], [], [], 'o', c=c) for c in colors], []) # prepare the axes limits ax.set_xlim((-10, 10)) ax.set_ylim((-10, 10)) ax.set_zlim((-10, 10)) # set point-of-view: specified by (altitude degrees, azimuth degrees) ax.view_init(30, 0) # initialization function: plot the background of each frame def init(): for line, pt in zip(lines, pts): line.set_data([], []) line.set_3d_properties([]) pt.set_data([], []) pt.set_3d_properties([]) return lines + pts # animation function. This will be called sequentially with the frame number i=0 def animate(i): # we'll step two time-steps per frame. This leads to nice results. i = 2*i for line, pt in zip(lines, pts): x, y, z = x_t[i,0],x_t[i,1],x_t[i,2] # line.set_data(x_t[:i,0],x_t[:i,1]) # line.set_3d_properties(x_t[:i,2]) pt.set_data(x, y) pt.set_3d_properties(z) #ax.view_init(30, 0.3 * i) fig.canvas.draw() return lines + pts # instantiate the animator. anim = animation.FuncAnimation(fig, animate, init_func=init, frames=500, interval=30, blit=True) # Save as mp4. 
# This requires mplayer or ffmpeg to be installed #anim.save('lorentz_attractor.mp4', fps=15, extra_args=['-vcodec', 'libx264']) plt.show()
c62323d930d87daef428e24ed618e1428cf9da94
744abbec228b55b0338cf170235492116e369f45
/XFJ/XmApi/test_casc/Home_Signed_Tail_casc.py
e2c77280a43f012c05cc0ec12db1aa9626a1013d
[]
no_license
yebenxiaozhang/Projects_xfj
7555e47f5a64c6613da8b40b954a912f1b2ab8e4
29999c8980b962b08ac42b4e89dce4faf344ced4
refs/heads/master
2023-06-19T23:46:03.373540
2021-07-16T14:08:28
2021-07-16T14:08:28
309,929,185
0
0
null
null
null
null
UTF-8
Python
false
false
4,660
py
"""幸福小秘-成交跟踪 需求:只能输入成交的客户、或者经纪人手机号4位数 """ from XFJ.PubilcAPI.FlowPath import * # 第二步 编写测试用例 """ 1、输入成交的客户、是否可以查询结果 2、输入成交的经纪人手机号4位 是否可以查询到结果 3、输入未成交的客户、 4、输入挞定的客户 5、输入非数字 是否有相应的提示 6、输入数字过长 是否有相应的提示 """ class TestCase(unittest.TestCase): """小秘——成交跟踪""" def __init__(self, *args, **kwargs): super(TestCase, self).__init__(*args, **kwargs) self.XM_request = XmApi() self.XFK_request = XfkApi() self.XmTEXT = GlobalMap() self.XfkTEXT = GlobalMap() self.Flow = FlowPath() self.FlowPath = self.Flow self.Agent = AgentApi() self.AgentRequest = self.Agent self.AgentTEXT = GlobalMap() @classmethod def setUpClass(cls): """登录小秘 只执行一次 登录经纪人 获取ID""" cls.do_request = XmApi() cls.XmRequest = cls.do_request cls.XmRequest.ApiLogin() cls.request = AgentApi() cls.AgentRequest = cls.request cls.AgentRequest.LoginAgent() cls.AgentRequest.ForRegistrationID() cls.do_request = XfkApi() cls.to_request = cls.do_request cls.to_request.LoginXfk() def test_SignedTail_is_OK(self): """输入成交的客户、是否可以查询结果""" self.FlowPath.TheNewDeal() self.FlowPath.DealTicket() self.XFK_request.AttacheList(StartTime='', EndTime='', Page=1, Level='', Status=4, Days='') try: self.XM_request.SignedTail(keyWord=(self.AgentTEXT.get('ClientPhone'))[-4:]) self.assertEqual(1, self.XmTEXT.get('resultCode')) except BaseException as e: print("断言错误,错误原因:%s" % e) raise RuntimeError(self.XmTEXT.get('xmurl')) def test_SignedTail_is_AgentPhone(self): """输入成交的经纪人手机号4位 是否可以查询到结果""" try: self.XM_request.SignedTail(keyWord=AgentUesr[-4:]) self.assertNotEqual(0, self.XmTEXT.get('xmcount')) except BaseException as e: print("断言错误,错误原因:%s" % e) raise RuntimeError(self.XmTEXT.get('xmurl')) def test_Unclinched_Client(self): """输入未成交的客户、""" a = 0 self.XFK_request.AttacheList(StartTime='', EndTime='', Page=1, Level='', Status=2, Days='', vlue=a) self.XM_request.SignedTail(keyWord=self.XfkTEXT.get('xfkcustomerTel')[-4:]) try: while self.XmTEXT.get('xmcount') != 0: a = a + 1 self.XFK_request.AttacheList(StartTime='', EndTime='', Page=1, Level='', Status=2, Days='', vlue=a) self.XM_request.SignedTail(keyWord=self.XfkTEXT.get('xfkcustomerTel')[-4:]) if a == 10: break except BaseException as e: print("断言错误,错误原因:%s" % e) raise RuntimeError(self.XmTEXT.get('xmurl')) def test_Invalid_Client(self): """输入挞定的客户""" try: self.XM_request.DealTicketList() self.XM_request.DealCancellation() self.XM_request.SignedTail(keyWord=self.XmTEXT.get('customerMobile')[-4:]) while self.XmTEXT.get('xmcount') != 0: self.XM_request.DealTicketList() self.XM_request.DealCancellation() self.XM_request.SignedTail(keyWord=self.XmTEXT.get('customerMobile')[-4:]) except BaseException as e: print("断言错误,错误原因:%s" % e) raise RuntimeError(self.XmTEXT.get('xmurl')) def test_Non_Numeric(self): """输入非数字 是否有相应的提示""" try: self.XM_request.SignedTail(keyWord="hehe") self.assertEqual('搜索值不对!', self.XmTEXT.get('xmcontent')) except BaseException as e: print("断言错误,错误原因:%s" % e) raise RuntimeError(self.XmTEXT.get('xmurl')) def test_Input_Long(self): """输入数字过长 是否有相应的提示""" try: self.XM_request.SignedTail(keyWord=AgentUesr[-5:]) self.assertEqual('搜索值不对!', self.XmTEXT.get('xmcontent')) except BaseException as e: print("断言错误,错误原因:%s" % e) raise RuntimeError(self.XmTEXT.get('xmurl'))
c9ca76d3d2b620f9b209d5521f9e25c825758d9b
e2f507e0b434120e7f5d4f717540e5df2b1816da
/146-ternary.py
dbce45217ce273bc5aaaaf0e6e0901268894e40e
[]
no_license
ash/amazing_python3
70984bd32ae325380382b1fe692c4b359ef23395
64c98940f8a8da18a8bf56f65cc8c8e09bd00e0c
refs/heads/master
2021-06-23T14:59:37.005280
2021-01-21T06:56:33
2021-01-21T06:56:33
182,626,874
76
25
null
null
null
null
UTF-8
Python
false
false
165
py
# Need a ternary operator?
# Use conditional expression
for i in range(1, 5):
    # is i odd or even?
    oe = 'odd' if i % 2 else 'even'
    print(f'{i} is {oe}')
facbcdb10fd8a34d6313a52d26bd90eb2725fc20
7bededcada9271d92f34da6dae7088f3faf61c02
/pypureclient/flashblade/FB_2_10/models/array_connection_get_response.py
446709419359e61c9298c8f1d19c42cf13e2fbd8
[ "BSD-2-Clause" ]
permissive
PureStorage-OpenConnect/py-pure-client
a5348c6a153f8c809d6e3cf734d95d6946c5f659
7e3c3ec1d639fb004627e94d3d63a6fdc141ae1e
refs/heads/master
2023-09-04T10:59:03.009972
2023-08-25T07:40:41
2023-08-25T07:40:41
160,391,444
18
29
BSD-2-Clause
2023-09-08T09:08:30
2018-12-04T17:02:51
Python
UTF-8
Python
false
false
4,259
py
# coding: utf-8 """ FlashBlade REST API A lightweight client for FlashBlade REST API 2.10, developed by Pure Storage, Inc. (http://www.purestorage.com/). OpenAPI spec version: 2.10 Generated by: https://github.com/swagger-api/swagger-codegen.git """ import pprint import re import six import typing from ....properties import Property if typing.TYPE_CHECKING: from pypureclient.flashblade.FB_2_10 import models class ArrayConnectionGetResponse(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'continuation_token': 'str', 'total_item_count': 'int', 'items': 'list[ArrayConnection]' } attribute_map = { 'continuation_token': 'continuation_token', 'total_item_count': 'total_item_count', 'items': 'items' } required_args = { } def __init__( self, continuation_token=None, # type: str total_item_count=None, # type: int items=None, # type: List[models.ArrayConnection] ): """ Keyword args: continuation_token (str): Continuation token that can be provided in the `continuation_token` query param to get the next page of data. If you use the `continuation_token` to page through data you are guaranteed to get all items exactly once regardless of how items are modified. If an item is added or deleted during the pagination then it may or may not be returned. The `continuation_token` is generated if the `limit` is less than the remaining number of items, and the default sort is used (no sort is specified). total_item_count (int): Total number of items after applying `filter` params. items (list[ArrayConnection]) """ if continuation_token is not None: self.continuation_token = continuation_token if total_item_count is not None: self.total_item_count = total_item_count if items is not None: self.items = items def __setattr__(self, key, value): if key not in self.attribute_map: raise KeyError("Invalid key `{}` for `ArrayConnectionGetResponse`".format(key)) self.__dict__[key] = value def __getattribute__(self, item): value = object.__getattribute__(self, item) if isinstance(value, Property): return None else: return value def to_dict(self): """Returns the model properties as a dict""" result = {} for attr, _ in six.iteritems(self.swagger_types): if hasattr(self, attr): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value if issubclass(ArrayConnectionGetResponse, dict): for key, value in self.items(): result[key] = value return result def to_str(self): """Returns the string representation of the model""" return pprint.pformat(self.to_dict()) def __repr__(self): """For `print` and `pprint`""" return self.to_str() def __eq__(self, other): """Returns true if both objects are equal""" if not isinstance(other, ArrayConnectionGetResponse): return False return self.__dict__ == other.__dict__ def __ne__(self, other): """Returns true if both objects are not equal""" return not self == other
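A minimal usage sketch for the generated response model above; the token value is invented and `items` is left empty because constructing `ArrayConnection` objects is outside this file.

# Usage sketch (illustrative values; assumes this module is importable from the path above).
resp = ArrayConnectionGetResponse(continuation_token="token-123", total_item_count=0, items=[])
print(resp.to_dict())   # -> {'continuation_token': 'token-123', 'total_item_count': 0, 'items': []}
print(resp)             # pretty-printed via to_str()/__repr__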
847df896de8df2034263c361aafc473537a9e84a
e6f975f57258048dc1d3ed46b335c8771ccb01d2
/GAN/milestone.mnist/refer_mnist.py
49a800a2f137c03f1da0fee1d998dc66239e9fce
[ "MIT" ]
permissive
2018HCIntern/2018HCIntern
386f70124bb206ef337360c47438b6fc49c92831
bbc887c064b97056b6027045be1d6eb3f1ce2a35
refs/heads/master
2021-05-11T23:08:53.588476
2018-02-14T03:56:03
2018-02-14T03:56:03
117,510,413
2
4
MIT
2018-02-07T09:25:53
2018-01-15T07:14:09
Jupyter Notebook
UTF-8
Python
false
false
5,017
py
from __future__ import print_function import argparse import torch import torch.nn as nn import torch.nn.functional as F import torch.optim as optim from torchvision import datasets, transforms from torch.autograd import Variable # Training settings parser = argparse.ArgumentParser(description='PyTorch MNIST Example') parser.add_argument('--batch-size', type=int, default=64, metavar='N', help='input batch size for training (default: 64)') parser.add_argument('--test-batch-size', type=int, default=1000, metavar='N', help='input batch size for testing (default: 1000)') parser.add_argument('--epochs', type=int, default=10, metavar='N', help='number of epochs to train (default: 10)') parser.add_argument('--lr', type=float, default=0.01, metavar='LR', help='learning rate (default: 0.01)') parser.add_argument('--momentum', type=float, default=0.5, metavar='M', help='SGD momentum (default: 0.5)') parser.add_argument('--no-cuda', action='store_true', default=False, help='disables CUDA training') parser.add_argument('--seed', type=int, default=1, metavar='S', help='random seed (default: 1)') parser.add_argument('--log-interval', type=int, default=10, metavar='N', help='how many batches to wait before logging training status') args = parser.parse_args() args.cuda = not args.no_cuda and torch.cuda.is_available() torch.manual_seed(args.seed) if args.cuda: torch.cuda.manual_seed(args.seed) kwargs = {'num_workers': 1, 'pin_memory': True} if args.cuda else {} # train_loader = torch.utils.data.DataLoader( # datasets.MNIST('../data', train=True, download=True, # transform=transforms.Compose([ # transforms.ToTensor(), # transforms.Normalize((0.1307,), (0.3081,)) # ])), # batch_size=args.batch_size, shuffle=True, **kwargs) # test_loader = torch.utils.data.DataLoader( # datasets.MNIST('../data', train=False, transform=transforms.Compose([ # transforms.ToTensor(), # transforms.Normalize((0.1307,), (0.3081,)) # ])), # batch_size=args.test_batch_size, shuffle=True, **kwargs) transform = transforms.Compose([ transforms.ToTensor(), transforms.Normalize(mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5)) ]) train_loader = torch.utils.data.DataLoader( datasets.MNIST('data', train=True, download=True, transform=transform), batch_size=args.batch_size, shuffle=True) test_loader = torch.utils.data.DataLoader( datasets.MNIST('data', train=False, download=True, transform=transform), batch_size=args.test_batch_size, shuffle=True) class Net(nn.Module): def __init__(self): super(Net, self).__init__() self.conv1 = nn.Conv2d(1, 10, kernel_size=5) self.conv2 = nn.Conv2d(10, 20, kernel_size=5) self.conv2_drop = nn.Dropou2d() self.fc1 = nn.Linear(320, 50) self.fc2 = nn.Linear(50, 10) def forward(self, x): x = F.relu(F.max_pool2d(self.conv1(x), 2)) x = F.relu(F.max_pool2d(self.conv2_drop(self.conv2(x)), 2)) x = x.view(-1, 320) x = F.relu(self.fc1(x)) x = F.dropout(x, training=self.training) x = self.fc2(x) return F.log_softmax(x) model = Net() if args.cuda: model.cuda() optimizer = optim.SGD(model.parameters(), lr=args.lr, momentum=args.momentum) def train(epoch): model.train() for batch_idx, (data, target) in enumerate(train_loader): if args.cuda: data, target = data.cuda(), target.cuda() data, target = Variable(data), Variable(target) optimizer.zero_grad() output = model(data) loss = F.nll_loss(output, target) loss.backward() optimizer.step() if batch_idx % args.log_interval == 0: print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format( epoch, batch_idx * len(data), len(train_loader.dataset), 100. 
* batch_idx / len(train_loader), loss.data[0])) def test(): model.eval() test_loss = 0 correct = 0 for data, target in test_loader: if args.cuda: data, target = data.cuda(), target.cuda() data, target = Variable(data, volatile=True), Variable(target) output = model(data) test_loss += F.nll_loss(output, target, size_average=False).data[0] # sum up batch loss pred = output.data.max(1, keepdim=True)[1] # get the index of the max log-probability correct += pred.eq(target.data.view_as(pred)).cpu().sum() test_loss /= len(test_loader.dataset) print('\nTest set: Average loss: {:.4f}, Accuracy: {}/{} ({:.0f}%)\n'.format( test_loss, correct, len(test_loader.dataset), 100. * correct / len(test_loader.dataset))) for epoch in range(1, args.epochs + 1): train(epoch) test()
0da12a23f46262db349fee4ba3799830c82c928f
772f27c0c7e3f2df230736d3259c1a831994a887
/src/analysis/bigtemp.py
f61cd2a4ed4c5f17e0e9c6c3881ea7b061bb426a
[ "BSD-3-Clause" ]
permissive
Jacques-Florence/schedSim
04008b2da3495a4f213d9e89fb03a85ccc8e16c1
cd5f356ec1d177963d401b69996a19a68646d7af
refs/heads/master
2021-05-07T17:48:43.879031
2017-11-27T01:02:08
2017-11-27T01:02:08
108,736,406
1
0
null
null
null
null
UTF-8
Python
false
false
297
py
import matplotlib.pyplot as plt

print "hello world"

f = open("reports/temperatureReport.txt")
line = f.readline()
print line

time = []
temp = []
for line in f:
    entry = line.split(":")
    time.append(float(entry[0]))
    temp.append(float(entry[1]))
f.close()

plt.plot(time, temp, 'ro')
plt.show()
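The parser above consumes one header line and then expects `time:temperature` pairs, one per line. A throwaway snippet to generate such a file (all values invented) could be:

# Illustrative only: write a tiny reports/temperatureReport.txt in the expected format.
with open("reports/temperatureReport.txt", "w") as f:
    f.write("time:temperature\n")   # header line, consumed by the first readline()
    f.write("0.0:21.5\n")
    f.write("1.0:21.7\n")
    f.write("2.0:22.0\n")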
a9eb5f9adcda46627ab985f0f8455a43ec516c95
4db58330a9f134b40f5e3b40067a52ccca19da12
/tglib/tests/prometheus_tests.py
6785a9603a4d8cf8d59a23d95ae9dc773ad18d27
[ "MIT" ]
permissive
terragraph/tgnms
b46e6d7406a132380ac2046d1c98dba7d7e4afd2
93c0c4bef28c1ed15dc61e9fd340a9faef4902e3
refs/heads/main
2023-09-03T09:42:42.442610
2022-12-14T19:11:19
2022-12-14T19:11:19
353,065,567
15
17
MIT
2023-08-17T17:17:05
2021-03-30T16:19:24
JavaScript
UTF-8
Python
false
false
9,534
py
#!/usr/bin/env python3 # Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import re import asynctest from tglib.clients.prometheus_client import PrometheusClient, PrometheusMetric, ops from tglib.exceptions import ClientRestartError class PrometheusClientTests(asynctest.TestCase): async def setUp(self) -> None: self.timeout = 1 self.config = {"host": "prometheus", "port": 9090} await PrometheusClient.start({"prometheus": self.config}) self.client = PrometheusClient(self.timeout) self.client._session = asynctest.CoroutineMock() async def tearDown(self) -> None: await self.client.stop() async def test_client_restart_error(self) -> None: with self.assertRaises(ClientRestartError): await self.client.start({"prometheus": self.config}) def test_duration2seconds(self) -> None: durations = {"2m": 120, "30s": 30, "3h": 10800, "1d": 86400, "1w": 604800} for duration, num_seconds in durations.items(): self.assertEqual(self.client.duration2seconds(duration), num_seconds) with self.assertRaises(ValueError): self.client.duration2seconds("2x") def test_format_query(self) -> None: labels = {"foo": "bar", "quux": True} negate_labels = {"baz": "qux", "quuz": False} query = self.client.format_query("metric", labels, negate_labels) expected_query = 'metric{foo="bar",quux="true",baz!="qux",quuz!="false"}' self.assertEqual(query, expected_query) def test_format_query_regex_label(self) -> None: labels = {"foo": re.compile("bar|baz")} negate_labels = {"qux": re.compile("quux|quuz")} query = self.client.format_query("metric", labels, negate_labels) expected_query = 'metric{foo=~"bar|baz",qux!~"quux|quuz"}' self.assertEqual(query, expected_query) def test_format_query_invalid_char_in_metric(self) -> None: query = self.client.format_query("1-metric") self.assertEqual("1-metric", query) def test_format_query_ops(self) -> None: labels = {"foo": "bar"} query = ops.abs(self.client.format_query("metric", labels)) expected_query = 'abs(metric{foo="bar"})' self.assertEqual(query, expected_query) query = ops.avg_over_time(self.client.format_query("metric", labels), "24h") expected_query = 'avg_over_time(metric{foo="bar"} [24h])' self.assertEqual(query, expected_query) query = ops.count_over_time(self.client.format_query("metric", labels), "24h") expected_query = 'count_over_time(metric{foo="bar"} [24h])' self.assertEqual(query, expected_query) query = ops.delta(self.client.format_query("metric", labels), "24h") expected_query = 'delta(metric{foo="bar"} [24h])' self.assertEqual(query, expected_query) query = ops.diff_on( self.client.format_query("metric_1", labels), self.client.format_query("metric_2", labels), "linkName", ) expected_query = 'metric_1{foo="bar"} - on (linkName) metric_2{foo="bar"}' self.assertEqual(query, expected_query) query = ops.max_by(self.client.format_query("metric", labels), "linkName") expected_query = 'max by (linkName) (metric{foo="bar"})' self.assertEqual(query, expected_query) query = ops.min_by(self.client.format_query("metric", labels), "linkName") expected_query = 'min by (linkName) (metric{foo="bar"})' self.assertEqual(query, expected_query) query = ops.max_over_time(self.client.format_query("metric", labels), "24h") expected_query = 'max_over_time(metric{foo="bar"} [24h])' self.assertEqual(query, expected_query) query = ops.quantile_over_time( self.client.format_query("metric", labels), "24h", 0.75 ) expected_query = 'quantile_over_time(0.75, metric{foo="bar"} 
[24h])' self.assertEqual(query, expected_query) query = ops.rate(self.client.format_query("metric", labels), "24h") expected_query = 'rate(metric{foo="bar"} [24h])' self.assertEqual(query, expected_query) query = ops.resets(self.client.format_query("metric", labels), "24h") expected_query = 'resets(metric{foo="bar"} [24h])' self.assertEqual(query, expected_query) query = ops.sum_by(self.client.format_query("metric", labels), "linkName") expected_query = 'sum by (linkName) (metric{foo="bar"})' self.assertEqual(query, expected_query) query = ops.sum_over_time(self.client.format_query("metric", labels), "24h") expected_query = 'sum_over_time(metric{foo="bar"} [24h])' self.assertEqual(query, expected_query) @asynctest.patch("time.time", return_value=100) async def test_query_range(self, patched_time_time) -> None: self.client._session.get.return_value.__aenter__.return_value.json = ( asynctest.CoroutineMock() ) host = self.config["host"] port = self.config["port"] params = {"query": "foo", "step": "30s", "start": 0, "end": 100} await self.client.query_range_raw(**params) self.client._session.get.assert_called_with( f"http://{host}:{port}/api/v1/query_range", params=params, timeout=self.timeout, ) await self.client.query_range_ts(**params) self.client._session.get.assert_called_with( f"http://{host}:{port}/api/v1/query_range", params={**params, "query": f"timestamp({params['query']})"}, timeout=self.timeout, ) # Test that the mock value of time.time() is used when no "end" is provided del params["end"] await self.client.query_range_raw(**params) self.client._session.get.assert_called_with( f"http://{host}:{port}/api/v1/query_range", params={**params, "end": 100}, timeout=self.timeout, ) async def test_query_range_invalid_params(self) -> None: params = {"query": "foo", "step": "30s", "start": 100, "end": 0} self.assertGreater(params["start"], params["end"]) with self.assertRaises(ValueError): await self.client.query_range(**params) with self.assertRaises(ValueError): await self.client.query_range_raw(**params) with self.assertRaises(ValueError): await self.client.query_range_ts(**params) duration_re = "[0-9]+[smhdwy]" params = {"query": "foo", "step": "bar", "start": 0, "end": 100} self.assertNotRegex(params["step"], duration_re) with self.assertRaises(ValueError): await self.client.query_range(**params) with self.assertRaises(ValueError): await self.client.query_range_raw(**params) with self.assertRaises(ValueError): await self.client.query_range_ts(**params) async def test_query_latest(self) -> None: self.client._session.get.return_value.__aenter__.return_value.json = ( asynctest.CoroutineMock() ) host = self.config["host"] port = self.config["port"] params = {"query": "foo"} await self.client.query_latest(**params) self.client._session.get.assert_called_with( f"http://{host}:{port}/api/v1/query", params=params, timeout=self.timeout ) await self.client.query_latest_ts(**params) self.client._session.get.assert_called_with( f"http://{host}:{port}/api/v1/query", params={"query": f"timestamp({params['query']})"}, timeout=self.timeout, ) await self.client.query_latest(**params, time=10) self.client._session.get.assert_called_with( f"http://{host}:{port}/api/v1/query", params={**params, "time": 10}, timeout=self.timeout, ) def test_write_and_poll_metrics(self) -> None: metrics = [] for i in range(10): metric = PrometheusMetric(name="foo", labels={"number": i}, value=i, time=1) metrics.append(metric) self.client.write_metrics(metrics) datapoints = self.client.poll_metrics() 
self.assertEqual(len(datapoints), 10) # This call returns an empty list because no metrics were written in between self.assertEqual(len(self.client.poll_metrics()), 0) def test_write_metrics_no_timestamp(self) -> None: metric = PrometheusMetric(name="foo", labels={"bar": "baz"}, value=100) self.client.write_metrics([metric]) datapoints = self.client.poll_metrics() self.assertEqual(len(datapoints), 1) self.assertEqual(datapoints[0], 'foo{bar="baz"} 100') def test_redundant_write_metrics(self) -> None: self.client.write_metrics( [PrometheusMetric(name="foo", labels={"bar": "baz"}, value=100, time=1)] ) self.client.write_metrics( [PrometheusMetric(name="foo", labels={"bar": "baz"}, value=101, time=2)] ) datapoints = self.client.poll_metrics() self.assertEqual(len(datapoints), 1) self.assertEqual(datapoints[0], 'foo{bar="baz"} 101 2') def test_poll_metrics_empty_queue(self) -> None: self.assertFalse(self.client.poll_metrics())
d9d955e2cbca2fa678bcf7faf3564bb289e5af4b
0f79fd61dc47fcafe22f83151c4cf5f2f013a992
/BOJ/2251.py
a6e41c3b07cae2ca059d66b9444e8ab8d313c5c8
[]
no_license
sangm1n/problem-solving
670e119f28b0f0e293dbc98fc8a1aea74ea465ab
bc03f8ea9a6a4af5d58f8c45c41e9f6923f55c62
refs/heads/master
2023-04-22T17:56:21.967766
2021-05-05T12:34:01
2021-05-05T12:34:01
282,863,638
0
0
null
null
null
null
UTF-8
Python
false
false
1,072
py
""" author : Lee Sang Min github : https://github.com/sangm1n e-mail : [email protected] title : 물통 description : BFS """ from collections import deque A, B, C = map(int, input().split()) visited = [[False] * 201 for _ in range(201)] def calc(a, b): global q if not visited[a][b]: visited[a][b] = True q.append((a, b)) result = [] q = deque() q.append((0, 0)) visited[0][0] = True while q: x, y = q.popleft() z = C - x - y if x == 0: result.append(z) # A -> B if x > 0 and y < B: val = min(x, B-y) calc(x-val, y+val) # B -> A if y > 0 and x < A: val = min(y, A-x) calc(x+val, y-val) # B -> C if y > 0 and z < C: val = min(y, C-z) calc(x, y-val) # C -> B if z > 0 and y < B: val = min(z, B-y) calc(x, y+val) # A -> C if x > 0 and z < C: val = min(x, C-z) calc(x-val, y) # C -> A if z > 0 and x < A: val = min(z, A-x) calc(x+val, y) result.sort() print(*result)
51039aaf928c6e38b84bd9456a353a4e62c60352
c3e5bbdc7efa768a3af1d9395441800886d309d5
/src/minimization/minimization.py
e9954cb5917ea8b4bf0280edbe77ff050775d1ac
[]
no_license
davibobsin/Horus
773749a7f770373141d4c943d4fae9a4024ef7e5
38beefdb7b27d59de6f006f2db6a96ce4a29fe5b
refs/heads/master
2020-03-28T14:32:07.250089
2019-06-04T00:50:42
2019-06-04T00:50:42
148,496,028
0
1
null
null
null
null
UTF-8
Python
false
false
7,928
py
"""This script derives the most probable profile of a regular prism from the measurements of a camera""" import matplotlib.pyplot as plt import numpy as np from scipy import optimize from scipy.cluster.hierarchy import fcluster, linkage from scipy.spatial.distance import squareform from camera import Camera from section import Section # https://stats.stackexchange.com/questions/138325/clustering-a-correlation-matrix def func(theta, r, phi, h): """func""" psy = np.add.outer(phi, theta) return r*np.cos(psy)/(h - r*np.sin(psy)) def jacobian(theta, r, phi, h): """jacobian""" psy = phi + theta sin = np.sin(psy) cos = np.cos(psy) relative_height = h - r*sin partial_derivative_radius = ( cos*relative_height-r*sin*cos)/(relative_height**2) partial_derivative_phi = ( (r*cos)**2-r*sin*relative_height)/(relative_height**2) return np.column_stack((partial_derivative_radius, partial_derivative_phi)) def guess(theta, alpha, h): """guess""" i_max = alpha.argmax() alpha_max = alpha[i_max] return np.array([h*np.sin(alpha_max), alpha_max-theta[i_max]]) def normpdf(x, mean, std): """normpdf""" denom = std*(2*np.pi)**.5 num = np.exp(-(x-mean)**2/(2*std**2)) return num/denom def std(x, mean): """std""" err = (x-mean) var = np.dot(err, err) return var**.5 def run(theta, alpha, func, jac, guess, plt_local, kernel=5, noiseless=np.array([])): """run""" half_kernel = kernel//2 trimmed_theta = theta[half_kernel:-half_kernel] trimmed_alpha = alpha[half_kernel:-half_kernel] error_list = [] for i in range(half_kernel, theta.size - half_kernel): kernel_theta = theta[i - half_kernel:i + half_kernel + 1] kernel_alpha = alpha[i - half_kernel:i + half_kernel + 1] first_guess = guess(kernel_theta, kernel_alpha) params, params_covariance = optimize.curve_fit( func, kernel_theta, kernel_alpha, p0=first_guess, jac=jac) r = params[0] phi = params[1] % (2*np.pi) fitted = func(trimmed_theta, r, phi) # plt_local.figure(figsize=(6, 4)) # plt_local.scatter(np.degrees(trimmed_theta), np.degrees( # trimmed_alpha), color=COLORS[1]) # plt_local.scatter(np.degrees(trimmed_theta), np.degrees( # fitted), color=COLORS[2]) # plt_local.show() error = trimmed_alpha-fitted error_list.append(error) error_2d = np.stack(error_list, axis=0) error_reciprocal = error_2d+error_2d.T # plt_local.figure(figsize=(6, 4)) # plt_local.imshow(error_reciprocal) # plt_local.show() correlation = normpdf(error_reciprocal, 0, std(error_reciprocal, 0)/100) correlation = (correlation.T+correlation)/2 correlation = (correlation-correlation.min()) / \ (correlation.max()-correlation.min()) dissimilarity = 1-correlation np.fill_diagonal(dissimilarity, 0) dissimilarity = (dissimilarity.T+dissimilarity)/2 # plt_local.figure(figsize=(6, 4)) # plt_local.imshow(dissimilarity) # plt_local.show() hierarchy = linkage(squareform(dissimilarity), method='average') labels = fcluster(hierarchy, 0.5, criterion='distance') unique, indices, unique_counts = np.unique( labels, return_counts=True, return_inverse=True) filtered_labels = unique[indices] # u = l2u(filtered_labels) fitted_list = [] params_list = [] for x in unique[unique_counts > kernel]: segment_indices = (labels == x) local_error = dissimilarity[:, segment_indices][segment_indices, :] # vertical_error = dissimilarity[:, segment_indices] cluster_error = sum(local_error, 0)/local_error.shape[0] cluster_alpha = trimmed_alpha[segment_indices] cluster_theta = trimmed_theta[segment_indices] plt_local.plot(np.degrees(cluster_theta), cluster_error) first_guess = guess(cluster_theta, cluster_alpha) params, params_covariance = 
optimize.curve_fit( func, cluster_theta, cluster_alpha, p0=first_guess, jac=jac, sigma=cluster_error) r = params[0] phi = params[1] % (2*np.pi) fitted = func(trimmed_theta, r, phi) fitted_list.append(fitted) params_list.append([r, phi]) fitted_2d = np.stack(fitted_list, axis=0) params_list = sorted(params_list, key=lambda l: l[1], reverse=True) r = [row[0] for row in params_list] phi = [row[1] for row in params_list] Xs, Ys = Section.to_xy(r, phi, True) if PLOT: # plt_local.figure(figsize=(6, 4)) # plt_local.imshow(correlation) # plt_local.figure(figsize=(6, 4)) # plt_local.imshow(u) plt_local.figure(figsize=(6, 4)) plt_local.imshow(dissimilarity) # plt_local.figure(figsize=(6, 4)) # plt_local.imshow(np.degrees(fitted_2d)) plt_local.figure(figsize=(6, 4)) plt_local.plot(Xs, Ys, marker='o', color=COLORS[2]) plt_local.gca().set_aspect('equal', adjustable='box') plt_local.figure(figsize=(6, 4)) plt_local.subplot(2, 1, 2) plt_local.scatter(np.degrees(trimmed_theta), np.degrees(trimmed_alpha), color=COLORS[1]) plt_local.plot(np.degrees(trimmed_theta), np.degrees( fitted_2d.max(0)), color=COLORS[2]) plt_local.scatter(np.degrees(trimmed_theta), filtered_labels, color=COLORS[3]) if noiseless.any(): plt_local.plot(np.degrees(theta), np.degrees( noiseless), color=COLORS[0]) plt_local.subplot(2, 1, 1) plt_local.plot(Xs, Ys, marker='o', color=COLORS[2]) plt_local.gca().set_aspect('equal', adjustable='box') return r, phi def main(): """main""" h = 100 def f(theta, r, phi): return func(theta, r, phi, h) def j(theta, r, phi): return jacobian(theta, r, phi, h) def g(theta, alpha): return guess(theta, alpha, h) cam = Camera(h) if SIMULATE: points = np.array([[1, 0], [0, 1], [-1, 0], [-1, -1], [1, -1]]) # points = np.array([[1,0.5],[0,1],[-1,0.5],[-1,-.5],[0,-1],[1,-.5]]) # points = np.array([[1,0],[0,1],[-1,0],[0,-1]]) section = Section(points) length = 36*2 theta = np.linspace(0, 2*np.pi, length) result = section.rotate(theta) # Seed the random number generator for reproducibility np.random.seed(0) true_alpha = cam.simulate_measurement(result) noisy_alpha = true_alpha + np.random.normal(size=length)/2000 r = h*np.sin(noisy_alpha) phi = noisy_alpha-theta Xs, Ys = Section.to_xy(r, phi, True) run(theta, noisy_alpha, func=f, jac=j, guess=g, kernel=3, plt_local=plt, noiseless=true_alpha) else: theta, noisy_alpha = cam.measure("./img/0520_2301/") run(theta, noisy_alpha, func=f, jac=j, guess=g, kernel=5, plt_local=plt) # if PLOT: # section.plot(plt) # plt.plot(Xs, Ys, marker='o', color=COLORS[1]) # plt.gca().set_aspect('equal', adjustable='box') # plt.figure(figsize=(6, 4)) # plt.subplot(2, 1, 1) # section.Plot(plt) # plt.subplot(2, 1, 2) # plt.plot(np.degrees(theta), np.degrees(true_alpha)) # plt.figure(figsize=(6, 4)) # plt.subplot(2, 1, 1) # section.plot(plt) # plt.plot(Xs, Ys, marker='o', color=COLORS[1]) # plt.gca().set_aspect('equal', adjustable='box') # plt.subplot(2, 1, 2) # plt.plot(np.degrees(theta), np.degrees(true_alpha)) # plt.scatter(np.degrees(theta), np.degrees( # noisy_alpha), color=COLORS[1]) plt.show() PLOT = True SIMULATE = False COLORS = plt.rcParams['axes.prop_cycle'].by_key()['color'] main()
2feaa8a3c7306194722b7faf9e73d72cedf303ba
b0a7df57997b63e049401464a77b56249f43c9ba
/petrol_station_system/__manifest__.py
2467da5d898f24e1b461ece307a6fb72276b88c4
[]
no_license
darlamichhane/OdooModules
9447e523a359298db60bd0c85174c303b2de93aa
1c94bc8157456b9250cc7a42dcd42af214902355
refs/heads/master
2020-12-09T10:56:08.691053
2020-07-26T17:15:42
2020-07-26T17:15:42
233,283,640
0
0
null
null
null
null
UTF-8
Python
false
false
1,330
py
# -*- coding: utf-8 -*-
{
    'name': "Petrol Station Management System",
    'version': 'PSIMS-1.0',
    'summary': """Manage Petrol Station Information System""",
    'description': """This module manages a Petrol Station Information System""",
    'author': "Utshav Ghimire",
    'company': 'Sayapatri Technology',
    'website': "https://www.sayapatritech.com",
    'category': 'Accounting',
    'depends': ['base', 'sale', 'account', 'procurement', 'ab_access_control'],
    'data': [
        'security/security.xml',
        'security/ir.model.access.csv',
        'views/res_partner.xml',
        'views/petrolstation_menu.xml',
        'views/token_management.xml',
        'views/vehicle_info.xml',
        'data/sequence.xml',
        'report/token_pdf.xml',
        'report/report_token.xml',
        'views/sales_agreement.xml',
        'views/account_invoice.xml',
        'views/sale_order.xml',
        'report/report_salesagreement.xml',
        'report/sales_agreement_report.xml',
        'report/report_token_receipt.xml',
        'report/token_receipt.xml',
        'views/token_sales_tree.xml',
        'views/token_sales.xml',
        'report/tax_invoice.xml',
        'report/invoice_template.xml',
    ],
    'license': 'AGPL-3',
    'installable': True,
    'auto_install': False,
    'application': False,
}
9d4fd5e0c553161ab2e595fadaf22623ab69a6d0
65704e2b644e6c21bc50807d25ace6ee150afd0f
/web/project/models_test.py
5598efbc7b6fb7a63a73bdb8483a87b29dd95102
[ "MIT" ]
permissive
rubenwap/Daily
968dbb57151cb2ca9262252d37acf2bf1358500b
6501926b3eff25da60a7272f8b394aeb9f259df3
refs/heads/master
2021-06-06T11:01:01.422654
2019-08-22T10:40:32
2019-08-22T10:40:32
107,892,562
2
0
MIT
2021-03-20T01:17:49
2017-10-22T18:39:26
JavaScript
UTF-8
Python
false
false
274
py
import os
import models
import unittest
import tempfile


class modelsTestCase(unittest.TestCase):

    def test_empty_db(self):
        rv = models.create_api.get('/api/')
        assert b'No entries here so far' in rv.data


if __name__ == '__main__':
    unittest.main()
8bdb443082fa9fe82cc2c73f8f2937d6f033574a
10a0fa58b99312dd6853fff744b48e359decc40a
/sofutomo/urls.py
6f1dca94088b3975e160bfe27be84127e9dbeeb1
[]
no_license
django-iori/bandfriends
e5fe176a0d7805cd856f8681b41cf0010b402a9e
e097eb423fe357611a003b5caf9fb00fe9e57e1b
refs/heads/master
2023-07-31T09:24:55.819083
2021-09-17T16:30:15
2021-09-17T16:30:15
405,573,218
0
0
null
null
null
null
UTF-8
Python
false
false
866
py
"""sofutomo URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/3.2/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: path('', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.urls import include, path 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) """ from django.contrib import admin from django.urls import path, include from . import settings from django.conf.urls.static import static urlpatterns = [ path('admin/', admin.site.urls), path('',include('sofutomoapp.urls')) ]
7ea26a36c680a685587d647cbcaa3f974142f6c8
f5226bf0b33c556d92ece4bc6e8718654cd3fc5c
/IDS_Deeplearning/MLP_nets.py
90f77f0e0b62bac0bce02f554abf7cf27e74fada
[ "MIT" ]
permissive
holytemple/DeepLearning_IDS
3cafc06f125825f04832c619ec0486d28aee1f54
bdb1bc0a9ff113563796853058eab43056c5380d
refs/heads/master
2022-04-25T19:03:44.051931
2020-04-26T22:07:57
2020-04-26T22:07:57
null
0
0
null
null
null
null
UTF-8
Python
false
false
2,028
py
from sklearn.neural_network import MLPClassifier
from sklearn import model_selection  # cross_val_score
from sklearn.metrics import precision_score, precision_recall_fscore_support
import numpy as np

# MORE INFO: http://scikit-learn.org/stable/modules/generated/sklearn.neural_network.MLPClassifier.html
# Training data: X = n_samples x m_features
# NOTE

""" Generic Multilayer Perceptron"""


class MLP_general(object):
    """ Create and set the layers with their sizes
    i.e. (MLP_general(10, 20) has 2 layers of sizes 10 and 20)"""

    def __init__(self, *layers, a=1e-5, max_i=1500):
        # The collected *layers tuple is used as the hidden_layer_sizes of the network.
        self.classifier = MLPClassifier(hidden_layer_sizes=layers, solver='adam', alpha=a,
                                        random_state=1, max_iter=max_i, verbose=True)

    def train(self, X, y, dataset):
        self.classifier.fit(X, y)

    def test(self, X_test):
        return self.classifier.predict(X_test)

    def test_batch(self, X_test):
        return self.classifier.predict(np.transpose(X_test.reshape(X_test.shape[0], -1))).flatten()

    def do_nothing(self, X_test):
        return X_test

    def compute_dataset(self, input):
        return np.apply_along_axis(self.test_batch, axis=0, arr=input).flatten()

    def validation(self, data, y_data, y_target):
        # kfold = KFold(n_splits=10, shuffle=True, random_state=seed)
        # cv = kfold
        x = np.transpose(data)
        accuracy = model_selection.cross_val_score(self.classifier, x, y_target, scoring='accuracy')
        # precision = model_selection.cross_val_score(self.classifier, x, target, scoring='precision')
        # precision_score(y_true, y_pred, average='macro')
        # recall = model_selection.cross_val_score(self.classifier, x, target, scoring='recall')
        precision, recall, fscore, m = precision_recall_fscore_support(y_target, y_data, average='macro')
        print("MLP Validation:")
        print(str(accuracy[0]) + ", " + str(precision) + ", " + str(recall))

########################################################################
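A minimal, assumption-laden sketch of driving the wrapper above: the toy data is random and the import path is assumed from the file location, so this only illustrates the call pattern.

# Illustrative only: train and evaluate MLP_general on random toy data.
import numpy as np
from MLP_nets import MLP_general   # assumed import path for the module above

X = np.random.rand(100, 4)           # 100 samples, 4 features
y = np.random.randint(0, 2, 100)     # binary labels

mlp = MLP_general(10, 20, a=1e-4, max_i=500)   # two hidden layers of sizes 10 and 20
mlp.train(X, y, dataset=None)
pred = mlp.test(X)
mlp.validation(np.transpose(X), pred, y)       # validation() transposes its data argument back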
d94e381ba8b81c1d856d41d573d66a57415d6c5a
e61bd7e6632ae57834659a093628159597beeab5
/tests/locator.py
72a8948effd329c7d766f347f47d542aff608344
[]
no_license
batatone/AV2TAV2021.1
4e6255ac3c9d01b10b58602ecbdf3cdb16d27d0b
35adc0ffd2dce3e83f8662818039f0b551b57d70
refs/heads/master
2023-06-25T17:01:31.007078
2021-07-20T02:35:37
2021-07-20T02:35:37
387,648,245
0
0
null
null
null
null
UTF-8
Python
false
false
341
py
from selenium.webdriver.common.by import By


class MainPageLocators(object):
    """A class for main page locators. All main page locators should come here"""

    GO_BUTTON = (By.ID, 'submit')


class SearchResultsPageLocators(object):
    """A class for search results locators. All search results locators should come here"""

    pass
7942776927a978188b0a944b293ba33811c03608
72f153bd36709ef442b6bf49516f9d6312fc52f7
/students/migrations/0050_auto_20171119_1758.py
5cd2ab71161212c3b4a7298b86318ab78ecce229
[]
no_license
sabinusi/fypBackended
421a6132d7b4dd669eea4b5ee3dac6530174fcbd
02554f37ba3b3bbf3ee03281f5b75af9cac9b6d5
refs/heads/master
2020-03-23T15:18:46.580040
2018-07-21T03:03:51
2018-07-21T03:03:51
141,737,469
0
0
null
null
null
null
UTF-8
Python
false
false
689
py
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-11-19 14:58
from __future__ import unicode_literals

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('students', '0049_auto_20171108_0925'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='message',
            name='student_id',
        ),
        migrations.AddField(
            model_name='message',
            name='student_group_id',
            field=models.ForeignKey(max_length=100, null=True, on_delete=django.db.models.deletion.CASCADE, to='students.StudentGroups'),
        ),
    ]
d626db6ccf34087b1f0c1f1b1f9a1947717d0114
6f746a10104bbf07b8dcb1446d4fd05a81f7d528
/manual.py
b7b11bfed5792d524ac754618768d063db9d7cdc
[]
no_license
srishabh26/project
86666e248c71c2a420fc543df695e5c52339f6ac
f1783e1da0478ae4a7ff3e7ea112e63c5c42d3b7
refs/heads/master
2020-12-30T09:58:34.104826
2017-08-03T16:36:53
2017-08-03T16:36:53
99,248,019
0
0
null
null
null
null
UTF-8
Python
false
false
1,447
py
#!/usr/bin/python
print "Content-Type:text/html"
print

import commands
import operator
import os
import cgi
import cgitb
cgitb.enable()

t = commands.getstatusoutput("nmap -sP 192.168.56.0-255 | grep 'Nmap scan'| awk '{print $5}'")
commands.getstatusoutput("touch /now/file1.txt")
f1 = open("/now/file1.txt", 'wrt')
f1.write(t[1])
f2 = open("/now/file1.txt", 'r')
f1.close()
l = f2.readlines()
l1 = []
for x in l:
    p = x.strip()
    l1.append(p)

mydict = dict()
for x in l1:
    print x
    if (x == '192.168.56.1' or x == '192.168.56.100' or x == '192.168.56.101'):
        pass
    else:
        t = commands.getstatusoutput("sshpass -p a ssh -l root " + x + " free -m |awk 'NR==2{print $2}'")
        mydict[x] = int(t[1])
        mydict.update()

sorted_mydict = sorted(mydict.items(), key=operator.itemgetter(1))
ind = 0
cnt = 1
for i in sorted_mydict:
    if cnt == 1:
        print """<table border="1"> <th>IP OF SYSTEM</th> <th>FREE-RAM</th>"""
    print """<tr> <td>%s </td>""" % sorted_mydict[ind][0]
    print """<td>%s</td>""" % sorted_mydict[ind][1]
    print """</tr>"""
    cnt = cnt + 1
    ind = ind + 1

print """<form action='http://192.168.56.101/cgi-bin/rishabh.py'> Namenode-ip<input type="text" name='nn' /><br /> Job-Tracker<input type="text" name='jt' /><br /> """
for i in range(0, (len(sorted_mydict)-2), 1):
    print """ Datanode<input type="text" name='dn' /><br/>"""
print """<input type="submit" /></form>"""
da5def1d289f5279c5f5974e7fee349bff40d76d
579f794e83bfb7f15f8db6c6700b04e014962767
/python_server.py
7cfe93d18d391f52173f2c7a1f7053f3b0500d5b
[ "Apache-2.0" ]
permissive
mavlyutovrus/books_ocr
e31728785ceb0f4293815b3a910d2f8e85e66a65
e033c676bc2fb432f1820ee752573c0d0f5315db
refs/heads/master
2021-01-22T04:54:17.444752
2015-08-05T15:15:35
2015-08-05T15:15:35
39,854,726
0
0
null
null
null
null
UTF-8
Python
false
false
7,503
py
import numpy import time from datetime import timedelta import BaseHTTPServer import urlparse import urllib2 import json import os import sys from assign_page_numbers import upload_page_data def merge_blocks(first, second): return ( (min(first[0][0], second[0][0]), max(first[0][1], second[0][1])), (min(first[1][0], second[1][0]), max(first[1][1], second[1][1])), ) from datetime import datetime from urlparse import parse_qs def dump_block(block): return ",".join(str(item) for item in [block[0][0], block[0][1], block[1][0], block[1][1]]) def parse_block_dump(block): x1, x2, y1, y2= [int(item) for item in block.split(",")] return ((x1, x2), (y1, y2)) def overlap(first, second): if first[0][0] < second[0][1] and first[0][1] > second[0][0] and \ first[1][0] < second[1][1] and first[1][1] > second[1][0]: return True return False def remove_all_formulas_overlapping_images(paragraphs, images, formulas): while True: to_drop_images = set() to_drop_formulas = set() for first in xrange(len(images)): if first in to_drop_images: continue for second in xrange(first + 1, len(images)): if second in to_drop_images: continue if overlap(images[first], images[second]): images[first] = merge_blocks(images[first], images[second]) to_drop_images.add(second) for form_index in xrange(len(formulas)): if form_index in to_drop_formulas: continue if overlap(images[first], formulas[form_index]): images[first] = merge_blocks(images[first], formulas[form_index]) to_drop_formulas.add(form_index) if not to_drop_formulas and not to_drop_images: break formulas = [formulas[form_index] for form_index in xrange(len(formulas)) if not form_index in to_drop_formulas] images = [images[index] for index in xrange(len(images)) if not index in to_drop_images] return paragraphs, images, formulas img_source = "/home/arslan/src/ngpedia/016774_rot/" blocks_source = "/home/arslan/src/ngpedia/016774_blocks/" pages = [fname for fname in os.listdir(img_source) \ if not fname.startswith(".") and fname[-3:] in ["png", "tif", "jpg", "peg"]] pages.sort() def save_corrected_blocks(paragraphs, images, formulas, page): paragraphs, images, formulas = remove_all_formulas_overlapping_images(paragraphs, images, formulas) dump_line = "" for block in paragraphs: dump_line += "paragraph\t" + dump_block(block) + "\n" for block in images: dump_line += "image\t" + dump_block(block) + "\n" for block in formulas: dump_line += "formula\t" + dump_block(block) + "\n" open(blocks_source + page + ".corr", "w").write(dump_line) def load_corrected_blocks(paragraphs, images, formulas, page): if os.path.isfile(blocks_source + page + ".corr"): for line in open(blocks_source + page + ".corr"): addr, block_str = line.strip().split("\t") block = parse_block_dump(block_str) if addr == "paragraph": paragraphs.append(block) elif addr =="image": images.append(block) elif addr == "formula": formulas.append(block) else: print "FUCKUP", line return True return False class GetHandler (BaseHTTPServer.BaseHTTPRequestHandler): def do_GET(self): full_query = self.path full_query = full_query.replace("?callback=", "&callback=") query = urlparse.parse_qs(urlparse.urlparse(full_query).query) query_type = full_query.split("?")[0] page = pages[0] response = "['', [], [], []]" if "/next_page" in query_type and "page" in query: page = query["page"][0].split("/")[-1] if page in pages: cur_index = pages.index(page) if cur_index < len(pages): page = pages[cur_index + 1] if "/prev_page" in query_type and "page" in query: page = query["page"][0].split("/")[-1] if page in pages: cur_index = 
pages.index(page) if cur_index > 0: page = pages[cur_index - 1] if "/page_send" in query_type and "page" in query: page = query["page"][0].split("/")[-1] paragraphs = [] formulas = [] images = [] for field, array in [("p", paragraphs), ("f", formulas), ("i", images)]: if field in query: for block in query[field]: try: x1, x2, y1, y2 = [int(chunk) for chunk in block.split(',')] array += [((x1, x2), (y1, y2),)] except: print "fuckup:", block save_corrected_blocks(paragraphs, images, formulas, page) print "saved" if 1: print page paragraphs_blocks = [] formulas = [] images = [] if not load_corrected_blocks(paragraphs_blocks, images, formulas, page): print "load orig" paragraphs, _, formulas, images, _ = upload_page_data(blocks_source + page) paragraphs_blocks = [] for paragraph in paragraphs: by_height = [(line[0][0], line) for line in paragraph] by_height.sort() paragraph = [line for _, line in by_height] paragraph_block = paragraph[0] for line_block in paragraph: paragraph_block = merge_blocks(line_block, paragraph_block) paragraphs_blocks += [paragraph_block] parags_str = ",".join([str(coord) for block in paragraphs_blocks for dim in block for coord in dim]) formulas_str = ",".join([str(coord) for block in formulas for dim in block for coord in dim]) images_str = ",".join([str(coord) for block in images for dim in block for coord in dim]) response = "[\"%s\", [%s], [%s], [%s]]" % (img_source + page, parags_str, images_str, formulas_str) function_name = query.has_key("callback") and query["callback"][0] or "" response = function_name + "(" + response + ")" response = response.encode("utf8") request_headers = self.headers.__str__().replace(chr(10), " ").replace(chr(13), " ") print "[STAT]\tclient:", self.client_address, "\theaders:", request_headers, "\tquery:", full_query sys.stdout.flush() self.send_response(200) self.send_header("Content-type", "text/plain") self.send_header("Content-Length", str(len(response))) self.end_headers() #self.wfile.write(json_result) self.wfile.write(response) print response def run(server_class=BaseHTTPServer.HTTPServer, handler_class=GetHandler): print "starting" server_address = ('', 8084) httpd = server_class(server_address, handler_class) httpd.serve_forever() run()
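A hedged client-side sketch against a locally running instance of the server above (it listens on port 8084 and answers JSONP-style); the page file name used here is hypothetical.

# Illustrative only: query the /next_page endpoint of the running server.
import urllib.request

url = "http://localhost:8084/next_page?page=0001.png&callback=cb"
print(urllib.request.urlopen(url).read().decode("utf8"))   # cb(["<image path>", [...], [...], [...]])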
65d2241ac76d4aae7a55345dc850a0577c4b935e
5eae461b1935874a5dd1a7314d6586aadd083f76
/Trees/trim_binary_search_tree.py
2b66eb1a8548269248cfbab229c381c8bcd4552f
[]
no_license
tuantla80/Data-Structure-and-Algorithm
92d0c6cb6cf8d1c19025af00beaab8b0e99641cd
1412f4e0dae33dc2bb7d20ca07f6cf7a2ac42455
refs/heads/master
2020-05-04T14:52:53.337125
2020-01-22T16:11:16
2020-01-22T16:11:16
179,214,154
2
0
null
null
null
null
UTF-8
Python
false
false
1,057
py
""" Problem: Given the root of a binary search tree (BST) and 2 numbers min and max, trim the tree such that all the numbers in the new tree are between min and max (inclusive). The resulting tree should still be a valid binary search tree. Ref: https://en.wikipedia.org/wiki/Binary_search_tree https://www.geeksforgeeks.org/binary-search-tree-data-structure/ Binary Search Tree is a node-based binary tree data structure which has the following properties: - The left subtree of a node contains only nodes with keys lesser than the node’s key. - The right subtree of a node contains only nodes with keys greater than the node’s key. - The left and right subtree each must also be a binary search tree. """ def trim_BST(tree, min_val, max_val): if not tree: return if min_val <= tree.val <= max_val: return tree if tree.val < min_val: return trim_BST(tree.right, min_val, max_val) # Recursive call if tree.val > max_val: return trim_BST(tree.left, min_val, max_val) # Recursive call
a4b19de9b958c2c4d7f83c0e579f40c2fb76724f
63b8e0cce35d7e6ba8f8e94294d4300902cf47b0
/src/seminar/group_917/seminar_06_09/repository/repository_exception.py
0420d3daaed010247d191a9699e51d900b8ea2a1
[]
no_license
912-CUCONU-MARIA/FP
722e77b459ccffc56788614640c80876d5120b0a
367935f3c1320260d2e53bebf841a89cffc31dcd
refs/heads/main
2023-09-05T02:39:14.994359
2021-11-12T18:52:57
2021-11-12T18:52:57
null
0
0
null
null
null
null
UTF-8
Python
false
false
223
py
class RepositoryException(Exception):
    def __init__(self, message):
        self._message = message

    @property
    def message(self):
        return self._message

    def __str__(self):
        return self._message
38321c6cc509187802168e77bee167a033d8c856
cdaa3fea8e92010caf0c5abd82f98092000eb770
/python/build/lib/dynamixel_sdk/robotis_def.py
a54e6e8bf29cf6bcb24d9a2fcb5f0214315d8ac2
[]
no_license
degibbons/TheoTheRobot
4a1ad8e121a380811d8f7f5b507825307ae20bf2
d195891f45fbfe5b973c8a83f7023065b4dd3642
refs/heads/master
2023-06-19T05:30:49.504624
2021-07-14T21:00:43
2021-07-14T21:00:43
295,028,325
1
0
null
null
null
null
UTF-8
Python
false
false
2,113
py
#!/usr/bin/env python
# -*- coding: utf-8 -*-

################################################################################
# Copyright 2017 ROBOTIS CO., LTD.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################

# Author: Ryu Woon Jung (Leon)

BROADCAST_ID = 0xFE  # 254
MAX_ID = 0xFC  # 252

# Instruction for DXL Protocol
INST_PING = 1
INST_READ = 2
INST_WRITE = 3
INST_REG_WRITE = 4
INST_ACTION = 5
INST_FACTORY_RESET = 6
INST_CLEAR = 16
INST_SYNC_WRITE = 131  # 0x83
INST_BULK_READ = 146  # 0x92
# --- Only for 2.0 ---
INST_REBOOT = 8
INST_STATUS = 85  # 0x55
INST_SYNC_READ = 130  # 0x82
INST_BULK_WRITE = 147  # 0x93

# Communication Result
COMM_SUCCESS = 0  # tx or rx packet communication success
COMM_PORT_BUSY = -1000  # Port is busy (in use)
COMM_TX_FAIL = -1001  # Failed transmit instruction packet
COMM_RX_FAIL = -1002  # Failed get status packet
COMM_TX_ERROR = -2000  # Incorrect instruction packet
COMM_RX_WAITING = -3000  # Now receiving status packet
COMM_RX_TIMEOUT = -3001  # There is no status packet
COMM_RX_CORRUPT = -3002  # Incorrect status packet
COMM_NOT_AVAILABLE = -9000  #


# Macro for Control Table Value
def DXL_MAKEWORD(a, b):
    return (a & 0xFF) | ((b & 0xFF) << 8)


def DXL_MAKEDWORD(a, b):
    return (a & 0xFFFF) | (b & 0xFFFF) << 16


def DXL_LOWORD(l):
    return l & 0xFFFF


def DXL_HIWORD(l):
    return (l >> 16) & 0xFFFF


def DXL_LOBYTE(w):
    return w & 0xFF


def DXL_HIBYTE(w):
    return (w >> 8) & 0xFF
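A quick illustration of the word/byte helpers above, e.g. packing a 16-bit value into two packet bytes and back:

goal_position = 0x0312                      # 786 in decimal
low, high = DXL_LOBYTE(goal_position), DXL_HIBYTE(goal_position)
print(hex(low), hex(high))                  # 0x12 0x3
print(hex(DXL_MAKEWORD(low, high)))         # 0x312 -- round-trips back to the original word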
1791b1c95ca407ce116c93b10189f8b1acf6f6a7
3e2ea4d833a7c7511251d3df28eb91b4a1762d21
/projeto/projeto/settings.py
978964177b362986ff837afcce1339a064287ea6
[]
no_license
UesleiJf/django-docker
1bc9da52b1ef25fea1b8dc4b836798dac36141a8
efbfacdd4c3cf43e049a8ca396553f3ffb701a19
refs/heads/master
2021-06-24T13:11:01.545244
2019-12-05T00:39:36
2019-12-05T00:39:36
212,711,060
1
0
null
2021-03-19T22:42:22
2019-10-04T01:02:23
Python
UTF-8
Python
false
false
3,347
py
""" Django settings for projeto project. Generated by 'django-admin startproject' using Django 2.2.6. For more information on this file, see https://docs.djangoproject.com/en/2.2/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/2.2/ref/settings/ """ import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = '*uyw9@s-o(sw$mw^a8g@8)7&&nx(g32gk2fud=zjw#^gzux8b$' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'apps.core', ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'projeto.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'projeto.wsgi.application' # Database # https://docs.djangoproject.com/en/2.2/ref/settings/#databases #DATABASES = { # 'default': { # 'ENGINE': 'django.db.backends.sqlite3', # 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), # } #} DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'postgres', 'USER': 'postgres', 'PASSWORD': 'postgres', 'HOST': 'db', 'PORT': '5432', } } # Password validation # https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/2.2/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/2.2/howto/static-files/ STATIC_URL = '/static/'
4837e0f2e8333a0f809ef8ca31bc658d1cf1a15e
ee3f16b398eac17027f2910054ea93176f89d994
/scrapy_test_task/web_crawl/web_crawl/spiders/gaz_spider.py
c3fd5a099fd74951ff42cc6b50206af893f4525c
[]
no_license
FE1979/Scrapy_test_task
50dd87d62c079f18e35b849adec7abca15cee869
e721ecc33b49c2b74f99947b934bbb10085a54fb
refs/heads/master
2022-11-09T23:04:39.906579
2019-07-03T03:58:16
2019-07-03T03:58:16
192,884,832
0
0
null
2022-11-04T19:35:15
2019-06-20T08:52:11
Python
UTF-8
Python
false
false
2,505
py
import scrapy
import time
from scrapy.loader import ItemLoader
from web_crawl.items import VacancyItem


class GazpromSpider(scrapy.Spider):
    name = 'gaz_spider'
    base_url = 'https://www.gazpromvacancy.ru/'
    start_url = base_url + 'vacancies/'
    count = 0

    def start_requests(self):
        yield scrapy.Request(self.start_url, callback=self.parse)

    def parse(self, response):
        """ Moves through pages and scrap every vacancy """
        # get vacancy count
        jobs_found = response.css('span.jobs-found strong::text').get()

        # get vacancies urls on the current page
        jobs_list = response.css('div.list-container').css('div.item')
        job_urls = [job.css('a::attr(href)').get() for job in jobs_list]
        for url in job_urls:
            yield response.follow(self.base_url + url, self.parse_job)

        next_page = response.css('li.next a::attr(href)').get()
        if next_page is not None:
            next_page = self.base_url + next_page
            yield scrapy.Request(next_page, callback=self.parse)

    def parse_job(self, response):
        """ Gets job description """
        vacancy = ItemLoader(item=VacancyItem(), response=response)
        self.count += 1

        job_desc = ''.join(item for item in
                           response.css('div.job-description').css('::text').getall())
        job_desc = ''.join(c for c in job_desc if c.isprintable())
        job_reqs = ''.join(item for item in
                           response.css('div.job-requirements').css('::text').getall())
        job_reqs = ''.join(c for c in job_reqs if c.isprintable())
        job_date = ''.join(c for c in
                           response.css('span.date::text').get() if
                           c.isprintable())
        job_region = ''.join(c for c in
                             response.css('span.region::text').get() if
                             c.isprintable())

        vacancy.add_value('count', self.count)
        vacancy.add_css('job_title', 'h1.mainHeader::text')
        vacancy.add_value('post_date', job_date)
        vacancy.add_value('region', job_region)
        vacancy.add_css('employer', 'div.employer dd::text')
        vacancy.add_css('location', 'div.location dd::text')
        vacancy.add_value('job_description', job_desc)
        vacancy.add_value('job_requirements', job_reqs)
        vacancy.add_value('scrap_date', time.strftime('%d/%m/%Y'))

        return vacancy.load_item()
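One way to exercise the spider above outside of `scrapy crawl gaz_spider` is a small in-process driver; the feed file name is arbitrary and the import path is assumed from the file location above.

# Illustrative only: run GazpromSpider in-process and dump scraped items to JSON.
from scrapy.crawler import CrawlerProcess
from web_crawl.spiders.gaz_spider import GazpromSpider

process = CrawlerProcess(settings={"FEEDS": {"vacancies.json": {"format": "json"}}})
process.crawl(GazpromSpider)
process.start()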
669d3d389b683c155d5e63ee53d083d6d33985a3
1aaba2be0479b43a76f3e85ea62cad8d42827d49
/lib/pymedphys/_experimental/wlutz/pylinacwrapper.py
6235b8067c04ba22fe53286afd5b0f5215d6a0b9
[ "Apache-2.0" ]
permissive
changran/pymedphys
a44a9aa9ec375c17ea73815c1a8e2a6a5a002c1e
164a7a5c6051ab4c8fd6efdb79c3bfb0684b65df
refs/heads/main
2023-07-30T21:32:07.697743
2021-09-10T11:37:02
2021-09-10T11:37:02
407,394,958
1
0
Apache-2.0
2021-09-17T03:42:49
2021-09-17T03:42:48
null
UTF-8
Python
false
false
8,650
py
# Copyright (C) 2020 Cancer Care Associates and Simon Biggs # Copyright (C) 2019 Cancer Care Associates # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from pymedphys._imports import numpy as np from pymedphys._experimental.vendor.pylinac_vendored import ( winstonlutz as _pylinac_vendored_wlutz, ) from pymedphys._experimental.vendor.pylinac_vendored._pylinac_installed import ( pylinac as _pylinac_installed, ) from . import transformation as _transformation def _get_dx_dy_dpi(x, y): dx = _convert_grid_to_step_size(x) dy = _convert_grid_to_step_size(y) if dx == dy: dpmm = dx dpi = dpmm * 25.4 else: dpi = None return dx, dy, dpi def run_wlutz_raw( x, y, image, find_bb=True, pylinac_version=None, fill_errors_with_nan=False ): dx, dy, dpi = _get_dx_dy_dpi(x, y) WLImage = _get_class_for_version(pylinac_version) wl_image = WLImage(image, dpi=dpi) nan_coords = [np.nan, np.nan] try: field_centre = [ wl_image.field_cax.x * dx + np.min(x), wl_image.field_cax.y * dy + np.min(y), ] except ValueError: if fill_errors_with_nan: field_centre = nan_coords else: raise if find_bb: try: bb_centre = [wl_image.bb.x * dx + np.min(x), wl_image.bb.y * dy + np.min(y)] except ValueError: if fill_errors_with_nan: bb_centre = nan_coords else: raise else: bb_centre = nan_coords return field_centre, bb_centre def _get_class_for_version(pylinac_version=None): if pylinac_version is None: pylinac_version = _pylinac_installed.__version__ VERSION_TO_CLASS_MAP = _pylinac_vendored_wlutz.get_version_to_class_map() WLImage = VERSION_TO_CLASS_MAP[pylinac_version] return WLImage def find_bb_only_raw(x, y, image, padding): dx, dy, dpi = _get_dx_dy_dpi(x, y) WLImage = _pylinac_vendored_wlutz.WLImageCurrent wl_image = WLImage(image, dpi=dpi) wl_image.set_bounding_box_by_padding(padding) bb_centre = [wl_image.bb.x * dx + np.min(x), wl_image.bb.y * dy + np.min(y)] return bb_centre def _convert_grid_to_step_size(x): diff_x = np.diff(x) dx_mean = np.mean(diff_x) dx_deviations = np.abs(diff_x - dx_mean) if np.any(dx_deviations > 0.00001): raise ValueError( "Exactly one grid step size required. Maximum deviation " f"from the mean was {np.max(dx_deviations)}." 
) return dx_mean def find_bb_only(x, y, image, edge_lengths, penumbra, field_centre, field_rotation): extra_pixels_padding = 20 out_of_field_padding_factor = 2 in_field_padding_factor = 2 bounding_box_padding_factor = out_of_field_padding_factor + in_field_padding_factor dx = _convert_grid_to_step_size(x) dy = _convert_grid_to_step_size(y) x_radius = ( edge_lengths[0] / 2 + penumbra * out_of_field_padding_factor + extra_pixels_padding * dx ) y_radius = ( edge_lengths[1] / 2 + penumbra * out_of_field_padding_factor + extra_pixels_padding * dy ) bounding_box_x_padding = ( np.round(bounding_box_padding_factor * penumbra / dx) + extra_pixels_padding ) bounding_box_y_padding = ( np.round(bounding_box_padding_factor * penumbra / dy) + extra_pixels_padding ) padding = [bounding_box_x_padding, bounding_box_y_padding] x_new = np.arange(-x_radius, x_radius + dx / 2, dx) y_new = np.arange(-y_radius, y_radius + dy / 2, dy) centralised_image = _transformation.create_centralised_image( x, y, image, field_centre, field_rotation, new_x=x_new, new_y=y_new ) # try: raw_bb_centre = find_bb_only_raw(x_new, y_new, centralised_image, padding) # except Exception as e: # plt.pcolormesh(x_new, y_new, centralised_image, shading="nearest") # plt.axis("equal") # print(e) # plt.show() # field_centre, bb_centre = run_wlutz_raw(x_new, y_new, centralised_image) # print(field_centre) # print(bb_centre) # raise bb_centre = _transformation.transform_point( raw_bb_centre, field_centre, field_rotation ) return bb_centre def run_wlutz( x, y, image, edge_lengths, field_rotation, find_bb=True, interpolated_pixel_size=0.25, pylinac_versions=None, fill_errors_with_nan=False, ): offset_iter = 10 current_pylinac_version = _pylinac_installed.__version__ # By defining a search offset and radius, artefacts that can cause # offsets in the pylinac algorithm can be cropped out. See the # following issue for more details: # <https://github.com/jrkerns/pylinac/issues/333> # By defining the search radius to equal the maximum side length # the interpolation region being search over by PyLinac is twice # that of the maximum field edge. 
pylinac_offset_calculation = run_wlutz_with_manual_search_definition( x, y, image, field_rotation, search_radius=None, find_bb=False, pylinac_versions=[current_pylinac_version], ) previous_search_offset = pylinac_offset_calculation[current_pylinac_version][ "field_centre" ] for _ in range(offset_iter): pylinac_offset_calculation = run_wlutz_with_manual_search_definition( x, y, image, field_rotation, search_radius=np.max(edge_lengths), search_offset=previous_search_offset, find_bb=False, pylinac_versions=[current_pylinac_version], ) search_offset = pylinac_offset_calculation[current_pylinac_version][ "field_centre" ] if np.allclose(search_offset, previous_search_offset, atol=0.2): break previous_search_offset = search_offset pylinac_calculation_with_offset = run_wlutz_with_manual_search_definition( x, y, image, field_rotation, search_radius=np.max(edge_lengths), search_offset=search_offset, find_bb=find_bb, interpolated_pixel_size=interpolated_pixel_size, pylinac_versions=pylinac_versions, fill_errors_with_nan=fill_errors_with_nan, ) return pylinac_calculation_with_offset def run_wlutz_with_manual_search_definition( x, y, image, field_rotation, search_radius=None, search_offset=None, find_bb=True, interpolated_pixel_size=0.25, pylinac_versions=None, fill_errors_with_nan=False, ): if search_offset is None: search_offset = [np.mean(x), np.mean(y)] if search_radius is None: new_x = x - search_offset[0] new_y = y - search_offset[1] else: new_x = np.arange( -search_radius, # pylint: disable = invalid-unary-operand-type search_radius + interpolated_pixel_size / 2, interpolated_pixel_size, ) new_y = new_x centralised_image = _transformation.create_centralised_image( x, y, image, search_offset, field_rotation, new_x=new_x, new_y=new_y ) if pylinac_versions is None: VERSION_TO_CLASS_MAP = _pylinac_vendored_wlutz.get_version_to_class_map() pylinac_versions = VERSION_TO_CLASS_MAP.keys() results = {} for pylinac_version in pylinac_versions: raw_field_centre, raw_bb_centre = run_wlutz_raw( new_x, new_y, centralised_image, find_bb=find_bb, pylinac_version=pylinac_version, fill_errors_with_nan=fill_errors_with_nan, ) bb_centre = _transformation.transform_point( raw_bb_centre, search_offset, field_rotation ) field_centre = _transformation.transform_point( raw_field_centre, search_offset, field_rotation ) results[pylinac_version] = { "field_centre": field_centre, "bb_centre": bb_centre, } return results
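# A hypothetical call sketch for run_wlutz above; the grids, image and field
# parameters are invented for illustration only. In practice a real, uniformly
# spaced EPID image is required for the field/BB detection to succeed.
import numpy as np

x = np.arange(-20, 20.25, 0.25)     # mm, uniform spacing as _convert_grid_to_step_size requires
y = np.arange(-20, 20.25, 0.25)
image = np.zeros((len(y), len(x)))  # placeholder for the acquired image

results = run_wlutz(
    x, y, image,
    edge_lengths=[20, 20],      # mm, nominal field size
    field_rotation=0.0,         # degrees
    fill_errors_with_nan=True,  # return NaNs instead of raising on detection failure
)
for version, centres in results.items():
    print(version, centres["field_centre"], centres["bb_centre"])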
abd36f4c899df2253c2c005d0b07894d8344e61d
c9cf83cab2bf6a84800b0b8871b36182950bda01
/Beam-Deflection-Visualiser-master/choice4.py
79b2d1c14c3a8d220e073b254403f9e06d20b035
[]
no_license
1zjz/Beam-Deflection-Visualiser
76097f4b51eda7a086f663a83b290afb5efe8f2a
22d000cc82db74021a329c6caa8c0286ff422df4
refs/heads/master
2023-03-21T22:01:10.989841
2017-12-10T16:57:28
2017-12-10T16:57:28
null
0
0
null
null
null
null
UTF-8
Python
false
false
2,490
py
#--------------------------Initialization--------------------------------------------
import pandas as pd
import numpy as np
import os
from display_menu import *

#defining menu options to display
save_menu_options= np.array(["Change Directory",
                             "Stay in the current directory",
                             "Display the files in current directory",
                             "Quit"])


def load(load_forces,load_positions,beam_length,beam_type):
    #the load function reads a saved file into the program, splits the stored matrix up and assigns the values to
    #load_forces, load_positions, beam_length and beam_type.
    #Input: user input (menu selection and file name)
    path=os.getcwd()
    while True:
        #displaying directory
        print("Now you are in this directory; \n ",os.getcwd(),"\n")
        #displaying menu
        button=display_menu(save_menu_options)
        if button==1:
            #redefining path
            path=input("Type the file location you want to load the file from\n>>")
        elif button==2:
            #staying with this directory
            path=os.getcwd()
        elif button==3:
            #printing content of current directory
            print(os.listdir(path))
            continue
        elif button==4:
            #While loop is broken with QUIT option
            break
        while True:
            try:
                print("Q . Quit")
                filename=input("What is the name of the file?\n>>")
                if filename=="Q" or filename=="q": break #implementing quit option to go one submenu back
                #reading the file in and storing it in variable df
                df = pd.read_csv(path+"/"+filename, header=None)
                #assigning values
                beam_length=df[0][1]
                beam_type=df[0][0]
                load_forces=df.loc[0,:]
                load_positions=df.loc[1,:]
                load_forces=load_forces.loc[1:][:]
                load_positions=load_positions.loc[1:][:]
                load_forces = np.array([float(x) for x in load_forces])
                load_positions = np.array([float(x) for x in load_positions])
                #if for some reason the beam support type is neither 1 nor 2 we set it to both ends (1) by default
                if beam_type!=1 and beam_type!=2:
                    beam_type=1
            #error message
            except:
                print(filename+" could not be found. Please check the following:\n* Make sure you are in the correct directory\n"+
                      "* Make sure you typed the correct extension (.csv)\n")
                continue
            break
        break
    #return values
    return load_forces,load_positions, beam_length, beam_type
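# A hypothetical example of the two-row .csv layout that load() expects; the
# file name and the numbers below are invented for illustration.
#   Row 0: beam support type, force_1, force_2, ...
#   Row 1: beam length,       position_1, position_2, ...
with open("example_beam.csv", "w") as f:
    f.write("1,1000,2500\n")   # support type 1 (both ends), two point loads [N]
    f.write("5.0,1.2,3.8\n")   # 5.0 m beam, loads applied at 1.2 m and 3.8 m

# load() then reads df[0][0] as beam_type, df[0][1] as beam_length, and the
# remaining columns of rows 0 and 1 as the load forces and positions.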
b60c5932de176922174d1a077fe4313b1fe8a10f
684e4122fae5b2879483f796964804e055e7e755
/utils/logging.py
0f3ee95562592c7faea1af9cf48e12d51353a6f4
[]
no_license
paultsw/wavenet-speech
bf2273fe0fa9f682b2242715cde2c7fa810d6e51
7ad943d9cc7a6872a14bba5239a99755f70db4cd
refs/heads/master
2021-10-24T23:23:29.048869
2019-03-29T20:10:28
2019-03-29T20:10:28
100,305,982
0
0
null
null
null
null
UTF-8
Python
false
false
2,139
py
""" General logging class for PyTorch training loops. """ import os import torch class Logger(object): """ Abstracted class to expose logging methods to a series of files on the run directory. """ def __init__(self, run_dir): """ Construct logger and keep files open. """ ### specify directories; create directories if they don't exist: self.run_dir = run_dir if not os.path.exists(self.run_dir): os.makedirs(self.run_dir) self.ckpt_dir = os.path.join(run_dir, 'ckpts/') if not os.path.exists(self.ckpt_dir): os.makedirs(self.ckpt_dir) self.log_dir = os.path.join(run_dir, 'logs/') if not os.path.exists(self.log_dir): os.makedirs(self.log_dir) ### create output logging file and keep open: self.training_log = os.path.join(self.log_dir, 'training.log') self._training_log_f = open(self.training_log, 'w') self.messages_log = os.path.join(self.log_dir, 'messages.log') self._messages_log_f = open(self.messages_log, 'w') def close(self): """ Close all file handles. """ self._training_log_f.close() self._messages_log_f.close() def log(self, loss, step, train, valid): """ Log a loss message to the logfile. """ self._training_log_f.write( "{0} @ step: {1} | Training: {2:.4f} | Validation {3:.4f} \n".format(loss, step, train, valid)) self._training_log_f.flush() def save(self, timestep, model_core, model_ctc): """Save model to run directory.""" _core_model_path = os.path.join(self.ckpt_dir, "wavenet_core.t{}.pt".format(timestep)) _ctc_model_path = os.path.join(self.ckpt_dir, "wavenet_ctc.t{}.pt".format(timestep)) torch.save(model_core.state_dict(), _core_model_path) torch.save(model_ctc.state_dict(), _ctc_model_path) self._messages_log_f.write( "[{0}] Saved wavenet base models to: {1}, {2}\n".format(timestep, _core_model_path, _ctc_model_path)) self._messages_log_f.flush()
73bc23fd4fe805aefa5137d844e5ab841fbfef68
5cad23c80ce4e22603a14d21eac48fa76517595c
/tfidf.py
958a5490e0dc729853e33b3cdf9c4a23a9e2d09d
[]
no_license
cramraj8/adrc-clustering
4f8e63b62be4bbe986679894487d89423fb26a05
d033452b9cadd40833e49c297077244383239370
refs/heads/master
2021-06-13T11:13:14.003846
2017-04-06T17:36:46
2017-04-06T17:36:46
null
0
0
null
null
null
null
UTF-8
Python
false
false
861
py
import itertools
import math
import numpy as np
from collections import Counter


def get_features_names(documents):
    features = list(set(itertools.chain(*[document.split() for document in documents])))
    return features


def get_idf(features, documents):
    idf = []
    k = len(documents)
    for feature in features:
        x = sum([1 for document in documents if feature in document])
        idf.append(math.log(k / float(x)))
    return np.array(idf)


def get_tf(features, documents):
    tf = []
    for document in documents:
        c = Counter(document.split())
        tf.append([c[feature] for feature in features])
    return np.array(tf)


def get_tfidf(documents):
    features = get_features_names(documents)
    idf = get_idf(features, documents)
    tf = get_tf(features, documents)
    tfidf = []
    for k in range(0, len(documents)):
        tfidf.append(idf * tf[k])
    return features, tfidf, tf
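# A tiny, invented corpus illustrating the helpers above: term frequencies are
# raw counts and the IDF weight is log(N / document frequency), where document
# frequency here is a substring match against each raw document string.
docs = ["the cat sat on the mat",
        "the dog sat on the log"]

features, tfidf, tf = get_tfidf(docs)
print(features)  # vocabulary pooled from both documents
print(tf)        # per-document raw term counts
print(tfidf)     # counts weighted by log(len(docs) / document frequency)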
f57544563f9837de0e195be045203d9f161b95b2
48e8667e34a872175229e092e3bf33bfce5bdb53
/movie/forms.py
074a086356a9f42d4da49bc08674137fce4ed52d
[]
no_license
abhishakvarshney/imdb
ecd33869ca428e399ec5abe9110214d9dbda1b4c
051fcf4de88b25360a527e5965325a66c2b3c7a6
refs/heads/master
2022-12-02T18:13:52.939996
2020-08-30T12:25:30
2020-08-30T12:25:30
291,211,658
0
0
null
null
null
null
UTF-8
Python
false
false
95
py
from django import forms


class UploadJSONFileForm(forms.Form):
    file = forms.FileField()
1f4544b3507b52901f1baf3fd47d4e23e168167c
7358c9c73ea8a156a5f34a16fce59f20e3d14a4b
/archive/apps/admin/projects.py
51ef35563c5cebb62ae81ce13acca948fc59ec0e
[]
no_license
trevorhreed/junta
852af2c47dcf50f6598d4c05fd9deff73278d9fe
bd082ecbbd8824e2ae36957b5412eda8db1d9e46
refs/heads/master
2021-01-19T06:35:45.918533
2014-02-02T00:14:38
2014-02-02T00:14:38
null
0
0
null
null
null
null
UTF-8
Python
false
false
649
py
from libs import mdb
from libs.apps import BaseApp
from libs.projects import Project, ProjectPhases, ProjectController


class ProjectsApp(BaseApp):
    def get(self):
        self.context['projects'] = Project.query()
        self.context['phases'] = ProjectPhases.TypesByKey
        self.render('/admin/projects/projects_main')

    def update_phase(self):
        project_id = self.request.get('project_id', None)
        phase_id = self.request.get_range('phase_id', ProjectPhases.Phase1, ProjectPhases.Phase3, ProjectPhases.Phase1)
        ProjectController.update_project_phase(project_id, phase_id)
        self.write("Updated project '"+str(project_id)+"' to phase '"+str(phase_id)+"'.")
c8d0b3e1722280a5bcad69f3261cef189a32fdc1
cc096d321ab5c6abf54fdcea67f10e77cd02dfde
/flex-backend/pypy/translator/squeak/opformatter.py
a1f3132c2c53ed7d2ff14f785c9db8e670ae7c58
[ "LicenseRef-scancode-unknown-license-reference", "MIT" ]
permissive
limweb/flex-pypy
310bd8fcd6a9ddc01c0b14a92f0298d0ae3aabd2
05aeeda183babdac80f9c10fca41e3fb1a272ccb
refs/heads/master
2021-01-19T22:10:56.654997
2008-03-19T23:51:59
2008-03-19T23:51:59
32,463,309
0
0
null
null
null
null
UTF-8
Python
false
false
7,202
py
from pypy.rlib.rarithmetic import r_int, r_uint, r_longlong, r_ulonglong from pypy.translator.squeak.codeformatter import CodeFormatter from pypy.translator.squeak.codeformatter import Message, Self, Assignment, Field def _setup_int_masks(): """Generates code for helpers to mask the various integer types.""" masks = {} # NB: behaviour of signed long longs is undefined on overflow for name, r_type in ("int", r_int), ("uint", r_uint), ("ullong", r_ulonglong): helper_name = "mask%s" % name.capitalize() if name[0] == "u": # Unsigned integer type code = """%s: i ^ i bitAnd: %s""" % (helper_name, r_type.MASK) else: # Signed integer type code = """%s: i (i <= %s) & (i >= %s) ifTrue: [^i]. (i < 0) ifTrue: [^i bitAnd: %s] ifFalse: [^(((i negated) - 1) bitAnd: %s) negated - 1] """ % (helper_name, r_type.MASK>>1, -(r_type.MASK>>1)-1, r_type.MASK>>1, r_type.MASK>>1) masks[name] = helper_name, code return masks class OpFormatter: ops = { 'new': 'new', 'runtimenew': 'new', 'classof': 'class', 'bool_not': 'not', 'cast_int_to_float': 'asFloat', # XXX this potentially incorrect (may return LargeIntegers) 'cast_float_to_int': 'truncated', } number_ops = { 'abs': 'abs', 'is_true': 'isZero not', 'neg': 'negated', 'invert': 'bitInvert', 'add': '+', 'sub': '-', 'eq': '=', 'mul': '*', 'floordiv': 'quo', 'truediv': '/ asFloat', 'mod': r'\\', 'eq': '=', 'ne': '~=', 'lt': '<', 'le': '<=', 'gt': '>', 'ge': '>=', 'and': 'bitAnd', 'or': 'bitOr', 'lshift': '<<', 'rshift': '>>', 'xor': 'bitXor', # XXX need to support x_ovf ops } number_opprefixes = "int", "uint", "llong", "ullong",\ "float", "char", "unichar" wrapping_ops = "neg", "invert", "add", "sub", "mul", "lshift" noops = "same_as", "ooupcast", "oodowncast", "cast_char_to_int", \ "cast_unichar_to_int", "cast_int_to_unichar", \ "cast_int_to_char", "cast_int_to_longlong", \ "truncate_longlong_to_int" int_masks = _setup_int_masks() def __init__(self, gen, node): self.gen = gen self.node = node self.codef = CodeFormatter(gen) def format(self, op): if self.ops.has_key(op.opname): name = self.ops[op.opname] sent = Message(name).send_to(op.args[0], op.args[1:]) return self.codef.format(sent.assign_to(op.result)) opname_parts = op.opname.split("_") if opname_parts[0] in self.number_opprefixes: return self.format_number_op( op, opname_parts[0], "_".join(opname_parts[1:])) op_method = getattr(self, "op_%s" % op.opname, None) if op_method is not None: return self.codef.format(op_method(op)) else: raise NotImplementedError( "operation not supported: %s" % op.opname) def format_number_op(self, op, ptype, opname): messages = self.number_ops[opname].split() msg = Message(messages[0]) sent_message = msg.send_to(op.args[0], op.args[1:]) for add_message in messages[1:]: sent_message = Message(add_message).send_to(sent_message, []) if opname in self.wrapping_ops \ and self.int_masks.has_key(ptype): sent_message = self.apply_mask_helper(sent_message, ptype) return self.codef.format(sent_message.assign_to(op.result)) def apply_mask_helper(self, receiver, mask_type_name): # XXX how do i get rid of this import? 
from pypy.translator.squeak.node import HelperNode mask_name, mask_code = self.int_masks[mask_type_name] helper = HelperNode(self.gen, Message(mask_name), mask_code) result = helper.apply([receiver]) self.gen.schedule_node(helper) return result def op_oosend(self, op): message_name = op.args[0].value message_name = self.gen.unique_method_name( op.args[1].concretetype, message_name) if op.args[1] == self.node.self: receiver = Self() else: receiver = op.args[1] sent_message = Message(message_name).send_to(receiver, op.args[2:]) return sent_message.assign_to(op.result) def op_oogetfield(self, op): INST = op.args[0].concretetype field_name = self.gen.unique_field_name(INST, op.args[1].value) if op.args[0] == self.node.self: # Private field access # Could also directly substitute op.result with name # everywhere for optimization. rvalue = Field(field_name) else: # Public field access rvalue = Message(field_name).send_to(op.args[0], []) return Assignment(op.result, rvalue) def op_oosetfield(self, op): # Note that the result variable is never used INST = op.args[0].concretetype field_name = self.gen.unique_field_name(INST, op.args[1].value) field_value = op.args[2] if op.args[0] == self.node.self: # Private field access return Assignment(Field(field_name), field_value) else: # Public field access return Message(field_name).send_to(op.args[0], [field_value]) def op_direct_call(self, op): # XXX how do i get rid of this import? from pypy.translator.squeak.node import FunctionNode function_name = self.gen.unique_func_name(op.args[0].value.graph) msg = Message(function_name).send_to(FunctionNode.FUNCTIONS, op.args[1:]) return msg.assign_to(op.result) def cast_bool(self, op, true_repr, false_repr): msg = Message("ifTrue: [%s] ifFalse: [%s]" % (true_repr, false_repr)) return msg.send_to(op.args[0], []).assign_to(op.result) def op_cast_bool_to_int(self, op): return self.cast_bool(op, "1", "0") op_cast_bool_to_uint = op_cast_bool_to_int def op_cast_bool_to_float(self, op): return self.cast_bool(op, "1.0", "0.0") def masking_cast(self, op, mask): cast = self.apply_mask_helper(op.args[0], mask) return Assignment(op.result, cast) def op_cast_int_to_uint(self, op): return self.masking_cast(op, "uint") def op_cast_uint_to_int(self, op): return self.masking_cast(op, "int") def op_cast_float_to_uint(self, op): truncated = Message("truncated").send_to(op.args[0], []) return Assignment(op.result, self.apply_mask_helper(truncated, "uint")) def noop(self, op): return Assignment(op.result, op.args[0]) for opname in OpFormatter.noops: setattr(OpFormatter, "op_%s" % opname, OpFormatter.noop)
[ "lucio.torre@dbd81ab4-9648-0410-a770-9b81666e587d" ]
lucio.torre@dbd81ab4-9648-0410-a770-9b81666e587d
5d4a0628630cfb39b3ac6c56399a0527cb5f0fd6
4ed2ecfbbad9b86e483e436bf09dacc2a5bc4839
/setup.py
ba4a697ce1d4c6a9ce2643360a1707992509c665
[]
no_license
adamdeprince/directory-1
cd142718b40bad1aa53124890d15e82bb4a51dbc
f09f856890db6127895a9584734296add8bbf60f
refs/heads/master
2021-01-17T18:18:31.151826
2016-09-06T18:16:49
2016-09-06T18:16:49
67,522,585
0
0
null
2016-09-06T15:46:48
2016-09-06T15:46:48
null
UTF-8
Python
false
false
561
py
try:
    from setuptools import setup, Extension
except ImportError:
    from distutils.core import setup, Extension

setup(name='directory',
      author='Adam DePrince',
      author_email='[email protected]',
      url='https://github.com/pelotoncycle/dir',
      version='0.0.4',
      description="Expose opendir/readdir/closedir",
      scripts=[
          'scripts/pfind',
      ],
      ext_modules=(
          [
              Extension(
                  name='directory',
                  sources=['directorymodule.c']),
          ]))
c0f801066fbe4fbc488aa4f53aa2bdb9aa7d3d50
c4b30476c2b9e771ce600d9866610752001eab7d
/project_offer/interface/migrations/0005_auto_20190809_0003.py
24689e53db30e99c5bf2f34fcdc2744ee0fb690e
[]
no_license
dev-kanishk/project-offers
1d5701997f1a6862c8e5547b5880f2aa4f7f9958
1c850aad494ffb6c390e437c39238f9251736399
refs/heads/master
2022-07-25T08:56:20.673443
2020-05-22T17:09:01
2020-05-22T17:09:01
173,722,967
0
0
null
null
null
null
UTF-8
Python
false
false
1,671
py
# Generated by Django 2.1.5 on 2019-08-09 00:03 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('interface', '0004_comments'), ] operations = [ migrations.CreateModel( name='offer_images', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('photo', models.ImageField(blank=True, upload_to='images/')), ], ), migrations.RenameField( model_name='offer_discription', old_name='photo', new_name='index_photo', ), migrations.RemoveField( model_name='offer_discription', name='photo1', ), migrations.RemoveField( model_name='offer_discription', name='photo2', ), migrations.RemoveField( model_name='offer_discription', name='photo3', ), migrations.RemoveField( model_name='offer_discription', name='photo4', ), migrations.RemoveField( model_name='offer_discription', name='photo5', ), migrations.AddField( model_name='offer_discription', name='comments', field=models.IntegerField(default=0), ), migrations.AddField( model_name='offer_images', name='offer', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='interface.Offer_discription'), ), ]
[ "dev-kanishk" ]
dev-kanishk
7cfaebc2206dcf028b358be036617c9466daa7ba
b4874cbd7299492277ad28441bad05e6348307f2
/dummies/zerg/twelve_pool.py
8794fd0f17c5215f414c095498ee3bcfa1555e53
[ "MIT" ]
permissive
MadManSC2/sharpy-sc2
7d405578413c7a8f8fc1e4030ad719d7fe5df10a
13950357df2db58033daab24f076e3ae83f0b2a8
refs/heads/master
2021-01-05T03:38:58.038563
2020-03-07T20:35:24
2020-03-07T20:35:24
240,865,466
1
0
MIT
2020-02-16T09:38:05
2020-02-16T09:38:04
null
UTF-8
Python
false
false
2,983
py
from sharpy.general.extended_power import ExtendedPower from sharpy.managers.roles import UnitTask from sharpy.plans.acts import * from sharpy.plans.acts.zerg import * from sharpy.plans.require import * from sharpy.plans.tactics import * from sharpy.plans.tactics.zerg import * from sharpy.plans import BuildOrder, Step, StepBuildGas from sharpy.knowledges import KnowledgeBot from sc2 import BotAI, UnitTypeId, AbilityId, Race from sc2.units import Units class PlanZoneAttack2(PlanZoneAttack): def _start_attack(self, power: ExtendedPower, attackers: Units): drones = self.cache.own(UnitTypeId.DRONE).closest_n_units(self.knowledge.enemy_start_location, 10) self.retreat_multiplier = 0 # never retreat, never surrender for unit in drones: self.knowledge.roles.set_task(UnitTask.Attacking, unit) return super()._start_attack(power, attackers) class TwelvePool(KnowledgeBot): """Zerg 12 pool cheese tactic""" def __init__(self): super().__init__("12pool") async def create_plan(self) -> BuildOrder: build_step_buildings = [ # 12 Pool Step(None, ActBuilding(UnitTypeId.SPAWNINGPOOL, 1), RequiredUnitExists(UnitTypeId.SPAWNINGPOOL, 1)), ] finish = [ Step(RequireCustom(lambda k: self.enemy_structures.flying.exists and self.supply_used > 30), StepBuildGas(2)), ActExpand(2), RequiredUnitExists(UnitTypeId.DRONE, 20), MorphLair(), RequiredUnitExists(UnitTypeId.DRONE, 30), StepBuildGas(4), ActBuilding(UnitTypeId.SPIRE), ZergUnit(UnitTypeId.MUTALISK, 10, priority=True) ] build_step_units = [ # 12 Pool followed by overlord Step(RequiredUnitExists(UnitTypeId.SPAWNINGPOOL, 1), ActUnit(UnitTypeId.OVERLORD, UnitTypeId.LARVA, 2), RequiredUnitExists(UnitTypeId.OVERLORD, 2)), # TheMusZero Step(RequiredUnitExists(UnitTypeId.SPAWNINGPOOL, 1), ActUnit(UnitTypeId.DRONE, UnitTypeId.LARVA, 14), RequiredUnitExists(UnitTypeId.DRONE, 14)), # Queen for more larvae # BuildStep(RequiredUnitExists(UnitTypeId.SPAWNINGPOOL, 1), ActUnit(UnitTypeId.QUEEN, UnitTypeId.HATCHERY, 1), RequiredUnitExists(UnitTypeId.QUEEN, 1)), # Endless zerglings Step(RequiredUnitExists(UnitTypeId.SPAWNINGPOOL, 1), ActUnit(UnitTypeId.ZERGLING, UnitTypeId.LARVA), None), ] return BuildOrder([ build_step_buildings, finish, build_step_units, AutoOverLord(), InjectLarva(), PlanWorkerOnlyDefense(), PlanZoneDefense(), PlanZoneGather(), PlanZoneAttack2(2), PlanFinishEnemy(), ]) class LadderBot(TwelvePool): @property def my_race(self): return Race.Zerg
a25f33555b672c081f922302c3bca67aeac3a460
1f447da23a11510e091d60254e66139a185c1a8b
/controllers/xbox1-first-attempt/xbox1_dev.py
43413e2c35b3f993fff8cdd401ca2087563e1a51
[]
no_license
johnmorse/R2D2
467e1e84045b8d59851e1b9c71fc3e124395a01e
bf35225d5c72f9dac9fbb557ac80fcf1670e86ab
refs/heads/master
2021-03-31T15:45:52.767707
2020-05-09T23:12:47
2020-05-09T23:12:47
248,117,217
1
0
null
null
null
null
UTF-8
Python
false
false
5,609
py
#!/usr/bin/python
"""
XBOX1 Joystick controller

MUST RUN WITH !!SUDO!!

"""
#from __future__ import print_function
#from future import standard_library
#standard_library.install_aliases()
from builtins import str
from builtins import range
import pygame
#import requests
#import csv
#import configparser
import xbox1_defines
import os
import sys
import time
import math
import datetime
#import argparse
from io import StringIO
#from collections import defaultdict
#from shutil import copyfile
#import odrive
#import signal
#import SabertoothPacketSerial
#sys.path.insert(0, '/home/pi/r2_control')
#from r2utils import telegram, internet, mainconfig


def printButtonEvent(event):
    if event.button == xbox1_defines.BUT_A:
        print("A")
    elif event.button == xbox1_defines.BUT_B:
        print("B")
    elif event.button == xbox1_defines.BUT_X:
        print("X")
    elif event.button == xbox1_defines.BUT_Y:
        print("Y")
    elif event.button == xbox1_defines.BUT_TOPL:
        print("Top left button")
    elif event.button == xbox1_defines.BUT_TOPR:
        print("Top right button")
    elif event.button == xbox1_defines.BUT_MENU:
        print("Menu")
    elif event.button == xbox1_defines.BUT_JOYSTL:
        print("Left joystick click")
    elif event.button == xbox1_defines.BUT_JOYSTR:
        print("Right joystick click")
    else:
        print("Unknown button")
        print(event)


def getButtonStateString(j, buttons):
    hat = j.get_hat(0)
    print(hat)
    buf = StringIO()
    for i in range(buttons):
        button = j.get_button(i)
        # Up
        if i == xbox1_defines.BUT_EMPTY3:
            if hat[1] > 0:
                button = 1
        # Down
        elif i == xbox1_defines.BUT_EMPTY4:
            if hat[1] < 0:
                button = 1
        # Left
        elif i == xbox1_defines.BUT_EMPTY7:
            if hat[0] < 0:
                button = 1
        # Right
        elif i == xbox1_defines.BUT_EMPTY8:
            if hat[0] > 0:
                button = 1
        buf.write(str(button))

    # Fill in the blank 0 to get to the last two positions
    dif = buttons - xbox1_defines.BUT_EMPTY7 - 1
    while dif > 0:
        buf.write('0')
        dif -= 1

    # Left
    if buttons <= xbox1_defines.BUT_EMPTY7:
        print("Left")
        button = 0
        if hat[0] < 0:
            button = 1
        buf.write(str(button))

    # Right
    if buttons <= xbox1_defines.BUT_EMPTY8:
        button = 0
        if hat[0] > 0:
            button = 1
        buf.write(str(button))

    return buf.getvalue()


pygame.display.init()

while True:
    pygame.joystick.quit()
    pygame.joystick.init()
    num_joysticks = pygame.joystick.get_count()
    print("Waiting for joystick... (count: %s)" % num_joysticks)
    if num_joysticks != 0:
        break
    time.sleep(5)

pygame.init()
size = (pygame.display.Info().current_w, pygame.display.Info().current_h)
print("Framebuffer size: %d x %d" % (size[0], size[1]))

j = pygame.joystick.Joystick(0)
j.init()
buttons = j.get_numbuttons()
hats = j.get_numhats()
print("Joystick buttons(%d) hats(%d)" % (buttons, hats))

last_command = time.time()
joystick = True
previous = ""
_throttle = 0
_turning = 0
speed_fac = 0

# Main loop
while (joystick):
    if os.path.exists('/dev/input/js0') == False:
        joystick = False
        continue
    events = pygame.event.get()
    for event in events:
        if event.type == pygame.JOYBUTTONUP:
            # Ignore button up events, just track button down for now
            continue
        if event.type == pygame.JOYBUTTONDOWN:
            printButtonEvent(event)
            combo = getButtonStateString(j, buttons)
            print("Buttons pressed: %s" % combo)
            if event.button == 11:
                joystick = False
            continue
        elif event.type == pygame.JOYAXISMOTION:
            if event.axis == 0:
                # Left axis vertical
                print("Value (Drive): %s : Speed Factor : %s" % (event.value, speed_fac))
            elif event.axis == 1:
                # Left axis horizontal
                print("Value (Steer): %s" % event.value)
            elif event.axis == 2:
                # Right axis vertical
                print("Right joystick vertical: %s" % event.value)
            elif event.axis == 3:
                # Right axis horizontal
                print("Value (Dome): %s" % event.value)
            elif event.axis == 4:
                print("Right trigger")
            elif event.axis == 5:
                # Left trigger
                print("Left trigger")
            else:
                print("JOYAXISMOTION event.axis: %s" % event.axis)
        elif event.type == pygame.JOYHATMOTION:
            lpad = event.value[0] < 0
            rpad = event.value[0] > 0
            dpad = event.value[1] < 0
            upad = event.value[1] > 0
            if not (lpad | rpad | dpad | upad):
                print("D-pad all buttons are now up")
            elif lpad and upad:
                print("D-pad left/up")
            elif lpad and dpad:
                print("D-pad left/down")
            elif dpad and rpad:
                print("D-pad right/down")
            elif rpad and upad:
                print("D-pad right/up")
            elif lpad:
                print("D-pad left")
            elif rpad:
                print("D-pad right")
            elif upad:
                print("D-pad up")
            elif dpad:
                print("D-pad down")
            else:
                print("pygame.JOYHATMOTION value:%s" % event.value)
        else:
            print(event)
4fc731bd9b6781222a0222650015ec85079efab2
3b10afd8f25b4de3d875b9e518606c6e28e265a4
/Project_1/Q4.py
a634a5d07f399ff7ff08772d06625a2665420a54
[]
no_license
denisdenisenko/Python_proj_1
44446779888c77eac869b27a077ebd45f3f5a37e
dd1572fcea35e200db14288a0cdcb86b62b00bd6
refs/heads/master
2023-03-05T18:10:16.090230
2021-02-19T18:41:16
2021-02-19T18:41:16
340,458,776
0
0
null
null
null
null
UTF-8
Python
false
false
1,117
py
def domain_with_max_messages_sent(given_file):
    """
    Print the domain with the maximum number of messages sent.
    :param given_file: path to a .txt mail file containing "From:" header lines
    :return: None
    """
    # name = input('Enter file:')
    file_to_handle = None
    try:
        file_to_handle = open(given_file)
    except:
        print('File cannot be opened:', given_file)
        exit()

    simple_dictionary_counter = dict()
    for line in file_to_handle:
        line = line.split()
        if "From:" in line:
            # Extracting the domain from the line
            line = line[1].split("@")
            line = line[1]
            simple_dictionary_counter[line] = simple_dictionary_counter.get(line, 0) + 1
            continue

    maximum_value = None
    maximum_key = None
    # Finding the key with the maximum value
    for word, count in simple_dictionary_counter.items():
        if maximum_value is None or count > maximum_value:
            maximum_key = word
            maximum_value = count

    print(simple_dictionary_counter)
    print(" \n The domain with maximum messages sent is", maximum_key, "with", maximum_value, "messages")
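# A small, invented mail file to exercise the function above; the addresses are
# placeholders. Each "From:" header line is split on "@" and the domain counts
# are accumulated in the dictionary.
with open("sample_mail.txt", "w") as f:
    f.write("From: alice@example.org\n")
    f.write("From: bob@example.org\n")
    f.write("From: carol@another.net\n")

domain_with_max_messages_sent("sample_mail.txt")
# expected output: {'example.org': 2, 'another.net': 1}, with example.org reported as the maximum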
017db2c5f91aa1d7b05705a8c583be8bbb404b2d
8b670d2f44f2b8081402ca36b2e1845b7a7c17f7
/modeler/api/timeseries_archive.py
39f2405ad5b712d79baed3c701f2b7a85a5f0c68
[]
no_license
ZackLeirdahl/StockModeler
03fcdd6232a83e18c33a324402cac9f9251bba7c
bedeeb1e64120550f07a7acb19158d6ae4d6f22f
refs/heads/master
2021-10-13T01:04:12.719813
2021-09-28T16:09:38
2021-09-28T16:09:38
203,873,858
0
1
null
null
null
null
UTF-8
Python
false
false
2,767
py
import pandas as pd


def append_change(func):
    def wrapper(self, df):
        df = func(self, df)
        df[df.iloc[:, 1].name + '_change'] = [0] + [round(df.iloc[i, 1] - df.iloc[i - 1, 1], 4) for i in range(1, df.shape[0])]
        df[df.iloc[:, 1].name + '_changePercent'] = [0] + [round(100 * (df.iloc[i, 2] / df.iloc[i - 1, 1]), 2) for i in range(1, df.shape[0])]
        return df.iloc[:, 1:]
    return wrapper


def finish(func):
    def wrapper(self, df):
        self.df = func(self, df)
        return {'root': self.root, 'descriptor': self.descriptor, 'df': self.df}
    return wrapper


class TimeSeriesArchive:
    COLUMNS = ['implied_volatility', 'call_open_interest', 'put_open_interest', 'call_volume', 'put_volume', 'volume', 'open_interest']

    def __init__(self, *args, **kwargs):
        self.symbol, self.descriptor, self.root = args[0], 'timeseries', 'options'
        self.data = self.get_frame(args[1])

    @finish
    def get_frame(self, df):
        return pd.DataFrame(pd.Series(list(df['date'].unique()), name='date')).join([getattr(self, 'get_%s' % col)(df) for col in self.COLUMNS])

    @append_change
    def get_implied_volatility(self, df):
        return pd.DataFrame([{'date': d, 'implied_volatility': df[df['date'] == d]['implied_volatility'].mean()} for d in list(df['date'].unique())])[['date', 'implied_volatility']]

    @append_change
    def get_call_open_interest(self, df):
        return pd.DataFrame([{'date': d, 'call_open_interest': df[df['date'] == d]['call_open_interest'].min()} for d in list(df['date'].unique())])[['date', 'call_open_interest']]

    @append_change
    def get_put_open_interest(self, df):
        return pd.DataFrame([{'date': d, 'put_open_interest': df[df['date'] == d]['put_open_interest'].min()} for d in list(df['date'].unique())])[['date', 'put_open_interest']]

    @append_change
    def get_call_volume(self, df):
        return pd.DataFrame([{'date': d, 'call_volume': df[df['date'] == d]['call_volume'].max()} for d in list(df['date'].unique())])[['date', 'call_volume']]

    @append_change
    def get_put_volume(self, df):
        return pd.DataFrame([{'date': d, 'put_volume': df[df['date'] == d]['put_volume'].max()} for d in list(df['date'].unique())])[['date', 'put_volume']]

    @append_change
    def get_volume(self, df):
        return pd.DataFrame([{'date': d, 'volume': df[df['date'] == d]['put_volume'].max() + df[df['date'] == d]['call_volume'].max()} for d in list(df['date'].unique())])[['date', 'volume']]

    @append_change
    def get_open_interest(self, df):
        return pd.DataFrame([{'date': d, 'open_interest': df[df['date'] == d]['put_open_interest'].max() + df[df['date'] == d]['call_open_interest'].max()} for d in list(df['date'].unique())])[['date', 'open_interest']]
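# A self-contained sketch (with invented numbers) of the arithmetic that the
# append_change decorator adds to each series: the *_change column is the
# step-over-step difference and *_changePercent is that difference relative to
# the previous value.
import pandas as pd

df = pd.DataFrame({"date": ["2021-01-04", "2021-01-05", "2021-01-06"],
                   "volume": [100, 110, 99]})
df["volume_change"] = [0] + [round(df.iloc[i, 1] - df.iloc[i - 1, 1], 4)
                             for i in range(1, df.shape[0])]
df["volume_changePercent"] = [0] + [round(100 * (df.iloc[i, 2] / df.iloc[i - 1, 1]), 2)
                                    for i in range(1, df.shape[0])]
print(df)  # change: [0, 10, -11]; changePercent: [0, 10.0, -10.0]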
7eca167313115265ce2b0d62dbcbf552cb1591f2
0c3b10d6f4a8c7684e840713e8e08f00a82a5c6e
/sw/radiotuner/frontend/tui.py
3f5dfb64f20600c52bd00524d69033ff70199bbc
[ "MIT" ]
permissive
diallodavid/pi-radio-bonnet
024073c3927c9e4ac2a46bf67e693c74f9e799fd
8b43468ff1ac23d57fef9f57c4f39fed36f827a6
refs/heads/master
2022-11-30T06:09:58.607368
2020-07-30T17:25:35
2020-07-30T17:25:35
null
0
0
null
null
null
null
UTF-8
Python
false
false
4,175
py
import curses import time def TUI_main(tui_q, player_q): # Initialize Terminal User Interface stdscr = curses.initscr() curses.noecho() curses.cbreak() stdscr.keypad(True) stdscr.nodelay(True) win = curses.newwin(32, 12, 0, 0) curses.curs_set(False) # Initialize colors curses.start_color() curses.init_pair(1, curses.COLOR_BLUE, curses.COLOR_WHITE) curses.init_pair(2, curses.COLOR_BLUE, curses.COLOR_BLACK) curses.init_pair(3, curses.COLOR_RED, curses.COLOR_BLACK) curses.init_pair(4, curses.COLOR_GREEN, curses.COLOR_BLACK) curses.init_pair(5, curses.COLOR_YELLOW, curses.COLOR_BLACK) run_app=True while(run_app): char = stdscr.getch() if char == 261: # RIGTH ARROW player_q.put(['seek_up',None]) stdscr.clear() if char == 260: # LEFT ARROW player_q.put(['seek_down',None]) stdscr.clear() if char == 259: # UP ARROW player_q.put(['vol_up',None]) stdscr.clear() if char == 258: # DOWN ARROW player_q.put(['vol_down',None]) stdscr.clear() if char == ord('q'): # QUIT player_q.put(['quit',None]) run_app = False while tui_q.empty() is False: [msg, radio, volume]= tui_q.get() if msg == 'radiovol': refresh_TUI(stdscr, radio, volume) if msg == 'quit': run_app = False time.sleep(0.1) # End APP stdscr.keypad(False) stdscr.nodelay(False) curses.nocbreak() curses.echo() curses.endwin() def refresh_TUI(screen, radio, volume): blankline = " " screen.addstr(0, 0, " Si4731 Radio Receiver ", curses.A_REVERSE) if (radio is not None): screen.addstr(2, 6, "<<", curses.A_NORMAL) screen.addstr(2, 24, ">>", curses.A_NORMAL) screen.addstr(2, 11, ' ' + str(radio.station.Frequency) + ' MHz ', curses.color_pair(1) + curses.A_REVERSE) if(radio.rds.PS.string.isprintable()): screen.addstr(4,0,blankline,curses.A_NORMAL) screen.addstr(4, 16-int(len(radio.rds.PS.string)/2), radio.rds.PS.string, curses.color_pair(3) + curses.A_ITALIC) if(radio.rds.RadioTextA.string.isprintable()): screen.addstr(5,0,blankline,curses.A_NORMAL) screen.addstr(6,0,blankline,curses.A_NORMAL) if (len(radio.rds.RadioTextA.string) < 32): screen.addstr(5, 16-int(len(radio.rds.RadioTextA.string)/2), radio.rds.RadioTextA.string, curses.color_pair(2) + curses.A_NORMAL) else: RText = radio.rds.RadioTextA.string.split(maxsplit=2) screen.addstr(5, 16-int(len(RText[0])/2), RText[0], curses.color_pair(2) + curses.A_NORMAL) screen.addstr(6, 16-int(len(RText[1])/2), RText[1], curses.color_pair(2) + curses.A_NORMAL) screen.addstr(8, 5, "RSSI: " + str(radio.station.RSSI).zfill(2), curses.A_NORMAL) screen.addstr(8, 20, "SNR: " + str(radio.station.SNR).zfill(2), curses.A_NORMAL) screen.addstr(10, 0, " by iz2k ", curses.color_pair(4) + curses.A_NORMAL) screen.addstr(11, 0, " " + chr(9650) + chr(9660) + " use arrows " + chr(9668) + " " + chr(9658) + " (q)quit", curses.A_REVERSE) print_vol(screen, volume) screen.refresh() def get_vol_color_index(vol): idx = 4 # Green if vol > 4: idx = 5 # YELLOW if vol > 8: idx = 3 # RED return idx def print_vol(screen, volume): # Print volume squares for i in range(1, 11): if volume/10 > (10-i): screen.addstr(i, 31, '#', curses.color_pair(get_vol_color_index(11-i)) + curses.A_NORMAL) # Print volume number # Caluclate vertical position volposv = int(11-volume/10) # Fix position for 0% if(volposv>10): volposv=10 # Calculate horizontal position volposh = 32 - len(str(volume)) # Print volume screen.addstr(volposv, volposh, str(volume), curses.color_pair(get_vol_color_index(volume/10)) + curses.A_NORMAL)