blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 5
283
| content_id
stringlengths 40
40
| detected_licenses
sequencelengths 0
41
| license_type
stringclasses 2
values | repo_name
stringlengths 7
96
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 58
values | visit_date
timestamp[us] | revision_date
timestamp[us] | committer_date
timestamp[us] | github_id
int64 12.7k
662M
⌀ | star_events_count
int64 0
35.5k
| fork_events_count
int64 0
20.6k
| gha_license_id
stringclasses 11
values | gha_event_created_at
timestamp[us] | gha_created_at
timestamp[us] | gha_language
stringclasses 43
values | src_encoding
stringclasses 9
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 7
5.88M
| extension
stringclasses 30
values | content
stringlengths 7
5.88M
| authors
sequencelengths 1
1
| author
stringlengths 0
73
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
a9c0c8275b4229082edfd52483c1218d2fbdf06c | 4dfeeb1a5a851b368d7a11107789913ac0a92dc6 | /food/migrations/0039_auto_20180305_0518.py | 120aad1d8d6c90c26eac0ce999a41b19e3b7046d | [] | no_license | caseyalananderson/thosedanggirls | dbc6a6e46a405099ffdf2ec04494666f56ae9f85 | 10f2c6d0f7760a0634b04aaab8e5715cae91411f | refs/heads/master | 2021-09-10T20:47:05.768447 | 2018-04-02T04:22:08 | 2018-04-02T04:22:08 | 122,774,927 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,648 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-03-05 05:18
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Move nine boolean category flags from FoodPost onto Recipe.

    Each flag (breakfast, dessert, entree, gluten_free, healthy, savory,
    snack, vegan, vegetarian) is removed from 'foodpost' and re-created on
    'recipe' with default=False.

    NOTE(review): the old FoodPost values are dropped, not migrated — any
    existing flag data is lost by this migration; confirm that was intended.
    """
    dependencies = [
        ('food', '0038_auto_20180305_0502'),
    ]
    operations = [
        # Drop the category booleans from FoodPost...
        migrations.RemoveField(
            model_name='foodpost',
            name='breakfast',
        ),
        migrations.RemoveField(
            model_name='foodpost',
            name='dessert',
        ),
        migrations.RemoveField(
            model_name='foodpost',
            name='entree',
        ),
        migrations.RemoveField(
            model_name='foodpost',
            name='gluten_free',
        ),
        migrations.RemoveField(
            model_name='foodpost',
            name='healthy',
        ),
        migrations.RemoveField(
            model_name='foodpost',
            name='savory',
        ),
        migrations.RemoveField(
            model_name='foodpost',
            name='snack',
        ),
        migrations.RemoveField(
            model_name='foodpost',
            name='vegan',
        ),
        migrations.RemoveField(
            model_name='foodpost',
            name='vegetarian',
        ),
        # ...and recreate the same flags on Recipe, defaulting to False.
        migrations.AddField(
            model_name='recipe',
            name='breakfast',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='recipe',
            name='dessert',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='recipe',
            name='entree',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='recipe',
            name='gluten_free',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='recipe',
            name='healthy',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='recipe',
            name='savory',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='recipe',
            name='snack',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='recipe',
            name='vegan',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='recipe',
            name='vegetarian',
            field=models.BooleanField(default=False),
        ),
    ]
| [
"[email protected]"
] | |
def bubble_sort(number_list: list) -> list:
    """Sort number_list in place in ascending order and return it.

    Classic bubble sort with an early-exit flag: if a full pass performs
    no swaps the list is already sorted, so an already-ordered input is
    handled in a single O(n) pass instead of always costing O(n^2).
    """
    num_length = len(number_list)
    for i in range(num_length):
        swapped = False  # did this pass move anything?
        # After pass i the last i elements are in place; skip them.
        for j in range(0, num_length - i - 1):
            if number_list[j] > number_list[j + 1]:
                number_list[j], number_list[j +
                                            1] = number_list[j + 1], number_list[j]
                swapped = True
        if not swapped:
            # No swaps means the list is sorted; stop early.
            break
    return number_list
# Demo entry point: sort a sample list and print the result.
if __name__ == '__main__':
    num_list = [59, 1, 51, 2, 31, 23, 0]
    res = bubble_sort(num_list)
    print(res)
| [
"[email protected]"
] | |
5dacaf4423aa59fce5d995c6fc764212958f3f15 | 68af1af963119f2d0d91cc57fe34067d2e383f85 | /example/test_full/tests/test04_fk_fkback_multiple.py | cc20e114c77b4b19d405fcb96ec85dcc1d5c578e | [
"MIT"
] | permissive | olivierdalang/django-computedfields | 9d61d4e84ca1040dd4106f71176b676b05be85cf | 13a5c107904e7596eeafe948ffc66c9702d95589 | refs/heads/master | 2021-01-01T09:34:48.843533 | 2019-03-08T21:58:21 | 2019-03-08T21:58:21 | 239,220,787 | 0 | 0 | MIT | 2020-02-08T23:45:53 | 2020-02-08T23:45:53 | null | UTF-8 | Python | false | false | 5,955 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from .base import GenericModelTestBase, MODELS
class MultipleDependenciesOne(GenericModelTestBase):
    """Computed field with two dependency paths at once.

    C.comp depends on 'f_cb.f_ba.ag_f.gd_f#name' (fk + fk + fk_back +
    fk_back) and 'cd_f.de_f#name' (fk_back + fk_back); its value is C's
    own name, plus the names of D objects reachable through G, plus the
    names of E objects behind C's D set. The tests verify recomputation
    on insert, update and delete of related objects.
    """
    def setUp(self):
        self.setDeps({
            # fk + fk + fk_back + fk_back
            'C': {'depends': ['f_cb.f_ba.ag_f.gd_f#name', 'cd_f.de_f#name'],
                  'func': lambda self: self.name + ''.join(
                MODELS['D'].objects.filter(f_dg__in=MODELS['G'].objects.filter(
                    f_ga=self.f_cb.f_ba)).values_list('name', flat=True)) + ''.join(
                MODELS['E'].objects.filter(f_ed__in=self.cd_f.all()).values_list('name', flat=True)
            )},
        })
        # Object graph: a <- b <- c <- d <- e <- f <- g, with g linked back
        # to both f and a, and d linked to g.
        self.a = self.models.A(name='a')
        self.a.save()
        self.b = self.models.B(name='b', f_ba=self.a)
        self.b.save()
        self.c = self.models.C(name='c', f_cb=self.b)
        self.c.save()
        self.d = self.models.D(name='d', f_dc=self.c)
        self.d.save()
        self.e = self.models.E(name='e', f_ed=self.d)
        self.e.save()
        self.f = self.models.F(name='f', f_fe=self.e)
        self.f.save()
        self.g = self.models.G(name='g', f_gf=self.f, f_ga=self.a)
        self.g.save()
        self.d.f_dg = self.g
        self.d.save()
    def tearDown(self):
        self.resetDeps()
    def test_C_insert(self):
        # comp = 'c' (own name) + 'd' (D via G) + 'e' (E via D).
        self.c.refresh_from_db()
        self.assertEqual(self.c.comp, 'cde')
    def test_C_update(self):
        self.c.refresh_from_db()
        self.assertEqual(self.c.comp, 'cde')
        # change D
        self.d.name = 'D'
        self.d.save()
        self.c.refresh_from_db()
        self.assertEqual(self.c.comp, 'cDe')
        # add new D
        new_d = self.models.D(name='d2', f_dg=self.g)
        new_d.save()
        self.c.refresh_from_db()
        self.assertEqual(self.c.comp, 'cDd2e')
        # change E
        self.e.name = 'E'
        self.e.save()
        self.c.refresh_from_db()
        self.assertEqual(self.c.comp, 'cDd2E')
        # add new E
        new_e = self.models.E(name="e2", f_ed=self.d)
        new_e.save()
        self.c.refresh_from_db()
        self.assertEqual(self.c.comp, 'cDd2Ee2')
    def test_C_update_deletes(self):
        # change D
        self.d.name = 'D'
        self.d.save()
        self.c.refresh_from_db()
        self.assertEqual(self.c.comp, 'cDe')
        # add new D
        new_d = self.models.D(name='d2', f_dg=self.g)
        new_d.save()
        self.c.refresh_from_db()
        self.assertEqual(self.c.comp, 'cDd2e')
        # change E
        self.e.name = 'E'
        self.e.save()
        self.c.refresh_from_db()
        self.assertEqual(self.c.comp, 'cDd2E')
        # add new E
        new_e = self.models.E(name="e2", f_ed=self.d)
        new_e.save()
        self.c.refresh_from_db()
        self.assertEqual(self.c.comp, 'cDd2Ee2')
        # delete new_d
        new_d.delete()
        self.c.refresh_from_db()
        self.assertEqual(self.c.comp, 'cDEe2')
        # delete d - should remove D, E and e2
        self.d.delete()
        self.c.refresh_from_db()
        self.assertEqual(self.c.comp, 'c')
class MultipleDependenciesTwo(GenericModelTestBase):
    """Computed field mixing reverse and forward FK chains.

    D.comp depends on 'de_f.ef_f.fg_f.f_ga.f_ac.f_cb#name' (three fk_back
    hops followed by three fk hops) and 'f_dc.f_cb#name' (fk + fk); its
    value is D's own name, plus B names reached via G->A->C->B, plus the
    B name behind D's own C.
    """
    def setUp(self):
        self.setDeps({
            # fk_back + fk_back + fk_back + fk + fk + fk
            'D': {'depends': ['de_f.ef_f.fg_f.f_ga.f_ac.f_cb#name', 'f_dc.f_cb#name'],
                  'func': lambda self: self.name + ''.join(filter(bool, MODELS['G'].objects.filter(
                f_gf__in=MODELS['F'].objects.filter(
                    f_fe__in=self.de_f.all())).values_list(
                'f_ga__f_ac__f_cb__name', flat=True))) + self.f_dc.f_cb.name}
        })
        # Object graph with a cycle a -> c (f_ac) so both paths resolve to b.
        self.a = self.models.A(name='a')
        self.a.save()
        self.b = self.models.B(name='b', f_ba=self.a)
        self.b.save()
        self.c = self.models.C(name='c', f_cb=self.b)
        self.c.save()
        self.a.f_ac = self.c
        self.a.save()
        self.d = self.models.D(name='d', f_dc=self.c)
        self.d.save()
        self.e = self.models.E(name='e', f_ed=self.d)
        self.e.save()
        self.f = self.models.F(name='f', f_fe=self.e)
        self.f.save()
        self.g = self.models.G(name='g', f_gf=self.f, f_ga=self.a)
        self.g.save()
    def tearDown(self):
        self.resetDeps()
    def test_D_insert(self):
        # comp = 'd' + 'b' (via long reverse/forward chain) + 'b' (via f_dc.f_cb).
        self.d.refresh_from_db()
        self.assertEqual(self.d.comp, 'dbb')
    def test_D_update(self):
        self.d.refresh_from_db()
        self.assertEqual(self.d.comp, 'dbb')
        # change B --> should change both deps
        self.b.name = 'B'
        self.b.save()
        self.d.refresh_from_db()
        self.assertEqual(self.d.comp, 'dBB')
        # add new A, B and C, change f_ga
        new_b = self.models.B(name='b2')
        new_b.save()
        new_c = self.models.C(name='c2', f_cb=new_b)
        new_c.save()
        new_a = self.models.A(name='A', f_ac=new_c)
        new_a.save()
        self.g.f_ga = new_a
        self.g.save()
        self.d.refresh_from_db()
        # this should only change the "first" B dep
        self.assertEqual(self.d.comp, 'db2B')
    def test_D_update_deletes(self):
        # change B --> should change both deps
        self.b.name = 'B'
        self.b.save()
        self.d.refresh_from_db()
        self.assertEqual(self.d.comp, 'dBB')
        # add new A, B and C, change f_ga
        new_b = self.models.B(name='b2')
        new_b.save()
        new_c = self.models.C(name='c2', f_cb=new_b)
        new_c.save()
        new_a = self.models.A(name='A', f_ac=new_c)
        new_a.save()
        self.g.f_ga = new_a
        self.g.save()
        self.d.refresh_from_db()
        # this should only change the "first" B dep
        self.assertEqual(self.d.comp, 'db2B')
        # delete new_b - should remove b2
        new_b.delete()
        self.d.refresh_from_db()
        self.assertEqual(self.d.comp, 'dB')
| [
"[email protected]"
] | |
ae804d594a3f188266be87fbd9b5ba5cd1e02a65 | 0ce934553a854e5a3d28971f73be19d0912449bf | /contactUs/forms.py | 1ffeac66e649dbe0e2f24bcc95ca43edc6f5af03 | [] | no_license | keennhlc/GKWeb | d0c1c2617e2334ee9aba6e3b741d049cf75c9a62 | db34c14a4be13fab1cf16de66fc406b7142d7fcb | refs/heads/master | 2020-05-01T09:19:13.871041 | 2019-03-24T10:20:40 | 2019-03-24T10:20:40 | 177,397,584 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 303 | py | from django import forms
from .models import ContactInfo
class ContactForm(forms.ModelForm):
    """ModelForm collecting a prospective student's contact details."""
    class Meta:
        # All user-supplied ContactInfo fields; list order controls the
        # rendered form field order.
        model = ContactInfo
        fields = ['first_name', 'last_name', 'middle_name', 'birthday', 'gender', 'previous_schooling', 'contact_number', 'email', 'interested_course', 'comment']
| [
"[email protected]"
] | |
# Bootstrap setuptools on legacy installs; Python 3 already ships with it.
try:
    # Try using ez_setup to install setuptools if not already installed.
    from ez_setup import use_setuptools
    use_setuptools()
except ImportError:
    # Ignore import error and assume Python 3 which already has setuptools.
    pass
from setuptools import setup, find_packages

# Trove classifiers: 'Development Status' must use its numbered form
# ('3 - Alpha'); the bare 'Alpha' value is not a valid classifier and is
# rejected when uploading to PyPI.
classifiers = ['Development Status :: 3 - Alpha',
               'Operating System :: POSIX :: Linux',
               'License :: OSI Approved :: MIT License',
               'Programming Language :: Python :: 2.7',
               'Topic :: System :: Hardware']

setup(name              = 'STAR',
      version           = '1.0.1',
      author            = 'Max Ferguson',
      author_email      = '[email protected]',
      description       = 'Reinforcement learning code and hardware drivers for Stanford Toy Autonomous Racecar',
      license           = 'MIT',
      classifiers       = classifiers,
      url               = 'https://github.com/maxkferg/stanford-cs234-project/',
      dependency_links  = ['https://github.com/adafruit/Adafruit_Python_GPIO/tarball/master#egg=Adafruit-GPIO-0.6.5'],
      install_requires  = ['Adafruit-GPIO>=0.6.5'],
      packages          = find_packages())
| [
"[email protected]"
] | |
e77505ef3412a7ba2a496293b82745688312c548 | 6edf34c85835d1ebad9812588b1bb765cb9b398c | /scrape_a_column.py | 4f336e8e3504dc6642edd05fd350a4b3ba9f6aa3 | [] | no_license | n8mob/py | f2610be3a6f65740631b955296c1e52babc3d467 | a37ad5a5a18432e2cd21480f35e5a0eb0db7ac01 | refs/heads/main | 2023-08-18T05:08:47.732331 | 2023-08-17T17:11:32 | 2023-08-17T17:11:32 | 87,347,522 | 0 | 0 | null | 2023-08-22T23:52:42 | 2017-04-05T19:17:27 | Lua | UTF-8 | Python | false | false | 1,855 | py | from os.path import exists
import requests
from bs4 import BeautifulSoup
vhf_society_repeater_list_url = 'http://www.utahvhfs.org/rptr.html'

# Module-level cache: falsy until the page has been parsed.
soup = {}
cached_request_path = 'page.cache.html'

if __name__ == '__main__':
    if not soup:
        print('no cached object')
        if exists(cached_request_path):
            print(f'reading file: {cached_request_path}')
            # Close the cache file deterministically instead of handing an
            # open (and never-closed) file object straight to BeautifulSoup.
            with open(cached_request_path, 'r') as cache_file:
                soup = BeautifulSoup(cache_file.read(), features='html.parser')
        else:
            print(f'no file, requesting from {vhf_society_repeater_list_url}')
            response = requests.get(vhf_society_repeater_list_url)
            print(f'response has encoding: {response.encoding}')
            # Fix: the original opened this file and never closed it,
            # leaking the handle and risking an unflushed cache.
            with open(cached_request_path, 'w') as f:
                f.write(response.text)
            soup = BeautifulSoup(response.text, features='html.parser')
    print(f'Loaded HTML document: {soup.title.text}')
    # Table index 3 holds the 2 m repeater list; first row is the header.
    two_meter_rows = soup.findAll('table')[3].findAllNext('tr')
    header_row = two_meter_rows[0]
    two_meter_rows = two_meter_rows[1:]
    table_headers = [th.text.replace('\xa0', ' ') for th in two_meter_rows[0].findAllNext('th')]
    ctcss_column_index = table_headers.index('CTCSS')
    a_row = two_meter_rows[10]
    print(f'{a_row.text=}')
    data_from_first_row = [td.text.replace('\xa0', ' ') for td in a_row.find_all('td')]
    print(f'{data_from_first_row=}')
    # Collect every row's cell texts, normalising non-breaking spaces.
    all_data = [[td.text.replace('\xa0', ' ') for td in row.find_all('td')]
                for row in two_meter_rows]
    print(f'{len(all_data)=}')
    unique_ctcss_tones = {td[ctcss_column_index] for td in all_data if len(td) > ctcss_column_index}
    pl_tones = []
    for tone in unique_ctcss_tones:
        try:
            pl_tones.append(float(tone))
        except ValueError:
            # Non-numeric entries (blank cells, notes) are skipped on purpose.
            ...
    pl_tones = sorted(pl_tones)
    print(f'{pl_tones=}')
| [
"[email protected]"
] | |
ae09a2bc39ad7fcea0730f7d55a139a6b11db681 | 6364bb727b623f06f6998941299c49e7fcb1d437 | /msgraph-cli-extensions/src/subscriptions/azext_subscriptions/vendored_sdks/subscriptions/_configuration.py | 1fc1e11a28fb7fa4c472746b400392115428c759 | [
"MIT"
] | permissive | kanakanaidu/msgraph-cli | 1d6cd640f4e10f4bdf476d44d12a7c48987b1a97 | b3b87f40148fb691a4c331f523ca91f8a5cc9224 | refs/heads/main | 2022-12-25T08:08:26.716914 | 2020-09-23T14:29:13 | 2020-09-23T14:29:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,633 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMHttpLoggingPolicy
# These names are used only in "# type:" comments, so import them lazily
# to avoid any runtime cost / hard dependency at import time.
if TYPE_CHECKING:
    # pylint: disable=unused-import,ungrouped-imports
    from typing import Any, Optional
    from azure.core.credentials import TokenCredential
# Package version string; "unknown" here — presumably substituted at
# release/generation time (TODO confirm).
VERSION = "unknown"
class SubscriptionsConfiguration(Configuration):
    """Configuration for Subscriptions.
    Note that all parameters used to create this instance are saved as instance
    attributes.
    :param credential: Credential needed for the client to connect to Azure.
    :type credential: ~azure.core.credentials.TokenCredential
    :param top: Show only the first n items.
    :type top: int
    :param skip: Skip the first n items.
    :type skip: int
    :param search: Search items by search phrases.
    :type search: str
    :param filter: Filter items by property values.
    :type filter: str
    :param count: Include count of items.
    :type count: bool
    """
    # NOTE: autogenerated by AutoRest (see file header) — hand edits will be
    # lost on regeneration.
    def __init__(
        self,
        credential,  # type: "TokenCredential"
        top=None,  # type: Optional[int]
        skip=None,  # type: Optional[int]
        search=None,  # type: Optional[str]
        filter=None,  # type: Optional[str]
        count=None,  # type: Optional[bool]
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        # Fail fast: a credential is mandatory for every request.
        if credential is None:
            raise ValueError("Parameter 'credential' must not be None.")
        super(SubscriptionsConfiguration, self).__init__(**kwargs)
        self.credential = credential
        self.top = top
        self.skip = skip
        self.search = search
        self.filter = filter
        self.count = count
        # Default ARM scope; callers may append extra scopes via the
        # 'credential_scopes' keyword argument.
        self.credential_scopes = ['https://management.azure.com/.default']
        self.credential_scopes.extend(kwargs.pop('credential_scopes', []))
        kwargs.setdefault('sdk_moniker', 'subscriptions/{}'.format(VERSION))
        self._configure(**kwargs)
    def _configure(
        self,
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        # Each pipeline policy can be overridden through kwargs; otherwise the
        # azure-core default is constructed.
        self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
        self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
        self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
        self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
        self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
        self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs)
        self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
        self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs)
        self.authentication_policy = kwargs.get('authentication_policy')
        # Only build a bearer-token policy when none was supplied explicitly.
        if self.credential and not self.authentication_policy:
            self.authentication_policy = policies.BearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
| [
"[email protected]"
] | |
# Tiny arithmetic demo: print the four basic operations on two operands.
a = 5
b = 10
print(f"addition of {a} and {b} is {a + b}")
print(f"multiplication of  {a} and {b} is {a * b}")
print(f"division of {a} and {b} is {a / b}")
print(f"subraction of {a} and {b} is {a - b}")
"[email protected]"
] | |
e0109f6985e842ce24a7b12d4edcfd170af914ac | f0ce2b0f8fc56b6aa6dc34b636c3c7ed2f5fe128 | /main/settings.py | 674ea8823eb2b1612278e9552916be8f3ca4c3e5 | [] | no_license | MoSanogo/profiles | 5da58f190e2781068ba9903c3bd0b5830b45fcbc | 07d20aa5147d3bee5b2772ca11153cb01541dc18 | refs/heads/master | 2023-06-05T17:11:45.811017 | 2021-06-22T03:42:56 | 2021-06-22T03:42:56 | 372,931,206 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,167 | py | """
Django settings for main project.
Generated by 'django-admin startproject' using Django 3.1.7.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control and DEBUG is on —
# both must be changed (e.g. via environment variables) before deployment.
SECRET_KEY = '4cy88r4_2in+s9ryu+(gpb@0!2#uo^+6nlkbe^mdjay%ghmdd&'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'rest_framework',
    'rest_framework.authtoken',
    'profiles',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'main.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'main.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': BASE_DIR / 'db.sqlite3',
    }
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
# Use the custom user model defined in the 'profiles' app.
AUTH_USER_MODEL="profiles.UserProfile"
"[email protected]"
] | |
d470a432299b5d1d39d7f9de4a11d83141eb34ea | eb7653acd0ce24e4c0ab4855180d5ef550dbc698 | /app/routes.py | bafd3c6b716dd4e97240fa5a7ad1b8589b023e7c | [] | no_license | lyabakhP/flask-blog | a449e06a92b27022d5e8941804922ce6209d42a2 | 60077b757b28796fee2b6a8f41408818f7078b25 | refs/heads/master | 2022-12-16T05:52:12.206014 | 2018-12-10T20:00:14 | 2018-12-10T20:00:14 | 160,950,846 | 0 | 0 | null | 2022-12-08T00:48:23 | 2018-12-08T15:11:56 | Python | UTF-8 | Python | false | false | 8,374 | py | # from datetime import datetime
# from flask import render_template, flash, redirect, url_for, request, g, \
# jsonify
# from flask_login import login_user, logout_user, current_user, login_required
# from werkzeug.urls import url_parse
# from flask_babel import _, get_locale
# from guess_language import guess_language
# from app import app, db
# from app.forms import LoginForm, RegistrationForm, EditProfileForm, PostForm, \
# ResetPasswordRequestForm, ResetPasswordForm
# from app.models import User, Post
# from app.email import send_password_reset_email
# from app.translate import translate
# @app.before_request
# def before_request():
# if current_user.is_authenticated:
# current_user.last_seen = datetime.utcnow()
# db.session.commit()
# g.locale = str(get_locale())
# @app.route('/', methods=['GET', 'POST'])
# @app.route('/index', methods=['GET', 'POST'])
# @login_required
# def index():
# form = PostForm()
# if form.validate_on_submit():
# language = guess_language(form.post.data)
# if language == 'UNKNOWN' or len(language) > 5:
# language = ''
# post = Post(body=form.post.data, author=current_user,
# language=language)
# db.session.add(post)
# db.session.commit()
# flash(_('Your post is now live!'))
# return redirect(url_for('index'))
# page = request.args.get('page', 1, type=int)
# posts = current_user.followed_posts().paginate(
# page, app.config['POSTS_PER_PAGE'], False)
# next_url = url_for('index', page=posts.next_num) \
# if posts.has_next else None
# prev_url = url_for('index', page=posts.prev_num) \
# if posts.has_prev else None
# return render_template('index.html', title=_('Home'), form=form,
# posts=posts.items, next_url=next_url,
# prev_url=prev_url)
# @app.route('/explore')
# @login_required
# def explore():
# page = request.args.get('page', 1, type=int)
# posts = Post.query.order_by(Post.timestamp.desc()).paginate(
# page, app.config['POSTS_PER_PAGE'], False)
# next_url = url_for('explore', page=posts.next_num) \
# if posts.has_next else None
# prev_url = url_for('explore', page=posts.prev_num) \
# if posts.has_prev else None
# return render_template('index.html', title=_('Explore'),
# posts=posts.items, next_url=next_url,
# prev_url=prev_url)
# @app.route('/login', methods=['GET', 'POST'])
# def login():
# if current_user.is_authenticated:
# return redirect(url_for('index'))
# form = LoginForm()
# if form.validate_on_submit():
# user = User.query.filter_by(username=form.username.data).first()
# if user is None or not user.check_password(form.password.data):
# flash(_('Invalid username or password'))
# return redirect(url_for('login'))
# login_user(user, remember=form.remember_me.data)
# next_page = request.args.get('next')
# if not next_page or url_parse(next_page).netloc != '':
# next_page = url_for('index')
# return redirect(next_page)
# return render_template('login.html', title=_('Sign In'), form=form)
# @app.route('/logout')
# def logout():
# logout_user()
# return redirect(url_for('index'))
# @app.route('/register', methods=['GET', 'POST'])
# def register():
# if current_user.is_authenticated:
# return redirect(url_for('index'))
# form = RegistrationForm()
# if form.validate_on_submit():
# user = User(username=form.username.data, email=form.email.data)
# user.set_password(form.password.data)
# db.session.add(user)
# db.session.commit()
# flash(_('Congratulations, you are now a registered user!'))
# return redirect(url_for('login'))
# return render_template('register.html', title=_('Register'), form=form)
# @app.route('/reset_password_request', methods=['GET', 'POST'])
# def reset_password_request():
# if current_user.is_authenticated:
# return redirect(url_for('index'))
# form = ResetPasswordRequestForm()
# if form.validate_on_submit():
# user = User.query.filter_by(email=form.email.data).first()
# if user:
# send_password_reset_email(user)
# flash(
# _('Check your email for the instructions to reset your password'))
# return redirect(url_for('login'))
# return render_template('reset_password_request.html',
# title=_('Reset Password'), form=form)
# @app.route('/reset_password/<token>', methods=['GET', 'POST'])
# def reset_password(token):
# if current_user.is_authenticated:
# return redirect(url_for('index'))
# user = User.verify_reset_password_token(token)
# if not user:
# return redirect(url_for('index'))
# form = ResetPasswordForm()
# if form.validate_on_submit():
# user.set_password(form.password.data)
# db.session.commit()
# flash(_('Your password has been reset.'))
# return redirect(url_for('login'))
# return render_template('reset_password.html', form=form)
# @app.route('/user/<username>')
# @login_required
# def user(username):
# user = User.query.filter_by(username=username).first_or_404()
# page = request.args.get('page', 1, type=int)
# posts = user.posts.order_by(Post.timestamp.desc()).paginate(
# page, app.config['POSTS_PER_PAGE'], False)
# next_url = url_for('user', username=user.username, page=posts.next_num) \
# if posts.has_next else None
# prev_url = url_for('user', username=user.username, page=posts.prev_num) \
# if posts.has_prev else None
# return render_template('user.html', user=user, posts=posts.items,
# next_url=next_url, prev_url=prev_url)
# @app.route('/edit_profile', methods=['GET', 'POST'])
# @login_required
# def edit_profile():
# form = EditProfileForm(current_user.username)
# if form.validate_on_submit():
# current_user.username = form.username.data
# current_user.about_me = form.about_me.data
# db.session.commit()
# flash(_('Your changes have been saved.'))
# return redirect(url_for('edit_profile'))
# elif request.method == 'GET':
# form.username.data = current_user.username
# form.about_me.data = current_user.about_me
# return render_template('edit_profile.html', title=_('Edit Profile'),
# form=form)
# @app.route('/follow/<username>')
# @login_required
# def follow(username):
# user = User.query.filter_by(username=username).first()
# if user is None:
# flash(_('User %(username)s not found.', username=username))
# return redirect(url_for('index'))
# if user == current_user:
# flash(_('You cannot follow yourself!'))
# return redirect(url_for('user', username=username))
# current_user.follow(user)
# db.session.commit()
# flash(_('You are following %(username)s!', username=username))
# return redirect(url_for('user', username=username))
# @app.route('/unfollow/<username>')
# @login_required
# def unfollow(username):
# user = User.query.filter_by(username=username).first()
# if user is None:
# flash(_('User %(username)s not found.', username=username))
# return redirect(url_for('index'))
# if user == current_user:
# flash(_('You cannot unfollow yourself!'))
# return redirect(url_for('user', username=username))
# current_user.unfollow(user)
# db.session.commit()
# flash(_('You are not following %(username)s.', username=username))
# return redirect(url_for('user', username=username))
# @app.route('/translate', methods=['POST'])
# @login_required
# def translate_text():
# return jsonify({'text': translate(request.form['text'],
# request.form['source_language'],
# request.form['dest_language'])}) | [
"[email protected]"
] | |
8c556ece085b5abb68e2a3aba88475d9429c99f0 | 4ae68767d525b441b293dcfd1d7f0dd8ca456d2a | /moniter_traders/trader1.py | aea0afd04e0fd47edb056c020bc76df50753dc38 | [] | no_license | StevenMaharaj/many_traders | 523162929ce0ed0899f70e2c688043bbea96b3b5 | a2f3247fc6ed04b6d293a5062a41d1480698ac5f | refs/heads/master | 2022-12-20T01:30:04.737056 | 2020-08-23T15:35:05 | 2020-08-23T15:35:05 | 288,927,145 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 563 | py | import numpy as np
import asyncio
import asyncio
import websockets
async def hello():
    """Connect to the local exchange and send a random BUY/SELL order each second.

    Runs for as long as the websocket reports itself open; once the
    connection closes the loop ends and the coroutine returns.
    """
    uri = "ws://localhost:8765"
    async with websockets.connect(uri) as websocket:
        trader_no = 1
        await websocket.send(f"trader {trader_no} has logged on")
        # Fix: the original nested a `while True` inside `while
        # websocket.open`, so the open-check was dead code and the inner
        # loop never exited. A single loop re-checks the connection state
        # before every order.
        while websocket.open:
            action = np.random.choice(["BUY", "SELL"])
            await websocket.send(f"Trader {trader_no} makes a {action} order")
            await asyncio.sleep(1)
# Drive the trader coroutine to completion (i.e. until the websocket closes).
# NOTE(review): asyncio.run(hello()) is the modern equivalent — confirm the
# target Python version before switching.
asyncio.get_event_loop().run_until_complete(hello())
"[email protected]"
] | |
101aafdf11884552b8fe84b7119071b94791664b | 5390306b57bf4ed2e6a7806b82a224e63bd71cda | /libs/imgp.py | 22eb86d3a30aa87fc67450149c0e4fc679c2f3cb | [
"MIT"
] | permissive | myygunduz/python-turkiye-discord-bot | af118afe3739d6d1713e153e2e2cfd79df81b268 | cb13167d21d2727b2ebe411ed4ebbc90b1fbe37c | refs/heads/main | 2023-07-09T09:39:10.656825 | 2021-08-14T14:08:43 | 2021-08-14T14:08:43 | 395,501,720 | 1 | 0 | MIT | 2021-08-13T02:45:52 | 2021-08-13T02:45:52 | null | UTF-8 | Python | false | false | 6,719 | py | # Part of the Python Türkiye Discord Bot's libraries
#
# EN:
# Uses pygame to create image that displays user's level.
#
# TR:
# Kullanıcıların seviyelerini gösteren görseli
# oluşturmak için pygame kullanır.
#
# https://github.com/kadir014/python-turkiye-discord-bot
import os; os.environ['SDL_VIDEODRIVER'] = 'dummy'
import discord
import pygame
import datetime
pygame.init()
# A display surface must exist before Surface.convert() works; with
# SDL_VIDEODRIVER=dummy (set above) this stays headless.
pygame.display.set_mode((1,1))
# Static image assets composited onto every generated tweet.
tweet_follow = pygame.image.load("data/tweet_follow.png").convert()
tweet_controls = pygame.image.load("data/tweet_controls.png").convert()
# Pre-loaded fonts keyed by "<family> - <size>".
fonts = {
    "Segoe UI - 24" : pygame.font.Font("data/Segoe UI.ttf", 24),
    "Gil - 30" : pygame.font.Font("data/gil.ttf", 30),
    "Gil - 50" : pygame.font.Font("data/gil.ttf", 50)
}
# Turkish -> ASCII character substitutions applied to tweet text before
# rendering (see generate_tweet).
turkish_chars = {"ç":"c", "ğ":"g", "ı":"i", "ö":"o", "ü":"u",
                 "Ç":"C", "Ğ":"G", "İ":"I", "Ö":"O", "Ü":"U"}
# TODO: Optimize
def circle_mask(surface):
    """Return a copy of `surface` cropped to its inscribed circle.

    Everything outside the circle becomes fully transparent. Works by
    drawing an inverse circular mask with a magenta colorkey, compositing
    it over the input, and re-blitting onto an SRCALPHA surface.
    Assumes `surface` is square (the circle uses width/2 as its radius).
    """
    # Mask surface: magenta everywhere except a black (colorkeyed-out) circle.
    circlesurf = pygame.Surface(surface.get_size()).convert()
    circlesurf.fill((255, 0, 254))
    c = surface.get_width()/2
    pygame.draw.circle(circlesurf, (0, 0, 0), (c, c), c)
    circlesurf.set_colorkey((0, 0, 0))
    # Composite the mask over the image; the magenta corners become the
    # colorkey of the intermediate result.
    resultsurf = pygame.Surface(surface.get_size())
    resultsurf.set_colorkey((255, 0, 254, 255))
    resultsurf.blit(surface, (0, 0))
    resultsurf.blit(circlesurf, (0, 0))
    # Blit through the colorkey onto an alpha surface so the masked area
    # ends up transparent rather than magenta.
    returnsurf = pygame.Surface(surface.get_size(), pygame.SRCALPHA).convert_alpha()
    returnsurf.blit(resultsurf, (0, 0))
    return returnsurf
async def get_avatar(user):
    """Download *user*'s avatar to a scratch file and load it as a pygame surface."""
    raw = await user.avatar_url.read()
    path = "data/_tempavatar.webp"
    with open(path, "wb") as tmp:
        tmp.write(raw)
    return pygame.image.load(path).convert_alpha()
def format_count(n):
    """Abbreviate a tweet counter the way Twitter displays it.

    > 10000 -> '12K' style; > 1000 -> '1,234' style; otherwise the
    number is returned unchanged (the caller str()s it when rendering).
    """
    if n > 10000:
        return f"{int(n/1000)}K"
    if n > 1000:
        s = str(n)
        return s[0] + "," + s[1:]
    return n

async def generate_tweet(user, textt, tname="", date="", rt=0, love=0):
    """Render a fake tweet by *user* and return it as a 700x307 pygame Surface.

    user  -- discord.Member whose display name and avatar are used
    textt -- tweet body; Turkish characters are transliterated to ASCII
             because the bundled fonts lack those glyphs
    tname -- twitter handle; derived from the display name when empty
    date  -- timestamp text; current local time when empty
    rt / love -- retweet and like counters

    Fixes: the debug print of the word count is removed, and the word
    wrapper no longer loses text — previously short tweets rendered only
    their first word and the trailing partial line was always dropped.
    """
    text = "".join(turkish_chars.get(char, char) for char in textt)

    name = user.display_name
    if len(tname) == 0:
        tname = name.lower().replace(" ", "_").replace("-", "_")
    if len(date) == 0:
        date = datetime.datetime.now().strftime("%H:%M - %d %m %Y")
    rt = format_count(rt)
    love = format_count(love)

    surface = pygame.Surface((700, 307)).convert()
    surface.fill((255, 255, 255))

    avatar = await get_avatar(user)
    avatar = pygame.transform.scale(avatar, (55, 55)).convert_alpha()
    surface.blit(avatar, (30, 30))
    surface.blit(tweet_follow, (535, 30))
    surface.blit(tweet_controls, (33, 258))

    surface.blit(fonts["Segoe UI - 24"].render(name, True, (28, 32, 35)), (100, 26))
    surface.blit(fonts["Segoe UI - 24"].render(f"@{tname}", True, (110, 125, 140)), (100, 55))
    surface.blit(fonts["Segoe UI - 24"].render(date, True, (110, 125, 140)), (30, 210))
    surface.blit(fonts["Segoe UI - 24"].render(str(rt), True, (100, 120, 130)), (123, 255))
    surface.blit(fonts["Segoe UI - 24"].render(str(love), True, (100, 120, 130)), (233, 255))

    # Greedy word-wrap at roughly 46 characters per line, max 3 lines.
    lines = []
    width = 0
    current = ""
    for word in text.split(" "):
        width += len(word) + 1
        current += word + " "
        if width > 46:
            lines.append(current)
            current = ""
            width = 0
    if current:
        lines.append(current)  # keep the trailing partial line
    lines = lines[:3]

    for y, line in enumerate(lines):
        surface.blit(fonts["Segoe UI - 24"].render(line, True, (28, 32, 35)), (30, 100+(y*26)))
    return surface
async def profil_yap(user, dbuser, db):
    """Render a 650x200 level/XP profile card and save it to data/profile.png.

    user   -- discord.Member (name, avatar, status, id)
    dbuser -- the user's XP total (used with db.calc_level/pre_xp/next_xp)
    db     -- libs.db.RedisWrapper providing the level/XP helpers
    """
    # Special backgrounds for two hard-coded user ids; otherwise transparent.
    if user.id == 311542309252497409: bg = pygame.image.load("data/backgrounds/sky.png")
    elif user.id == 365120946299731990: bg = pygame.image.load("data/backgrounds/reis.png")
    else: bg = pygame.Surface((650, 200), pygame.SRCALPHA).convert_alpha()
    img = pygame.Surface((650, 200), pygame.SRCALPHA).convert_alpha()
    # Round avatar; a key-coloured disc is punched at (152, 152) where the
    # status badge will show through.
    avatar = await get_avatar(user)
    avatar = pygame.transform.scale(avatar, (183, 183)).convert_alpha()
    avatar = circle_mask(avatar)
    avatar.set_colorkey((255, 0, 254))
    pygame.draw.circle(avatar, (255, 0, 254), (152, 152), 29)
    # Keep only ASCII-representable characters of the display name
    # (transliterate Turkish letters, drop everything else).
    name = ""
    for c in user.display_name:
        if c in turkish_chars: name += turkish_chars[c]
        elif ord(c) < 128: name += c
    # White text plus a black copy used as a poor-man's outline below.
    ts = fonts["Gil - 50"].render(name, True, (255, 255, 255))
    ts2 = fonts["Gil - 50"].render(name, True, (0, 0, 0))
    ts3 = fonts["Gil - 30"].render(f"Level {db.calc_level(dbuser)} - {dbuser-db.pre_xp(dbuser)} / {db.next_xp(dbuser)-db.pre_xp(dbuser)}", True, (255, 255, 255))
    ts4 = fonts["Gil - 30"].render(f"Level {db.calc_level(dbuser)} - {dbuser-db.pre_xp(dbuser)} / {db.next_xp(dbuser)-db.pre_xp(dbuser)}", True, (0, 0, 0))
    # White ring around the avatar, keyed so only the ring remains.
    outline = pygame.Surface((650, 200), pygame.SRCALPHA).convert_alpha()
    pygame.draw.circle(outline, (255, 255, 255), (99, 99), 93)
    pygame.draw.circle(outline, (255, 0, 255), (152, 152), 29)
    outline.set_colorkey((255, 0, 255))
    img.blit(outline, (0, 0))
    # Status badge image chosen from the member's presence.
    if user.status == discord.Status.online:
        status = pygame.image.load("data/status/cevrimici.png")
    elif user.status == discord.Status.dnd:
        status = pygame.image.load("data/status/rahatsiz.png")
    elif user.status == discord.Status.idle:
        status = pygame.image.load("data/status/bosta.png")
    else:
        status = pygame.image.load("data/status/cevrimdisi.png")
    img.blit(status, (0, 0))
    # XP progress bar with rounded ends: dark track, green fill.
    bar = pygame.Surface((650, 200), pygame.SRCALPHA).convert_alpha()
    bar.fill((255, 0, 255, 255))
    bar.set_colorkey((255, 0, 255))
    pygame.draw.rect(bar, (0, 0, 0, 190), (178, 72, 617-178, 125-72))
    pygame.draw.circle(bar, (0, 0, 0, 190), (617, 99), (125-72)/2)
    # Fraction of the way from the previous level threshold to the next.
    xp_orani = (dbuser-db.pre_xp(dbuser)) / (db.next_xp(dbuser)-db.pre_xp(dbuser))
    pygame.draw.rect(bar, (0, 255, 0, 220), (178, 72, (617-178)*xp_orani, 125-72))
    pygame.draw.circle(bar, (0, 255, 0, 220), (178+(617-178)*xp_orani, 99), (125-72)/2)
    pygame.draw.circle(bar, (255, 0, 255, 255), (113, 99), 91)
    img.blit(bar, (0, 0))
    img.blit(avatar, (8, 8))
    # Black copies offset in four directions fake a text outline; the white
    # text goes on top.
    img.blit(ts2, (197, 14))
    img.blit(ts2, (198, 10))
    img.blit(ts2, (196, 10))
    img.blit(ts2, (198, 14))
    img.blit(ts2, (196, 14))
    img.blit(ts, (197, 10))
    img.blit(ts4, (200, 142))
    img.blit(ts4, (201, 140))
    img.blit(ts4, (199, 140))
    img.blit(ts4, (201, 142))
    img.blit(ts4, (199, 142))
    img.blit(ts3, (200, 140))
    # Personal badge overlay for one hard-coded user id.
    if user.id == 311542309252497409: img.blit(pygame.image.load("data/badges/satania.png"), (0, 0))
    bg.blit(img, (0, 0))
    pygame.image.save(bg, "data/profile.png")
| [
"[email protected]"
] | |
1380c3325cab018e45807a65e55c69880b2c3d62 | 684b77e02e1053c331111d3390f45328e95e746f | /main.py | 9ef3360f982d5060d38e9dfd70f5ae545c4d3b14 | [] | no_license | Hacker1337/ball-game | 679578ebbc8fb48347a94fa9d076ee8143c3827c | 3da0b3d19d4b6c019eafd5d2c3367d16d53bbca9 | refs/heads/master | 2023-08-18T17:44:55.892877 | 2021-10-12T13:48:20 | 2021-10-12T13:58:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,912 | py | import os
import pygame
from pygame.draw import *
from random import randint
from random import random
pygame.init()
FPS = 60
field_height = 900
field_width = 1200
screen = pygame.display.set_mode((field_width, field_height))
score = 0
RED = (255, 0, 0)
BLUE = (0, 0, 255)
YELLOW = (255, 255, 0)
GREEN = (0, 255, 0)
MAGENTA = (255, 0, 255)
CYAN = (0, 255, 255)
BLACK = (0, 0, 0)
COLORS = [RED, BLUE, YELLOW, GREEN, MAGENTA, CYAN]
def new_ball():
    """Draw a randomly placed, sized and coloured ball on the screen.

    Returns ``((x, y), r)``: the centre position and the radius.
    """
    ball_x = randint(100, field_width - 100)
    ball_y = randint(100, field_height - 100)
    radius = randint(20, 100)
    paint = COLORS[randint(0, 5)]
    circle(screen, paint, (ball_x, ball_y), radius)
    return (ball_x, ball_y), radius
def click(e, x, y, r):
    """Return True if mouse event *e* hit the circle centred at (x, y).

    e -- pygame event object carrying a ``pos`` attribute (mouse coords)
    r -- circle radius
    """
    mx, my = e.pos  # mouse position
    # Point-in-circle test on squared distances (no sqrt needed).
    return (x - mx) ** 2 + (y - my) ** 2 <= r * r
def speed_modification():
    """Return a random factor in [0.90, 1.05) used to jitter ball speed."""
    jitter = random() * 0.15
    return 0.90 + jitter
class Target:
    """Base class for clickable moving targets (balls, squares).

    Spawns at a random position with random size, colour and velocity.
    Subclasses override paint() and insight().
    """
    def __init__(self, screen):
        self.screen = screen
        # Random spawn position kept away from the field edges.
        self.x = randint(100, field_width - 100)
        self.y = randint(100, field_height - 100)
        self.r = randint(10, 100)
        self.color = COLORS[randint(0, 5)]
        # Velocity components in pixels per frame.
        self.dx = randint(1, 5)
        self.dy = randint(1, 5)
        self.score_rate = 1 # how difficult the target is
    def paint(self):
        """Draw the target; no-op in the base class."""
        pass
    def move(self):
        """Advance one frame: bounce off field edges, then repaint.

        On every bounce both velocity components are multiplied by a
        random factor from speed_modification(), so speeds drift over
        time (this applies even to the non-reflected axis).
        """
        if (self.x + self.r + self.dx >= field_width) or (self.x - self.r + self.dx <= 0):
            self.dx = -self.dx * speed_modification()
            self.dy = self.dy * speed_modification()
        if (self.y + self.r + self.dy >= field_height) or (self.y - self.r + self.dy <= 0):
            self.dx = self.dx * speed_modification()
            self.dy = -self.dy * speed_modification()
        self.x += self.dx
        self.y += self.dy
        self.paint()
    def insight(self, mx, my):
        """Return whether point (mx, my) is inside the target; overridden."""
        pass
class Ball(Target):
    """Circular target; the easy kind (score_rate == 1)."""

    def __init__(self, screen):
        super().__init__(screen)
        self.score_rate = 1

    def paint(self):
        """Draw the ball on the global game surface."""
        circle(screen, self.color, (self.x, self.y), self.r)

    def insight(self, mx, my):
        """True when point (mx, my) lies inside the ball."""
        off_x = self.x - mx
        off_y = self.y - my
        return off_x * off_x + off_y * off_y <= self.r * self.r
class Square(Target):
    """Square target; worth ten times a Ball (score_rate == 10)."""

    def __init__(self, screen):
        super().__init__(screen)
        self.score_rate = 10

    def paint(self):
        """Draw the square; (x, y) is its centre, r half the side length."""
        top_left = (self.x - self.r, self.y - self.r)
        side = 2 * self.r
        rect(screen, self.color, [top_left, (side, side)])

    def insight(self, mx, my):
        """True when (mx, my) falls inside the square's bounding box."""
        return abs(mx - self.x) <= self.r and abs(my - self.y) <= self.r
pygame.display.update()
clock = pygame.time.Clock()
finished = False
# Balls initialization: 50 balls and 5 squares share one target list.
targets = []
ball_number = 50
for i in range(ball_number):
    tar = Ball(screen)
    targets.append(tar)
square_number = 5
for i in range(square_number):
    square = Square(screen)
    targets.append(square)
# Main loop: move everything each frame, score clicks on targets.
while not finished:
    clock.tick(FPS)
    screen.fill(BLACK)
    # Balls movement
    for tar in targets:
        tar.move()
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            finished = True
        elif event.type == pygame.MOUSEBUTTONDOWN:
            for i in range(len(targets)):
                tar = targets[i]
                if click(event, tar.x, tar.y, tar.r):
                    # Smaller and faster targets score more; squares carry
                    # a 10x score_rate. A hit target is replaced by a new
                    # random Ball or Square (50/50).
                    score += 1000 / tar.r * tar.score_rate * (tar.dx ** 2 + tar.dy ** 2)
                    if random() > 0.5:
                        targets[i] = Ball(screen)
                    else:
                        targets[i] = Square(screen)
                    print(format(score, '0.1f'))
    pygame.display.update()
pygame.quit()
# Append the final score to the high-score file.
# NOTE(review): the file is never closed, and the file line writes the raw
# float while the console line writes the 0.1f-formatted value — confirm
# whether that difference is intended.
name = input("Enter your nickname:\n")
scorefile = "score.txt"
file = open(scorefile, 'a')
print(f"{name}\t {format(score, '0.1f')}")
print(f"{name}\t {score}", file=file)
| [
"[email protected]"
] | |
df8ae32f57ae7080cc99251eb8a64f4386b48973 | 29c70acf87d45fc659a5dd34d141ec935867b530 | /Rabi_single_qubit.py | 803f0349991f8fcc61c77d2bc8346a39aea8efc0 | [] | no_license | aniketmt/quantum-sim | 6dd8ab0b3f08cad7aab3c6ccf168d5e7d61918fb | 85c9c872e89beb7fb44a1ccd6935004cd1e3c6f1 | refs/heads/master | 2021-06-17T14:15:36.268709 | 2017-05-22T19:27:06 | 2017-05-22T19:27:06 | 91,388,137 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,773 | py | """
Rabi oscillations of a single qubit, with decoherence
Author: Aniket Maiti
"""
from qutip import *
from matplotlib import pyplot as plt
from numpy import pi, linspace, sqrt, arctan, cos, sin, multiply
from exp_decay_2 import fit
#
# Problem parameters (angular frequencies, units of 2*pi * GHz-like rates):
#
delta = 0.0 * 2 * pi # qubit sigma_x coefficient
eps0 = 1.0 * 2 * pi # qubit sigma_z coefficient
A = 0.25 * 2 * pi # drive amplitude (reducing -> RWA more accurate)
w = 1.0 * 2 * pi # drive frequency
gamma = 0.10 * 4/3 # relaxation rate = 1/T1
kappa = 0.10 # dephasing rate = 1/T_phi
n_th = 0.0 # average number of excitations ("temperature")
theta = arctan(delta/eps0) # Effective angle of initial state wrt z-axis
psi0 = cos(theta/2)*fock(2, 1) + sin(theta/2)*fock(2, 0) # initial state
use_rwa = False # Whether to use Rotating Wave Approx
#
# Operators
#
sx = sigmax()
sy = sigmay()
sz = sigmaz()
sm = destroy(2)
#
# Collapse operators for the Lindblad master equation:
#
cops = []
# qubit relaxation
rate = (n_th+1) * gamma
if rate > 0:
    cops.append(sqrt(rate)*sm)
# qubit excitation by thermal photons
rate = n_th * gamma
if rate > 0:
    cops.append(sqrt(rate)*sm.dag())
# qubit dephasing
rate = kappa
if rate > 0:
    cops.append(sqrt(rate)*sz)
# time space: five drive periods, 500 samples
tlist = linspace(0, 5.0 * 2 * pi / A, 500)
#
# Hamiltonian
#
# # For interaction picture
# def H_func(t, args):
#     Ht = sin(w*t)
#
#     H_0_exp_m = (-1j * t * H0).expm().data
#     H_0_exp_p = (1j * t * H0).expm().data
#
#     H_MW_int = H_0_exp_m * Ht * H_0_exp_p
#     return H_MW_int
H0 = - delta / 2.0 * sx - eps0 / 2.0 * sz
H1 = - A * sx
# define the time-dependence of the hamiltonian using the list-string format
args = {'w': w}
Ht = [H0, [H1, "sin(w*t)"]]
if not use_rwa:
    # Full time-dependent evolution; track the excitation number sm.dag()*sm.
    output = mesolve(Ht, psi0, tlist, cops, [sm.dag() * sm], args)
else:
    # Rotating Wave Approx: time-independent effective Hamiltonian.
    H_rwa = - delta / 2.0 * sx - A * sx / 2
    output = mesolve(H_rwa, psi0, tlist, cops, [sx, sy, sz, sm.dag() * sm])
#
# Plots
#
# Plot appropriate expectation values
if use_rwa:
    sxlist, sylist, szlist, n_q = output.expect
else:
    n_q = output.expect[0]
fig, axes = plt.subplots(1, 1)
axes.plot(tlist, n_q)
axes.set_ylim([0.0, 1.1])
axes.set_xlabel('Time [ns]')
axes.set_ylabel('Occupation probability')
axes.set_title('Excitation probability of qubit')
plt.show()
# Plot Bloch Sphere (only meaningful when sx/sy/sz were recorded, i.e. RWA)
if use_rwa:
    sphere = Bloch()
    sphere.add_points([sxlist, sylist, szlist], meth='l')
    sphere.vector_color = ['r']
    sphere.add_vectors([sin(theta), 0, -cos(theta)]) # direction of eigenvector
    sphere.show()
# Get Rabi decay constant by fitting to a decaying sinusoid
# NOTE(review): the line below is a Python 2 print statement — this script
# appears to target Python 2; confirm before running under Python 3.
c, f = fit(tlist, multiply(n_q, 100)) # returns decay constant and frequency
print '\nRabi decay constant : ', c
| [
"[email protected]"
] | |
471e2834589c36d53dc9fe706289e76eb704508d | c1e513714ddd8b05cbc4a7171207b0d082d7147a | /ex049.py | cfcf9e56387cad69b115097c9664a52ba355cb2f | [] | no_license | gabrielsalesls/curso-em-video-python | b8191ef8ead4052b590551fcca6f9bcdb1de3007 | c9f004a5f02c85d09e36bf3e121fad4a9fdbb252 | refs/heads/master | 2022-11-21T17:31:33.526490 | 2020-07-22T18:01:33 | 2020-07-22T18:01:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 145 | py | # Tabuada usando FOR
# Print the multiplication table ("tabuada") of a user-supplied number, 1..10.
# (Prompt string is Portuguese: "enter the number".)
n = int(input('Digite o numero: '))
for c in range(1, 11):
    r = n * c
    print('{} x {} = {}'.format(n, c, r))
| [
"[email protected]"
] | |
cb3b7f7f61acce8ec05037cea3db5018b1e29f25 | 8e63bc0ec28d4ba693a02aa7a1aedf9a8231349c | /youtube/urls.py | aae311c687b86cecb97de55ec741fecee734fbc4 | [] | no_license | bhwong11/youtube | 1c125e5aff4d110f23940c7cdf83df86a666a61f | 37c849eb934007b6165a7044ae71e58a27d26527 | refs/heads/main | 2023-07-08T05:26:19.528079 | 2021-08-16T21:52:25 | 2021-08-16T21:52:25 | 396,986,110 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 749 | py | """youtube URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
# Root URL map: only the Django admin site is wired up for this project.
urlpatterns = [
    path('admin/', admin.site.urls),
]
| [
"[email protected]"
] | |
2e192ab31039def62663dae05e67c5467c557ee1 | 30a89ae47ca79e4ced151908f4059cd77ade30ef | /main/views.py | e80899981ed429eccd17a9e6bbdfe11110385253 | [] | no_license | harshit8858/mindful_project1_salesapp | 0bd80c40b2349fe08744dcd0625283c5b6ba4029 | 66f7c7af868518898aa6422d1b17ca9f7cf433ef | refs/heads/master | 2020-03-24T00:02:49.972583 | 2018-08-18T07:56:49 | 2018-08-18T07:56:49 | 142,269,897 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,826 | py | from django.shortcuts import render, redirect, get_object_or_404
from django.http import HttpResponseRedirect
from django.contrib import auth
from .forms import *
from .models import *
from .models1 import *
def index(request):
    """Render the public landing page."""
    return render(request, 'main/index.html')
def user(request):
    """User list page: sales managers and salesmen for signed-in users.

    Anonymous visitors get the same template with no context, so the
    lists simply don't render.
    """
    if not request.user.is_authenticated:
        return render(request, 'main/user.html')
    context = {
        'manager': Profile.objects.filter(user_type='salesmanager'),
        'men': Profile.objects.filter(user_type='salesmen'),
        'active10': 'active',
        'dropdown5': 'dropdown-container1',
    }
    return render(request, 'main/user.html', context)
def user_details(request, slug):
    """Show one Profile looked up by its slug; 404 when it doesn't exist."""
    profile = get_object_or_404(Profile, slug=slug)
    return render(request, 'main/user_details.html', {
        'instance': profile,
        'active10': 'active',
        'dropdown5': 'dropdown-container1',
    })
def signup(request):
    """Superuser-only: create a new user plus their Profile.

    GET shows an empty SignUpForm; a valid POST saves the User, then
    copies the extra fields onto the Profile created by the post-save
    signal and redirects to the new profile's page. An invalid POST
    re-renders the bound form with errors.
    """
    if request.user.is_superuser:
        if request.method == 'POST':
            form = SignUpForm(request.POST, request.FILES)
            if form.is_valid():
                f = form.save()
                f.refresh_from_db()  # load the profile instance created by the signal
                f.profile.user_type = form.cleaned_data.get('user_type')
                # ForeignKey-ish selections are stored as plain strings.
                f.profile.sale_admin = str(form.cleaned_data.get('sale_admin'))
                f.profile.sale_manager = str(form.cleaned_data.get('sale_manager'))
                f.profile.first_name = form.cleaned_data.get('first_name')
                f.profile.last_name = form.cleaned_data.get('last_name')
                f.profile.email = form.cleaned_data.get('email')
                f.profile.pincode = form.cleaned_data.get('pincode')
                f.profile.state = form.cleaned_data.get('state')
                f.profile.city = form.cleaned_data.get('city')
                f.profile.country = form.cleaned_data.get('country')
                f.profile.status = form.cleaned_data.get('status')
                f.profile.address = form.cleaned_data.get('address')
                f.profile.profile_pic = form.cleaned_data.get('profile_pic')
                # NOTE(review): this saves the User, not the Profile —
                # presumably a post-save signal re-saves user.profile; confirm.
                f.save()
                # return redirect('main:user')
                return HttpResponseRedirect(f.profile.get_absolute_url())
        else:
            form = SignUpForm()
        context = {
            'form': form,
            'active10': 'active',
            'dropdown5': 'dropdown-container1',
        }
        return render(request, 'main/signup.html', context)
    else:
        return render(request, 'main/not_authorised.html')
def edit_user(request, slug):
    """Superuser-only: edit an existing Profile identified by *slug*.

    GET shows the form pre-filled from the profile; a valid POST saves it
    and redirects to the profile page. Non-superusers get the
    'not authorised' page.
    """
    instance = get_object_or_404(Profile, slug=slug)
    if request.user.is_superuser:
        if request.method == 'POST':
            form = EditUserForm(request.POST, request.FILES, instance=instance)
            if form.is_valid():
                f = form.save()
                f.refresh_from_db()  # load the profile instance created by the signal
                # NOTE(review): form.save() has already persisted these
                # fields; the manual reassignment below looks redundant —
                # confirm before removing.
                f.user_type = form.cleaned_data.get('user_type')
                f.sale_admin = str(form.cleaned_data.get('sale_admin'))
                f.sale_manager = str(form.cleaned_data.get('sale_manager'))
                f.first_name = form.cleaned_data.get('first_name')
                f.last_name = form.cleaned_data.get('last_name')
                f.email = form.cleaned_data.get('email')
                f.pincode = form.cleaned_data.get('pincode')
                f.state = form.cleaned_data.get('state')
                f.city = form.cleaned_data.get('city')
                f.country = form.cleaned_data.get('country')
                f.status = form.cleaned_data.get('status')
                f.address = form.cleaned_data.get('address')
                f.profile_pic = form.cleaned_data.get('profile_pic')
                f.save()
                # return redirect('main:user')
                return HttpResponseRedirect(f.get_absolute_url())
        else:
            form = EditUserForm(instance=instance)
        context = {
            'form': form,
            'active10': 'active',
            'dropdown5': 'dropdown-container1',
        }
        return render(request, 'main/signup.html', context)
    else:
        return render(request, 'main/not_authorised.html')
def delete_user(request, slug):
    """Delete a Profile and its backing auth User, then return to the list.

    Superuser-only: creation (signup) and editing (edit_user) are gated on
    is_superuser, so deletion is gated the same way — previously any
    visitor who knew the URL could delete accounts.
    """
    if not request.user.is_superuser:
        return render(request, 'main/not_authorised.html')
    instance = get_object_or_404(Profile, slug=slug)
    instance_user = User.objects.get(username=instance.user.username)
    # Remove the profile first, then the login so no orphaned User remains.
    instance.delete()
    instance_user.delete()
    return redirect('main:user')
def login(request):
    """Show the login form, or the home page when already signed in."""
    template = 'main/index.html' if request.user.is_authenticated else 'login.html'
    return render(request, template)
def auth_check(request):
    """Validate posted credentials: log the user in or bounce to 'invalid'.

    Uses .get() so a request missing either field is treated as a failed
    login instead of raising MultiValueDictKeyError (an HTTP 500).
    """
    username = request.POST.get('username')
    password = request.POST.get('password')
    user = auth.authenticate(username=username, password=password)
    if user is None:
        return redirect('main:invalid')
    auth.login(request, user)
    return redirect('main:index')
def invalid(request):
    """Shown after a failed login attempt (see auth_check)."""
    return render(request, 'main/invalid.html')
def logout(request):
    """End the session and send the visitor back to the login page."""
    auth.logout(request)
    return redirect('main:login')
def customer(request):
    """Superuser-only: create a Customer via CustomerForm.

    GET shows an empty form; a valid POST saves the customer (storing
    the chosen sale_manager as text) and redirects to its detail page;
    an invalid POST re-renders the bound form with errors.
    """
    if not request.user.is_superuser:
        return render(request, 'main/not_authorised.html')
    if request.method == 'POST':
        form = CustomerForm(request.POST)
        if form.is_valid():
            new_customer = form.save(commit=False)
            new_customer.sale_manager = str(form.cleaned_data.get('sale_manager'))
            new_customer.save()
            return HttpResponseRedirect(new_customer.get_absolute_url1())
    else:
        form = CustomerForm()
    return render(request, 'main/customer.html', {
        'form': form,
        'active5': 'active',
        'dropdown2': 'dropdown-container1',
    })
def customer_list(request):
    """List every Customer alongside the sales managers."""
    context = {
        'sale_manager': Profile.objects.filter(user_type='salesmanager'),
        'customer': Customer.objects.all(),
        'active5': 'active',
        'dropdown2': 'dropdown-container1',
    }
    return render(request, 'main/customer_list.html', context)
def customer_details(request, slug1):
    """Show one Customer looked up by its slug1; 404 when absent."""
    record = get_object_or_404(Customer, slug1=slug1)
    return render(request, 'main/customer_details.html', {
        'instance': record,
        'active5': 'active',
        'dropdown2': 'dropdown-container1',
    })
def edit_customer(request, slug1):
    """Superuser-only: edit an existing Customer identified by *slug1*.

    Mirrors customer(): a valid POST saves the edited record (with the
    selected sale_manager stored as text) and redirects to its page.
    """
    target = get_object_or_404(Customer, slug1=slug1)
    if not request.user.is_superuser:
        return render(request, 'main/not_authorised.html')
    if request.method == 'POST':
        form = CustomerForm(request.POST,instance=target)
        if form.is_valid():
            updated = form.save(commit=False)
            updated.sale_manager = str(form.cleaned_data.get('sale_manager'))
            updated.save()
            return HttpResponseRedirect(updated.get_absolute_url1())
    else:
        form = CustomerForm(instance=target)
    return render(request, 'main/customer.html', {
        'form': form,
        'active5': 'active',
        'dropdown2': 'dropdown-container1',
    })
def delete_customer(request, slug1):
    """Delete a Customer and return to the list.

    Superuser-only for consistency: create/edit views are gated on
    is_superuser, so deletion is gated too — previously anyone with the
    URL could delete customers.
    """
    if not request.user.is_superuser:
        return render(request, 'main/not_authorised.html')
    instance = get_object_or_404(Customer, slug1=slug1)
    instance.delete()
    return redirect('main:customer_list')
def reports(request):
    """Static reports page; 'active9' highlights its sidebar entry."""
    context = {
        'active9': 'active',
    }
    return render(request, 'main/reports.html', context)
def dashboard(request):
    """Static dashboard page; 'active1' highlights its sidebar entry."""
    context = {
        'active1': 'active',
    }
    return render(request, 'main/dashboard.html', context)
def location_tracking(request):
    """Static location-tracking page; 'active8' highlights its sidebar entry."""
    context = {
        'active8': 'active',
    }
    return render(request, 'main/location_tracking.html', context)
def company_profile(request):
    """Show the company profile, or bootstrap one when none exists yet.

    When there are no Company_Profile rows, a superuser is redirected to
    the add form and everybody else sees a 'no data found' page.
    """
    profiles = Company_Profile.objects.all()
    if profiles.count() == 0:
        if request.user.is_superuser:
            return redirect('main:company_profile_add')
        return render(request, 'main/no_data_found.html')
    return render(request, 'main/company_profile.html', {
        'cp': profiles,
        'active11': 'active',
    })
def company_profile_add(request):
    """Create the Company_Profile record (superuser only).

    The permission check now runs before any form processing: previously
    only the final render was gated, so a non-superuser could still save
    data by POSTing directly. The 'requset' parameter typo is also fixed
    (Django passes the request positionally, so callers are unaffected).
    """
    if not request.user.is_superuser:
        return render(request, 'main/not_authorised.html')
    if request.method == 'POST':
        form = CompanyProfileEditForm(request.POST)
        if form.is_valid():
            form.save()
            return redirect('main:company_profile')
    else:
        form = CompanyProfileEditForm()
    context = {
        'form': form,
        'active11': 'active',
    }
    return render(request, 'main/company_profile_add.html', context)
def company_profile_edit(request, id):
    """Edit an existing Company_Profile (superuser only).

    Fixes: the permission check runs before any write (previously a
    non-superuser POST still saved); the record is fetched with
    get_object_or_404 so a bad id yields 404 instead of a 500
    (Company_Profile.DoesNotExist); the 'requset' typo is corrected.
    The 'id' parameter name is kept because the URLconf passes it by name.
    """
    if not request.user.is_superuser:
        return render(request, 'main/not_authorised.html')
    instance = get_object_or_404(Company_Profile, id=id)
    if request.method == 'POST':
        form = CompanyProfileEditForm(request.POST, instance=instance)
        if form.is_valid():
            form.save()
            return redirect('main:company_profile')
    else:
        form = CompanyProfileEditForm(instance=instance)
    context = {
        'form': form,
        'instance': instance,
        'active11': 'active',
    }
    return render(request, 'main/company_profile_add.html', context)
"[email protected]"
] | |
a5ef6335a57e0a0eee90829996a707cf148fbd1a | b6f760800f05c70e1d517440240745caab017434 | /students_db/__init__.py | c3b0e549ee176361e30185218bbc4cf7799d6703 | [] | no_license | denisjovic/students_db | 59981403a0d2038e47e7c25c0fbcd0bc873a07e1 | 23ae4b96dd8533163caf1bcd6c6586749e00e483 | refs/heads/master | 2023-02-18T21:36:36.995102 | 2021-01-14T09:26:55 | 2021-01-14T09:26:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 653 | py | import os
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
# Absolute path to this package so the SQLite file resolves regardless of
# the current working directory.
project_dir = os.path.dirname(os.path.abspath(__file__))
database_file = "sqlite:///{}".format(
    os.path.join(project_dir, "students_base.db"))
app = Flask(__name__)
app.config["SQLALCHEMY_DATABASE_URI"] = database_file
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
app.config["TEMPLATES_AUTO_RELOAD"] = True
# SECURITY NOTE(review): the secret key is hard-coded in source; it should
# come from an environment variable in any real deployment.
app.secret_key = 'some random secret key'
login = LoginManager(app)
login.login_view = "login"
login.login_message_category = "info"
db = SQLAlchemy(app)
# this must go on the bottom: routes imports `app`/`db` from this module,
# so importing it earlier would be a circular import.
from students_db import routes
| [
"[email protected]"
] | |
7204c6289a985863bc40c83d628ec9d380247070 | 7c4e7aaa211400361b5ae6f11738703d8340db95 | /game_final.py | 31a0836d354966fe3934ba9bb636fe45dcd49648 | [] | no_license | ritvik03/Baby-I-am-A-Fighterfighter | 021c33ee81a85d6dfca2501158dd54de1286ee8f | 0b1956089d4291f19294bd98d73057ef04f55a61 | refs/heads/master | 2020-09-07T15:17:05.011412 | 2019-11-12T14:35:56 | 2019-11-12T14:35:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,882 | py | """
Sample Python/Pygame Programs
Simpson College Computer Science
http://programarcadegames.com/
http://simpson.edu/computer-science/
From:
http://programarcadegames.com/python_examples/f.php?file=maze_runner.py
Explanation video: http://youtu.be/5-SbFanyUkQ
Part of a series:
http://programarcadegames.com/python_examples/f.php?file=move_with_walls_example.py
http://programarcadegames.com/python_examples/f.php?file=maze_runner.py
http://programarcadegames.com/python_examples/f.php?file=platform_jumper.py
http://programarcadegames.com/python_examples/f.php?file=platform_scroller.py
http://programarcadegames.com/python_examples/f.php?file=platform_moving.py
"""
import pygame
import random
import numpy as np
import matplotlib.pyplot as plt
import argparse
BLACK = (0, 0, 0)
WHITE = (255, 255, 255)
BLUE = (0, 0, 255)
GREEN = (0, 255, 0)
YELLOW = (255,255,0)
RED = (255, 0, 0)
PURPLE = (255, 0, 255)
class Wall(pygame.sprite.Sprite):
    """A rectangular, impassable wall segment of a room."""

    def __init__(self, x, y, width, height, color=GREEN):
        """Create a width x height wall whose top-left corner is (x, y)."""
        super().__init__()
        # Solid rectangle in the requested colour.
        self.image = pygame.Surface([width, height])
        self.image.fill(color)
        # Position the sprite through its rect, as pygame expects.
        self.rect = self.image.get_rect()
        self.rect.y = y
        self.rect.x = x
class Fire(pygame.sprite.Sprite):
    """A single burning patch; new Fire sprites are spawned as the blaze spreads."""

    def __init__(self, x, y, width, height, color):
        """Create a width x height patch of fire with top-left corner (x, y)."""
        super().__init__()
        self.image = pygame.Surface([width, height])
        self.image.fill(color)
        self.rect = self.image.get_rect()
        self.rect.y = y
        self.rect.x = x
class Player(pygame.sprite.Sprite):
    """ The player sprite: a 15x15 white square moved with the arrow keys
    and blocked by walls. """

    # Set speed vector (pixels per frame); shared defaults, updated per key
    # press/release via changespeed().
    change_x = 0
    change_y = 0

    def __init__(self, x, y):
        """ Create the player with its top-left corner at (x, y). """
        # Call the parent's constructor
        super().__init__()
        # Set height, width
        self.image = pygame.Surface([15, 15])
        self.image.fill(WHITE)
        # Make our top-left corner the passed-in location.
        self.rect = self.image.get_rect()
        self.rect.y = y
        self.rect.x = x

    def changespeed(self, x, y):
        """ Change the speed of the player. Called with a keypress. """
        self.change_x += x
        self.change_y += y

    def move(self, walls):
        """ Advance one frame, resolving collisions against *walls*.

        The two axes are handled one at a time on purpose: moving and
        clamping x first, then y, lets the player slide along a wall
        instead of sticking to it.
        """
        # Move left/right
        self.rect.x += self.change_x
        # Did this update cause us to hit a wall?
        block_hit_list = pygame.sprite.spritecollide(self, walls, False)
        for block in block_hit_list:
            # If we are moving right, set our right side to the left side of
            # the item we hit
            if self.change_x > 0:
                self.rect.right = block.rect.left
            else:
                # Otherwise if we are moving left, do the opposite.
                self.rect.left = block.rect.right
        # Move up/down
        self.rect.y += self.change_y
        # Check and see if we hit anything
        block_hit_list = pygame.sprite.spritecollide(self, walls, False)
        for block in block_hit_list:
            # Reset our position based on the top/bottom of the object.
            if self.change_y > 0:
                self.rect.bottom = block.rect.top
            else:
                self.rect.top = block.rect.bottom
class Room(object):
    """Base class for every room in the maze."""

    # Class-level defaults; instances overwrite these in __init__ and in
    # the subclass constructors (room_* hold 1-based neighbour indices).
    wall_list = None
    enemy_sprites = None
    fire_list = None
    room_right = None
    room_left = None
    room_up = None
    room_down = None

    def __init__(self):
        """Create the sprite groups and the fire occupancy grid."""
        self.wall_list = pygame.sprite.Group()
        self.enemy_sprites = pygame.sprite.Group()
        self.fire_list = pygame.sprite.Group()
        # 800x800 grid marking which cells have burned (1) vs clear (0).
        self.fire_area = np.zeros((800,800))
class Room1(Room):
    """First room: white border, one blue divider, and a starting fire."""

    def __init__(self):
        super().__init__()
        # Border pieces plus a vertical divider; entries are
        # [x, y, width, height, colour].
        wall_specs = [[0, 0, 20, 250, WHITE],
                      [0, 350, 20, 250, WHITE],
                      [780, 0, 20, 250, WHITE],
                      [780, 350, 20, 250, WHITE],
                      [20, 0, 760, 20, WHITE],
                      [20, 580, 760, 20, WHITE],
                      [390, 50, 20, 500, BLUE]]
        for x, y, w, h, colour in wall_specs:
            self.wall_list.add(Wall(x, y, w, h, colour))
        # One initial patch of fire.
        for x, y, w, h, colour in [[100, 100, 50, 50, YELLOW]]:
            self.fire_list.add(Fire(x, y, w, h, colour))
        # 1-based indices of the neighbouring rooms.
        self.room_right = 2
        self.room_left = 5
class Room2(Room):
    """Second room: red border with two green vertical dividers.

    The original defined the identical `walls` list twice in a row; the
    first assignment was dead code and has been removed.
    """

    def __init__(self):
        super().__init__()
        # Wall entries are [x, y, width, height, colour].
        walls = [[0, 0, 20, 250, RED],
                 [0, 350, 20, 250, RED],
                 [780, 0, 20, 250, RED],
                 [780, 350, 20, 250, RED],
                 [20, 0, 760, 20, RED],
                 [20, 580, 760, 20, RED],
                 [190, 50, 20, 500, GREEN],
                 [590, 50, 20, 500, GREEN]
                 ]
        for item in walls:
            wall = Wall(item[0], item[1], item[2], item[3], item[4])
            self.wall_list.add(wall)
        # 1-based indices of the neighbouring rooms.
        self.room_right = 3
        self.room_left = 1
class Room3(Room):
    """Third room: purple border filled with a comb of red and white pillars."""

    def __init__(self):
        super().__init__()
        # Purple border segments: [x, y, width, height, colour].
        border = [[0, 0, 20, 250, PURPLE],
                  [0, 350, 20, 250, PURPLE],
                  [780, 0, 20, 250, PURPLE],
                  [780, 350, 20, 250, PURPLE],
                  [20, 0, 760, 20, PURPLE],
                  [20, 580, 760, 20, PURPLE]]
        for x, y, w, h, colour in border:
            self.wall_list.add(Wall(x, y, w, h, colour))
        # Red pillars in the top and bottom halves...
        for x in range(100, 800, 100):
            for y in range(50, 451, 300):
                self.wall_list.add(Wall(x, y, 20, 200, RED))
        # ...and white pillars across the middle, offset by 50 px.
        for x in range(150, 700, 100):
            self.wall_list.add(Wall(x, 200, 20, 200, WHITE))
        self.room_right = 4
        self.room_left = 2
class Room4(Room):
    """Fourth room: white border with three blue vertical dividers."""

    def __init__(self):
        super().__init__()
        # Wall entries are [x, y, width, height, colour].
        wall_specs = [[0, 0, 20, 250, WHITE],
                      [0, 350, 20, 250, WHITE],
                      [780, 0, 20, 250, WHITE],
                      [780, 350, 20, 250, WHITE],
                      [20, 0, 760, 20, WHITE],
                      [20, 580, 760, 20, WHITE],
                      [390, 50, 20, 500, BLUE],
                      [200, 50, 20, 500, BLUE],
                      [500, 100, 20, 500, BLUE]]
        for x, y, w, h, colour in wall_specs:
            self.wall_list.add(Wall(x, y, w, h, colour))
        # 1-based indices of the neighbouring rooms.
        self.room_right = 5
        self.room_left = 3
class customRoom(Room):
    """Room whose obstacle layout is loaded from a CSV file.

    Each CSV row is ``x, y, width, height``; obstacles are drawn in GREEN
    inside the standard white border. Dead commented-out loaders and the
    per-row debug print were removed; blank rows (e.g. a trailing newline
    in the CSV) are skipped instead of crashing on int('').
    """

    def __init__(self, roomName):
        """Build the room from *roomName*, the path to the layout CSV."""
        self.roomName = roomName
        super().__init__()
        # Standard white border: [x, y, width, height, colour].
        walls = [[0, 0, 20, 250, WHITE],
                 [0, 350, 20, 250, WHITE],
                 [780, 0, 20, 250, WHITE],
                 [780, 350, 20, 250, WHITE],
                 [20, 0, 760, 20, WHITE],
                 [20, 580, 760, 20, WHITE],
                 ]
        for x, y, w, h, colour in walls:
            self.wall_list.add(Wall(x, y, w, h, colour))
        # Read obstacle rows (values arrive as strings).
        import csv
        with open(roomName, 'r') as layout:
            obstacles = [row for row in csv.reader(layout, quotechar='"') if row]
        for row in obstacles:
            self.wall_list.add(
                Wall(int(row[0]), int(row[1]), int(row[2]), int(row[3]), GREEN))
        # 1-based indices of the neighbouring rooms.
        self.room_right = 1
        self.room_left = 4
def fire_spread(rooms):
    """Grow the fire in every room by one step.

    For each room that already contains fire, pick a random burning cell
    that still has at least one unburnt neighbour, then ignite all eight
    of its neighbours and mark the area in the room's fire_area grid.
    Fire that reaches the left/right edge jumps into the neighbouring
    room (room_left / room_right are 1-based indices into *rooms*).
    """
    size_fire = 10  # edge length of one fire cell, in pixels
    for room_ind in range(0,len(rooms)):
        fire_room = rooms[room_ind]
        rect_xs = [i.rect.x for i in fire_room.fire_list]
        rect_ys = [i.rect.y for i in fire_room.fire_list]
        if len(rect_xs) > 0:
            rects = [(rect_xs[i],rect_ys[i]) for i in range(0,len(rect_xs))]
            # Rejection-sample a burning cell with a free neighbour.
            # NOTE(review): this loops forever if every burning cell is
            # fully surrounded — confirm that cannot happen in practice.
            while(1):
                rand_ind = random.randrange(0, len(rect_xs))
                fire_y = rect_ys[rand_ind]
                fire_x = rect_xs[rand_ind]
                if(fire_x + size_fire, fire_y + size_fire) not in rects:
                    break
                if(fire_x + size_fire, fire_y - size_fire) not in rects:
                    break
                if(fire_x - size_fire, fire_y + size_fire) not in rects:
                    break
                if(fire_x - size_fire, fire_y - size_fire) not in rects:
                    break
                if(fire_x, fire_y + size_fire) not in rects:
                    break
                if(fire_x, fire_y - size_fire) not in rects:
                    break
                if(fire_x + size_fire, fire_y) not in rects:
                    break
                if(fire_x - size_fire, fire_y) not in rects:
                    break
            # Ignite all eight neighbours when fully inside the 800px field.
            if fire_x + size_fire < 800 and fire_x > 0 and fire_y + size_fire < 800 and fire_y - size_fire > 0:
                fire_room.fire_list.add(Fire(fire_x + size_fire , fire_y + size_fire, size_fire, size_fire, YELLOW))
                fire_room.fire_list.add(Fire(fire_x + size_fire , fire_y - size_fire, size_fire, size_fire, YELLOW))
                fire_room.fire_list.add(Fire(fire_x - size_fire , fire_y + size_fire, size_fire, size_fire, YELLOW))
                fire_room.fire_list.add(Fire(fire_x - size_fire , fire_y - size_fire, size_fire, size_fire, YELLOW))
                fire_room.fire_list.add(Fire(fire_x + 0 , fire_y + size_fire, size_fire, size_fire, YELLOW))
                fire_room.fire_list.add(Fire(fire_x + 0 , fire_y - size_fire, size_fire, size_fire, YELLOW))
                fire_room.fire_list.add(Fire(fire_x + size_fire, fire_y + 0, size_fire, size_fire, YELLOW))
                fire_room.fire_list.add(Fire(fire_x- size_fire , fire_y + 0, size_fire, size_fire, YELLOW))
                # Mark the burnt square in the occupancy grid (row = y, col = x).
                fire_room.fire_area[(fire_y - size_fire):(fire_y + size_fire),(fire_x - size_fire):(fire_x + size_fire)] = 1
            # Fire at the left edge jumps into the room to the left...
            if fire_x - size_fire <= 0 and fire_y + size_fire < 800 and fire_y - size_fire > 0:
                rooms[fire_room.room_left-1].fire_list.add(Fire(774 , fire_y, size_fire, size_fire, YELLOW))
            # ...and at the right edge into the room to the right.
            # NOTE(review): `fire_x - size_fire >= 780` looks like it was
            # meant to be `fire_x + size_fire >= 780` — confirm.
            if fire_x - size_fire >= 780 and fire_y + size_fire < 800 and fire_y - size_fire > 0:
                rooms[fire_room.room_right-1].fire_list.add(Fire(0 , fire_y, size_fire, size_fire, YELLOW))
def argumentParsing():
    """Parse the game's command-line options.

    Returns:
        dict: parsed arguments; key "customRoom" holds the path of the CSV
        file describing the custom room layout (default "room.csv").
    """
    ap = argparse.ArgumentParser()
    # ap.add_argument("-i", "--image", required=False, help="Path to the image", default="bg.jpg")
    ap.add_argument("-c", "--customRoom", required=False, help="CSV file where room details are saved (Room Name)", default="room.csv")
    args = vars(ap.parse_args())
    # Bug fix: the parsed arguments were previously discarded (the function
    # returned None), forcing main() to re-parse.  Return them instead.
    return args
def main():
    """Run the Maze Runner game.

    Parses the -c/--customRoom option, initializes pygame, builds the five
    rooms (four fixed plus one loaded from CSV), then runs the
    event / update / draw loop at 60 FPS until the window is closed.
    """
    ap = argparse.ArgumentParser()
    # ap.add_argument("-i", "--image", required=False, help="Path to the image", default="bg.jpg")
    ap.add_argument("-c", "--customRoom", required=False, help="CSV file where room details are saved (Room Name)", default="room.csv")
    args = vars(ap.parse_args())
    """ Main Program """
    # Call this function so the Pygame library can initialize itself
    pygame.init()
    # Create an 800x600 sized screen
    screen = pygame.display.set_mode([800, 600])
    # Set the title of the window
    pygame.display.set_caption('Maze Runner')
    # Create the player paddle object
    health = 100
    player = Player(50, 50)
    movingsprites = pygame.sprite.Group()
    movingsprites.add(player)
    # Rooms are addressed by their 1-based room_left/room_right numbers,
    # so lookups below subtract one.
    rooms = []
    room = Room1()
    rooms.append(room)
    room = Room2()
    rooms.append(room)
    room = Room3()
    rooms.append(room)
    room = Room4()
    rooms.append(room)
    room = customRoom(args["customRoom"])
    rooms.append(room)
    current_room_no = 0
    current_room = rooms[current_room_no]
    clock = pygame.time.Clock()
    done = False
    myfont = pygame.font.SysFont("monospace",16)
    while not done:
        # --- Event Processing ---
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                done = True
            # Arrow keys add a velocity impulse on press...
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_LEFT:
                    player.changespeed(-5, 0)
                if event.key == pygame.K_RIGHT:
                    player.changespeed(5, 0)
                if event.key == pygame.K_UP:
                    player.changespeed(0, -5)
                if event.key == pygame.K_DOWN:
                    player.changespeed(0, 5)
            # ...and the opposite impulse on release cancels the motion.
            if event.type == pygame.KEYUP:
                if event.key == pygame.K_LEFT:
                    player.changespeed(5, 0)
                if event.key == pygame.K_RIGHT:
                    player.changespeed(-5, 0)
                if event.key == pygame.K_UP:
                    player.changespeed(0, 5)
                if event.key == pygame.K_DOWN:
                    player.changespeed(0, -5)
        # --- Game Logic ---
        player.move(current_room.wall_list)
        # Walking off the left edge switches to the room's left neighbour.
        if player.rect.x < -15:
            current_room_no = current_room.room_left -1
            current_room = rooms[current_room_no]
            player.rect.x = 790
            '''
            if current_room_no == 0:
                current_room_no = 2
                current_room = rooms[current_room_no]
                player.rect.x = 790
            elif current_room_no == 2:
                current_room_no = 1
                current_room = rooms[current_room_no]
                player.rect.x = 790
            elif current_room_no == 3:
                current_room_no = 2
                current_room = rooms[current_room_no]
                player.rect.x = 790
            else:
                current_room_no = 0
                current_room = rooms[current_room_no]
                player.rect.x = 790
            '''
        # Walking off the right edge switches to the room's right neighbour.
        if player.rect.x > 801:
            current_room_no = current_room.room_right-1
            current_room = rooms[current_room_no]
            player.rect.x = 0
            '''
            if current_room_no == 0:
                current_room_no = 1
                current_room = rooms[current_room_no]
                player.rect.x = 0
            elif current_room_no == 1:
                current_room_no = 2
                current_room = rooms[current_room_no]
                player.rect.x = 0
            elif current_room_no == 2:
                current_room_no = 3
                current_room = rooms[current_room_no]
                player.rect.x = 0
            else:
                current_room_no = 0
                current_room = rooms[current_room_no]
                player.rect.x = 0
            '''
        # --- Drawing ---
        screen.fill(BLACK)
        movingsprites.draw(screen)
        current_room.wall_list.draw(screen)
        current_room.fire_list.draw(screen)
        print(np.sum(current_room.fire_area[player.rect.y:player.rect.y+15,player.rect.x:player.rect.x+15]) )
        #plt.imshow(current_room.fire_area)
        #plt.show()
        # Standing on any burning cell (fire_area == 1 under the 15x15
        # player footprint) drains one health point per frame.
        if np.sum(current_room.fire_area[player.rect.y:player.rect.y+15,player.rect.x:player.rect.x+15]) > 0:
            health = health - 1
            print(health)
        '''
        fire_x = random.randrange(0, 800)
        fire_y = random.randrange(0, 800)
        fire = Fire(fire_x , fire_y , 25, 25, YELLOW)
        current_room.fire_list.add(fire)
        '''
        # Spread fire across all rooms, not just the visible one.
        fire_spread(rooms)
        scoretext = myfont.render("Health = " + str(health), 2, (255,0,0))
        screen.blit(scoretext, (5,10))
        pygame.display.flip()
        # Cap the loop at 60 frames per second.
        clock.tick(60)
    pygame.quit()
if __name__ == "__main__":
    main()
| [
"[email protected]"
] | |
2ed1178cdd6efabe3fad3f746c8296c53dd4234a | 8cf7a165418330e9620608a60e1d84cb1c1f6036 | /app/migrations/0002_auto_20200717_1743.py | 0ca4c743bb8a0d2783f37f57751b4bae7d53d670 | [] | no_license | nicolasteodosio/pyfeedrss | c1352ccdcd775251a0242e36d904697844a74a22 | b45e57ec093d4c270d02487e4c159eebbb29c966 | refs/heads/master | 2023-08-09T12:36:19.857785 | 2020-08-05T04:27:25 | 2020-08-05T04:27:25 | 279,096,743 | 0 | 0 | null | 2021-09-22T19:25:52 | 2020-07-12T15:49:02 | Python | UTF-8 | Python | false | false | 465 | py | # Generated by Django 3.0.8 on 2020-07-17 17:43
from django.db import migrations, models
class Migration(migrations.Migration):
    """Make UserFollowFeed.disabled_at nullable and drop the UserRelItem model."""
    dependencies = [
        ("app", "0001_initial"),
    ]
    operations = [
        # Allow NULL so a follow that was never disabled has no timestamp.
        migrations.AlterField(
            model_name="userfollowfeed",
            name="disabled_at",
            field=models.DateTimeField(null=True, verbose_name="Disabled at"),
        ),
        migrations.DeleteModel(name="UserRelItem",),
    ]
| [
"[email protected]"
] | |
a8bb39da8875722f280db71679bfc2ae28a79e25 | 3715cc9bd1379aaba1b8ca5d791342fb58765982 | /book/migrations/0002_category_slug.py | 02d6e4cd51902dfe54626040a626cbab947ba4bc | [] | no_license | e-harsley/bookstore | c99edc0a36cda1347411fd9ec5bcf71a171086c2 | a6f7be5cec15de328bac278e7ffffb007f46b044 | refs/heads/master | 2020-07-30T11:45:39.444276 | 2019-09-22T22:19:22 | 2019-09-22T22:19:22 | 210,222,096 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 383 | py | # Generated by Django 2.2.5 on 2019-09-18 01:18
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add an optional ``slug`` field to the Category model."""
    dependencies = [
        ('book', '0001_initial'),
    ]
    operations = [
        # blank=True: the slug may be left empty in forms (e.g. when it is
        # generated from the name elsewhere).
        migrations.AddField(
            model_name='category',
            name='slug',
            field=models.SlugField(blank=True, max_length=250),
        ),
    ]
| [
"[email protected]"
] | |
f800fe38d134de835e5fd6b6c7afe7e28cc54460 | 9615307055f69e2226bb6540a5390b9c3875fd11 | /src/main.py | 7a98233a2573e7d38d2d77e13a930dfdae78af77 | [] | no_license | Admirahalili/TP04-tris-et-recherches-solutions | 6974337592a60faa8359c8e37fdb3d1fff458830 | 27d3952e0495216034bf788cba6b93a783a45c46 | refs/heads/master | 2023-01-15T06:08:13.988415 | 2020-11-20T08:26:40 | 2020-11-20T08:26:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,153 | py | import random
import time
from src.metier.LinkedList import LinkedList
# Afin d'avoir tout le temps les mêmes nombres aléatoires
random.seed(10)
def is_sorted(lk: "LinkedList"):
    """Return True if the values of *lk* are in non-decreasing order.

    An empty or single-element list is considered sorted.  The list is
    walked through its ``get(i)`` accessor, comparing neighbours.
    """
    # Guard: get(0) on an empty list would fail, and an empty list is
    # trivially sorted.
    if lk.size() == 0:
        return True
    previous = lk.get(0)
    for i in range(1, lk.size()):
        current = lk.get(i)
        if previous > current:
            return False
        previous = current
    return True
def current_time():
    """Return the current wall-clock time, rounded to whole milliseconds."""
    millis = time.time() * 1000
    return int(round(millis))
def populate(lk: "LinkedList", n: int):
    """Append *n* pseudo-random integers drawn uniformly from [0, 1000].

    Draws come from the module-level ``random`` generator, so the sequence
    is reproducible after ``random.seed()``.
    """
    # Idiom cleanup: range(n) instead of range(0, n) and ``_`` for the
    # unused loop variable.
    for _ in range(n):
        lk.add(random.randint(0, 1000))
if __name__ == '__main__':
    # Vos tests ici
    # Benchmark each sort implementation on a fresh list of 1000 random
    # integers, then compare the search strategies on 100000 elements.
    lk = LinkedList()
    n: int = 1000
    print("========== BAD INSERTION SORT ==========")
    lk.clear()
    populate(lk, n)
    t = current_time()
    lk.bad_insertion_sort()
    print("Temps : {} ms\n".format(current_time() - t))
    print("Tableau trié ? ", is_sorted(lk))
    print("========== INSERTION SORT ==========")
    lk.clear()
    populate(lk, n)
    t = current_time()
    lk.insertion_sort()
    print("Temps : {} ms\n".format(current_time() - t))
    print("Tableau trié ? ", is_sorted(lk))
    print("========== BAD MERGE SORT ==========")
    lk.clear()
    populate(lk, n)
    t = current_time()
    lk.bad_merge_sort()
    print("Temps : {} ms\n".format(current_time() - t))
    print("Tableau trié ? ", is_sorted(lk))
    print("========== MERGE SORT ==========")
    lk.clear()
    populate(lk, n)
    t = current_time()
    lk.merge_sort()
    print("Temps : {} ms\n".format(current_time() - t))
    print("Tableau trié ? ", is_sorted(lk))
    print("========== BEST MERGE SORT ==========")
    lk.clear()
    populate(lk, n)
    t = current_time()
    lk.best_merge_sort()
    print("Temps : {} ms\n".format(current_time() - t))
    print("Tableau triée ? ", is_sorted(lk))
    # Search benchmarks: first on an unsorted random list...
    n = 100000
    lk.clear()
    populate(lk, n)
    value_to_search = lk.get(random.randint(0, lk.size() - 1))
    print("========== LINEAR SEARCH (on unsorted list) ==========")
    t = current_time()
    index = lk.index_of(value_to_search)
    print("Valeur à rechercher: {}".format(value_to_search))
    print("Indice trouvé: {}".format(index))
    print("Valeur équivalente à l'indice trouvé: {}".format(lk.get(index)))
    print("Temps : {} ms\n".format(current_time() - t))
    # ...then on a sorted list (0..n-1) so dichotomic search is applicable.
    lk.clear()
    for i in range(n):
        lk.add(i)
    value_to_search = lk.get(random.randint(0, lk.size() - 1))
    print("========== LINEAR SEARCH (on sorted list) ==========")
    t = current_time()
    index = lk.index_of(value_to_search)
    print("Valeur à rechercher: {}".format(value_to_search))
    print("Indice trouvé: {}".format(index))
    print("Valeur équivalente à l'indice trouvé: {}".format(lk.get(index)))
    print("Temps : {} ms\n".format(current_time() - t))
    print("========== DICHOTOMIC SEARCH (on sorted list, of course) ==========")
    t = current_time()
    index = lk.dichotomic_search(value_to_search)
    print("Valeur à rechercher: {}".format(value_to_search))
    print("Indice trouvé: {}".format(index))
    print("Valeur équivalente à l'indice trouvé: {}".format(lk.get(index)))
    print("Temps : {} ms\n".format(current_time() - t))
    print()
| [
"[email protected]"
] | |
d64ccd406c3d824f483f1d275df27a7100b9be5c | 59ca38358bafc2d15a765ab2d252141a36029fbb | /python_starter_homework/002_variables_and_data_types/002_homework_1.py | 4d26ee05c1254e684e38033e81349f75f65f13f9 | [] | no_license | Nilvalus/fawor | bfe3ebd0cade88533e8de3fbaddd87f6ca7b36d1 | 9b41d424c40f39e4ab542dec811148bc6a0e4539 | refs/heads/master | 2023-04-02T00:32:08.639146 | 2021-04-15T14:30:56 | 2021-04-15T14:30:56 | 357,622,558 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 98 | py | word_1 = input('Enter word_1: ')
# Read a second word and print both, comma-separated (word_1 is read above).
word_2 = input('Enter word_2: ')
print(word_1, word_2, sep=',')
| [
"[email protected]"
] | |
074e63b7239fd2b5268e020e79b4ad67bb87cbf1 | 78065c7d3446d1692b7e6580b1e2a37ed5682a09 | /advent07b.py | 5d08c345cfe1a17890901889c371b3f84f8f7aa1 | [] | no_license | RobertMarch/AdventOfCode2020 | 6c561c04b70ce8567660521d7aeb3f476ed59e82 | 3bb03542d4497629fdb161108418edf862c8a2a6 | refs/heads/main | 2023-02-03T22:22:55.260252 | 2020-12-25T08:51:20 | 2020-12-25T08:51:20 | 317,649,434 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,099 | py | from TestRunner import run_tests
from inputs.input07 import puzzle_input, test_cases_b
def solve(input):
    """Advent of Code 2020 day 7, part 2.

    Parses bag-containment rules (one per line, e.g. "light red bags
    contain 1 bright white bag, 2 muted yellow bags.") and returns the
    total number of bags a single "shiny gold" bag must contain.
    """
    parent_to_children = {}
    for line in input.split('\n'):
        if len(line.strip()) > 0:
            parent, children = line.split(' contain ')
            # A colour is always the first two words ("shiny gold bags...").
            parent_colour = ' '.join(parent.split(' ')[0:2])
            parent_to_children[parent_colour] = []
            if children == 'no other bags.':
                continue
            # Bug fix: str.replace returns a new string; the original call
            # discarded its result, so the trailing '.' was never stripped.
            children = children.replace('.', '')
            for child in children.split(', '):
                words = child.split(' ')
                quantity = int(words[0])
                colour = ' '.join(words[1:3])
                parent_to_children[parent_colour].append([quantity, colour])
    return count_child_bags("shiny gold", parent_to_children)
def count_child_bags(colour, parent_to_children):
    """Recursively count every bag nested inside one bag of *colour*."""
    children = parent_to_children[colour]
    # Each child contributes itself (the +1) plus its own contents.
    return sum([c_num * (1 + count_child_bags(c_colour, parent_to_children)) for [c_num, c_colour] in children])
if __name__ == "__main__":
    # Check the provided part-2 examples first, then solve the real input.
    run_tests(test_cases_b, solve)
    print(solve(puzzle_input))
| [
"[email protected]"
] | |
6cc874cdde8d748e0b13f3429e6b7ab39dd99f15 | 583a20c2d091fff051dce5b70d9ed3885ed7b889 | /abnormal.py | 19ec072b5b4a2dadb3bbf045b76ab107908d9d59 | [] | no_license | SharonFei/python | 132770b4a93d64928cd2ef84cec7f1a0641e83de | 3348046cdccaaf99144ca1293f2dcee9fe822dcc | refs/heads/master | 2020-04-05T00:18:46.390095 | 2018-11-18T15:23:10 | 2018-11-18T15:23:10 | 156,392,047 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 87 | py | try:
open("abc.text",'r')
except FileNotFoundError:
print("异常了!")
| [
"[email protected]"
] | |
c003c4c28af9344b6c4375fb8423478434fe9cb8 | cfa851dd7dfe63609c42eede76011cc02f553289 | /pyxsim/utils.py | dc616ce0f45188316f048b6bfd4314383c7e797d | [
"BSD-3-Clause"
] | permissive | Joeybraspenning/pyxsim | 8be218ac88cf0607534fe2b10e58d56a7f438995 | 6e06bd87c721580fa7fea2d363a2cc4c6b35ca70 | refs/heads/main | 2023-08-27T02:55:41.048236 | 2021-09-16T20:23:30 | 2021-09-16T20:23:30 | 410,861,487 | 0 | 0 | NOASSERTION | 2021-09-27T11:44:11 | 2021-09-27T11:44:11 | null | UTF-8 | Python | false | false | 4,830 | py | from unyt import unyt_array, unyt_quantity
from astropy.units import Quantity
import logging
from more_itertools import always_iterable
import numpy as np
# Package-wide logger: a single stream handler writing "name : [LEVEL] time msg".
pyxsimLogger = logging.getLogger("pyxsim")
ufstring = "%(name)-3s : [%(levelname)-9s] %(asctime)s %(message)s"
# NOTE(review): cfstring is not used in this module — presumably consumed
# elsewhere in the package (or dead); confirm before removing.
cfstring = "%(name)-3s : [%(levelname)-18s] %(asctime)s %(message)s"
pyxsim_sh = logging.StreamHandler()
# create formatter and add it to the handlers
formatter = logging.Formatter(ufstring)
pyxsim_sh.setFormatter(formatter)
# add the handler to the logger
pyxsimLogger.addHandler(pyxsim_sh)
pyxsimLogger.setLevel('INFO')
# Don't duplicate records through the root logger's handlers.
pyxsimLogger.propagate = False
mylog = pyxsimLogger
def parse_value(value, default_units, ds=None):
    """Coerce *value* into a unyt quantity expressed in *default_units*.

    Accepts an astropy Quantity, a unyt quantity, a (magnitude, units)
    tuple, or a bare number (assumed to already be in *default_units*).
    When a dataset *ds* is supplied, its ``quan`` factory is used so the
    result carries the dataset's unit registry.
    """
    if isinstance(value, Quantity):
        # Normalize astropy quantities to unyt up front so the branches
        # below only deal with unyt types.
        value = unyt_quantity.from_astropy(value)
    quan = unyt_quantity if ds is None else ds.quan
    if isinstance(value, unyt_quantity):
        return quan(value.v, value.units).in_units(default_units)
    if isinstance(value, tuple):
        return quan(value[0], value[1]).in_units(default_units)
    return quan(value, default_units)
def isunitful(a):
    """Return True if *a* carries (or can be interpreted with) units.

    True for astropy quantities, unyt arrays, and (magnitude, units)
    tuples that unyt can parse; False otherwise.
    """
    if isinstance(a, (Quantity, unyt_array)):
        return True
    elif isinstance(a, tuple):
        try:
            unyt_array(a[0], a[1])
            return True
        # Narrowed from a bare ``except:``, which also swallowed
        # KeyboardInterrupt/SystemExit.  A tuple that unyt rejects simply
        # is not unitful.
        except Exception:
            pass
    return False
def ensure_list(obj):
    """Return *obj* as a list: scalars become [obj], iterables are expanded."""
    return [*always_iterable(obj)]
def validate_parameters(first, second, skip=None):
    """Check that two HDF5 parameter groups hold identical values.

    Args:
        first, second: mappings of parameter name -> h5py dataset
            (scalar values are read with ``[()]``).
        skip: optional list of parameter names excluded from the value
            comparison (they must still be present in both inputs).

    Raises:
        RuntimeError: if the key sets differ, or any non-skipped value
            differs between the two inputs.
    """
    if skip is None:
        skip = []
    keys1 = sorted(first.keys())
    keys2 = sorted(second.keys())
    if keys1 != keys2:
        raise RuntimeError("The two inputs do not have the same parameters!")
    for key in keys1:
        if key in skip:
            continue
        v1 = first[key][()]
        # Bug fix: the second value was previously read from *first* as
        # well (``first[k2]`` with k1 == k2), so the value comparison
        # always passed trivially.  Read it from *second*.
        v2 = second[key][()]
        if isinstance(v1, (str, bytes)) or isinstance(v2, (str, bytes)):
            check_equal = v1 == v2
        else:
            check_equal = np.allclose(np.array(v1), np.array(v2), rtol=0.0, atol=1.0e-10)
        if not check_equal:
            raise RuntimeError(f"The values for the parameter '{key}' in the two inputs"
                               f" are not identical ({v1} vs. {v2})!")
def merge_files(input_files, output_file, overwrite=False,
                add_exposure_times=False):
    r"""
    Helper function for merging PhotonList or EventList HDF5 files.
    Parameters
    ----------
    input_files : list of strings
        List of filenames that will be merged together.
    output_file : string
        Name of the merged file to be outputted.
    overwrite : boolean, default False
        If the output file already exists, set this to True to
        overwrite it.
    add_exposure_times : boolean, default False
        If set to True, exposure times will be added together. Otherwise,
        the exposure times of all of the files must be the same.
    Examples
    --------
    >>> from pyxsim import merge_files
    >>> merge_files(["events_0.h5","events_1.h5","events_3.h5"], "events.h5",
    ...             overwrite=True, add_exposure_times=True)
    Notes
    -----
    Currently, to merge files it is mandated that all of the parameters have the
    same values, with the exception of the exposure time parameter "exp_time". If
    add_exposure_times=False, the maximum exposure time will be used.
    """
    from collections import defaultdict
    from pathlib import Path
    import h5py
    if Path(output_file).exists() and not overwrite:
        raise IOError(f"Cannot overwrite existing file {output_file}. "
                      "If you want to do this, set overwrite=True.")
    # Copy all parameters except the exposure time from the first file;
    # the exposure time is written later once it has been accumulated.
    f_in = h5py.File(input_files[0], "r")
    f_out = h5py.File(output_file, "w")
    # NOTE(review): if no parameter name ends with "exp_time", this stays ""
    # and the lookups below would fail — confirm inputs always carry one.
    exp_time_key = ""
    p_out = f_out.create_group("parameters")
    for key, param in f_in["parameters"].items():
        if key.endswith("exp_time"):
            exp_time_key = key
        else:
            p_out[key] = param[()]
    # When exposure times are being summed they may legitimately differ,
    # so exclude them from the consistency check.
    skip = [exp_time_key] if add_exposure_times else []
    for fn in input_files[1:]:
        f = h5py.File(fn, "r")
        validate_parameters(f_in["parameters"], f["parameters"], skip=skip)
        f.close()
    f_in.close()
    # Gather the data arrays from every input and total the exposure time
    # (sum, or max when they are required to be equal).
    data = defaultdict(list)
    tot_exp_time = 0.0
    for i, fn in enumerate(input_files):
        f = h5py.File(fn, "r")
        if add_exposure_times:
            tot_exp_time += f["/parameters"][exp_time_key][()]
        else:
            tot_exp_time = max(tot_exp_time, f["/parameters"][exp_time_key][()])
        for key in f["/data"]:
            data[key].append(f["/data"][key][:])
        f.close()
    p_out[exp_time_key] = tot_exp_time
    d = f_out.create_group("data")
    for k in data:
        d.create_dataset(k, data=np.concatenate(data[k]))
    f_out.close()
| [
"[email protected]"
] | |
4b5b0c732f138117397dd992edd522768a7d5e05 | 945929aca88ab7614e16284756a118c71ca87259 | /yu_c/Chapter11_ClassandObjects/P38_test6.py | d7a904aa713d94e991fe5749eddaa34fa1114668 | [] | no_license | XIAOQUANHE/pythontest | 344c221b688f80e0ef5a3d3d71ffa48759e09ac1 | 8a02508fd66b13c6c41a689a65adac53aeb7361a | refs/heads/master | 2023-01-24T22:25:57.278411 | 2020-11-30T05:43:31 | 2020-11-30T05:43:31 | 290,959,762 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 482 | py | class A():
    def __init__(self):
        # Diamond root: prints on entry/exit so the cooperative MRO call
        # order is visible when a subclass instance is constructed.
        print("进入A…")
        print("离开A…")
class B(A):
    # First parent of D.  Note: super() delegates to the NEXT class in the
    # instance's MRO (C when constructing a D), not necessarily to A.
    def __init__(self):
        print("进入B…")
        super().__init__()
        print("离开B…")
class C(A):
    # Second parent of D; its super() call reaches A in D's MRO.
    def __init__(self):
        print("进入C…")
        super().__init__()
        print("离开C…")
class D(B,C):
    # Diamond tip: the MRO is D -> B -> C -> A, so each __init__ runs
    # exactly once thanks to cooperative super() calls.
    def __init__(self):
        print("进入D…")
        super().__init__()
        print("离开D…")
# Show the method resolution order, then instantiate D to trace the calls.
print(D.__mro__)
d = D()
| [
"[email protected]"
] | |
30a5df9a0c5ccdec99a21ea9caf6ae1df5159373 | 608f6a465f280ada279527076ed73aa83db05ebe | /ed_moss_app/urls.py | 1d0976510351c08bf2291f79ededb1cc70b41268 | [] | no_license | mosster/ed-moss-app | 00a67380afe2630e9f0185aeacff93a09b0abaae | 6b4a479e715aa7dc2df48f28513998d442612184 | refs/heads/main | 2023-05-08T09:12:33.882494 | 2021-06-02T16:12:57 | 2021-06-02T16:12:57 | 373,230,474 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 973 | py | """ed_moss_app URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from django.views.decorators.csrf import csrf_exempt
from graphene_django.views import GraphQLView
from backend.schema import schema
urlpatterns = [
    # Django admin site.
    path('admin/', admin.site.urls),
    # Single GraphQL endpoint, CSRF-exempt so external clients can POST,
    # with the in-browser GraphiQL UI enabled.
    path('graphql', csrf_exempt(GraphQLView.as_view(graphiql=True, schema=schema))),
]
| [
"[email protected]"
] | |
004a3a90a0434cf68ce7b53cd91acd1970560753 | c3426bb7730c3783106dda4ec4eb04be591c3c83 | /lodky_animacia.py | fea68f79b8995677365ba4393d40a56beed96226 | [] | no_license | TerezaVranova/kvinta-informatika | 898c257ec2fe3dc4817ce7bd5bf33160d1b3b35f | d4841c7255ff48a1158e3f9e2207602dce765512 | refs/heads/master | 2020-08-26T13:38:51.928045 | 2020-01-08T10:09:59 | 2020-01-08T10:09:59 | 217,028,213 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,523 | py | import tkinter
# Module-level drawing surface shared by every helper below; packing it
# realizes the Tk window as a side effect of importing this module.
canvas=tkinter.Canvas(width=800,height=600,background="white")
canvas.pack()
import random
def more():
    """Paint the sea: a blue rectangle covering the lower half of the canvas."""
    sea = (0, 300, 800, 600)
    canvas.create_rectangle(*sea, fill="blue")
def mesiac(x=500,y=random.randint(50,170),pozadie="white",farba="yellow",r=40):
    # Crescent moon: a full disc in `farba`, partially masked by a second
    # disc in the background colour shifted right by 0.65*r.
    # NOTE(review): the default y is drawn ONCE, at import time (Python
    # evaluates defaults at def time) — every default call reuses the
    # same value.  That keeps the default moon stable across frames.
    canvas.create_oval(x-r,y-r,x+r,y+r,fill=farba,outline=farba)
    canvas.create_oval(x+r*0.65-r,y-r,x+r*0.65+r,y+r,fill=pozadie,outline=pozadie)
def odraz(x=500,y=random.randint(50,170),r=40):
    # Moon plus its reflection mirrored across the canvas midline (y=300):
    # the mirrored copy at 600-y is drawn over the blue sea background.
    # NOTE(review): like mesiac(), the default y is fixed at import time.
    mesiac(x,y,"white","yellow",r)
    mesiac(x,600-y,pozadie="blue",farba="yellow",r=40)
def vlajka(x,y,farba="darkgreen"):
    # Flag on a mound: a 300x300 brown oval, a pole rising from its
    # centre up to y=50, and a rectangular banner in `farba` at the top.
    canvas.create_oval(x,y,x+300,y+300,fill="brown")
    canvas.create_line(x+150,50,x+150,y)
    canvas.create_rectangle(x+150,50,x+300,150,fill=farba)
def mesiac_obrateny(x,y,pozadie="white",farba="yellow",r=50):
    # Mirror image of mesiac(): the masking disc overlaps from the left,
    # so the crescent opens the other way.
    canvas.create_oval(x-r,y-r,x+r,y+r,fill=farba,outline=farba)
    canvas.create_oval(x-r*0.65-r,y-r,x-r*0.65+r,y+r,fill=pozadie,outline=pozadie)
def logo(x,y,r=50,farba="light sky blue",pozadie="red"):
    # Emblem made of two back-to-back crescents centred 1.125*r either
    # side of x.  Note the colours are passed positionally as
    # (pozadie, farba) into the moon helpers.
    mesiac(x+r*1.125,y,pozadie,farba,r)
    mesiac_obrateny(x-r*1.125,y,pozadie,farba,r)
def lodka(x,y,velkost):
    # Boat scaled by `velkost`: mast (brown rectangle), triangular white
    # sail, trapezoidal hull, and the crescent logo painted on the hull.
    # Base dimensions: mast width a, mast height b, logo radius r.
    a=10
    b=100
    r=8
    canvas.create_rectangle(x,y,x+velkost*a,y+velkost*b,fill="brown",outline="brown")
    canvas.create_polygon(x+velkost*a/2,y,x+velkost*3*a,y+velkost*b/2,x+velkost*a/2,y+velkost*b*2/3,fill="white",outline="black")
    canvas.create_polygon(x-velkost*b/2,y+velkost*b*3/4,x+velkost*b/2+velkost*a,y+velkost*b*3/4,x+velkost*b/4+velkost*a,y+velkost*b,x-velkost*b/4,y+velkost*b,fill="saddle brown",outline="black")
    logo(x+velkost*a/2,y+velkost*b*7/8,r*velkost,farba="light sky blue",pozadie="saddle brown")
def flotila():
    """Draw a fleet of three boats; larger boats sit further left.

    Boat a (size 1..3) is drawn at x = 500 - 100*a, y = 200.
    """
    # Dead-code fix: the original pre-loop ``a = 0`` / ``x = 500 - 100*a``
    # assignments were immediately overwritten by the loop and had no effect.
    for a in range(1, 4):
        x = 500 - 100 * a
        lodka(x, 200, a)
def obraz():
    """Compose the static scene: two flags, the sea, a small red moon,
    the default moon with its reflection, and the logo emblem."""
    vlajka(-50, 275, "darkgreen")
    vlajka(480, 275, "red")
    more()
    mesiac(175, 100, "darkgreen", "red", 30)
    odraz()
    logo(705, 100, 20, "light sky blue", "red")
    # flotila()  # the fleet is left out of the base scene
def mesiacmesiac():
    """Animate the moon (and its reflection) rising for 1000 frames."""
    moon_y = 300
    for _ in range(1000):
        canvas.delete("all")
        obraz()
        odraz(500, moon_y, 40)
        moon_y -= 1
        canvas.update()
        canvas.after(10)
#mesiacmesiac()
def lodky():
    """Animate three boats sailing right at different speeds, 1000 frames."""
    x_big, x_mid, x_small = 500, 400, 300
    for _ in range(1000):
        canvas.delete("all")
        obraz()
        lodka(x_small, 200, 1)
        lodka(x_mid, 200, 2)
        lodka(x_big, 200, 3)
        # Bigger boats move faster, giving a simple parallax effect.
        x_big += 10
        x_mid += 8
        x_small += 6
        canvas.update()
        canvas.after(10)
# Start the boat animation as soon as the module runs.
lodky()
| [
"[email protected]"
] | |
012eac1546e62e0cae2826c9ab332fc30f68b114 | eeb6b1a11302be76874a01c39142f0904367c2b3 | /src/core/engine/state.py | a29ab1c2437dfca431aeb4bc67f38ebb155ef805 | [] | no_license | ArtemUgrimov/SlotServer | 90f54bbca47ab56c93be221a249782fed52ad030 | aaee6c9d157d4df35af89b2f27b4cd306ebdbb2d | refs/heads/master | 2021-09-19T07:06:55.906991 | 2018-07-24T15:08:08 | 2018-07-24T15:08:08 | 106,122,081 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 161 | py |
class State:
    """Minimal engine-state base: a name plus enter/exit lifecycle hooks."""
    def __init__(self, state_name):
        # Identifier used by the owning state machine.
        self.state_name = state_name
    def on_enter(self):
        # Hook: called when this state becomes active; subclasses override.
        pass
    def on_exit(self):
pass | [
"[email protected]"
] | |
411730ae49148b1dca43cfcf1fb1120bba3265b4 | 3a2e01fe2763bb44922b4df6e342ec9ea3551868 | /concrete.py | f68e4a172dc99876dafda5ca45e3da4347851f75 | [] | no_license | chandanbrahma/neural-network | b173dcf42885b30e5ecfcc7cd2080952b2f3f35d | 1e825a213d16fbe89de4f14ca91a64600fff8ef1 | refs/heads/main | 2023-01-07T03:26:20.038522 | 2020-11-03T17:53:34 | 2020-11-03T17:53:34 | 309,765,333 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,412 | py | ## importing the dataset
import pandas as pd
data=pd.read_csv('E:\\assignment\\neural network\\concrete.csv')
# Quick inspection of the raw dataset.
data.head()
data.describe()
data.info()
## so we have 1030 rows; 8 feature columns plus the strength target are used below
##normalizing the data (min-max scaling to [0, 1])
data_new=(data-data.min())/(data.max()-data.min())
data_new.head()
data_new.describe()
##selecting the target variables and the predictors
x=data_new.iloc[:,:8]
y=data_new.iloc[:,8]
##training and testing (70/30 split)
from sklearn.model_selection import train_test_split
x_train,x_test,y_train,y_test=train_test_split(x,y,test_size=0.3)
##building and Training our First neural network
# NOTE(review): a 'relu' output activation cannot emit negative values;
# acceptable here only because the target was scaled to [0, 1] — confirm.
from keras.models import Sequential
from keras.layers import Dense
model = Sequential([Dense(50, activation='relu', input_shape=(8,)),Dense(20, activation='relu'),Dense(1, activation='relu'),])
model.compile(loss="mean_squared_error",optimizer = "adam",metrics = ["mse"]) ##calculating the errors so accordingly weights will be assigned
import numpy as np
first_model = model
first_model.fit(np.array(x_train),np.array(y_train),epochs=10)
y_pred = first_model.predict(np.array(x_test))
y_pred = pd.Series([i[0] for i in y_pred])
rmse_value = np.sqrt(np.mean((y_pred-y_test)**2))
import matplotlib.pyplot as plt
# Scatter of predictions vs. actuals, then their correlation.
plt.plot(y_pred,y_test,"bo")
np.corrcoef(y_pred,y_test)
##so we got a corelation of 0.860 | [
"[email protected]"
] | |
56b59620d9dc8ccfa21e2140cfdc3cd9375545cb | 63bae38d352462f437a5ab40c4807e1714efdda4 | /PT_rewrite/views.py | 03354bbead5f84ab6ab1ef902da7cb798e66f64a | [] | no_license | yogesh-pro/pt | 77091c6ffc094850fc59a4b918a3854858755995 | 2e0d40924c518283ec98b4a079f960020d29f5be | refs/heads/main | 2022-12-28T08:35:55.902205 | 2020-10-08T08:43:21 | 2020-10-08T08:43:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 225 | py | from django.shortcuts import render
def home(request):
    """Render the landing page."""
    return render(request,'index.html')
def table(request):
    """Render the interactive periodic table page."""
    return render(request,'periodic_table.html')
def about(request):
    """Render the about page."""
    return render(request,'about.html')
| [
"[email protected]"
] | |
a8b269b2648bd093e81c37c7f5d75eeb801f4c90 | 87917d36e5f0c500b71a78819e99cb5805f6819b | /tests/application.py | d52834b56ac3241605f1ae2af2525eea887fde4c | [] | no_license | nswalters/AppTrakz-API | 982c7595c60fcdb42fb0e9a41d4b290555e3e6f4 | 12cb34b9a290a8445e8baaa6c454dd386a01fe29 | refs/heads/main | 2023-03-24T00:58:14.235690 | 2021-03-22T01:12:03 | 2021-03-22T01:12:03 | 342,913,270 | 0 | 0 | null | 2021-03-22T01:12:04 | 2021-02-27T17:15:28 | Python | UTF-8 | Python | false | false | 4,265 | py | from django.contrib.auth.models import User
import json
from rest_framework import status
from rest_framework.authtoken.models import Token
from rest_framework.test import APITestCase
from apptrakzapi.models import Company, Status
class ApplicationTests(APITestCase):
    """API tests for job applications: creation and status updates.

    setUp builds the full fixture chain through the HTTP API itself:
    user -> company -> job -> the two Status rows the tests rely on.
    The fixtures are created in order, so ids 1 are assumed throughout.
    """
    def setUp(self) -> None:
        """
        Configure initial requirements for Company Tests
        """
        # Create our user
        url = "/register"
        data = {
            "username": "testuser",
            "email": "[email protected]",
            "password": "testpassword",
            "first_name": "test",
            "last_name": "user"
        }
        response = self.client.post(url, data, format='json')
        json_response = json.loads(response.content)
        # Keep the auth token for every subsequent authenticated request.
        self.token = json_response["token"]
        self.userID = json_response["id"]
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        # create a company to add the job to
        url = "/companies"
        data = {
            "name": "TestCompany",
            "address1": "1234 Test St",
            "address2": "suite 999",
            "city": "Testing",
            "state": "TG",
            "zipcode": 12345,
            "website": "https://www.test.com"
        }
        self.client.credentials(HTTP_AUTHORIZATION='Token ' + self.token)
        response = self.client.post(url, data, format='json')
        # NOTE(review): this json_response is never used — kept only as a
        # parse check / debugging aid.
        json_response = json.loads(response.content)
        # NOTE(review): the API returns 200 (not 201) on these creates.
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        # Create the job to apply to
        url = "/jobs"
        data = {
            "company": 1,
            "role_title": "TestRole",
            "type": "Test",
            "qualifications": "TestQuals",
            "post_link": "https://www.testpostlink.com",
            "salary": None,
            "description": "Just a test to create a job."
        }
        self.client.credentials(HTTP_AUTHORIZATION='Token ' + self.token)
        response = self.client.post(url, data, format='json')
        json_response = json.loads(response.content)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        # Create our initial 'Applied' status
        new_status = Status.objects.create(name='Applied')
        new_status.save()
        # Create a secondary status to verify updates
        second_status = Status.objects.create(name='Did not Move On')
        second_status.save()
    def test_create_new_job_application(self):
        """
        Verify we can create a new job application.
        """
        url = "/applications"
        data = {
            "job": 1
        }
        self.client.credentials(HTTP_AUTHORIZATION='Token ' + self.token)
        response = self.client.post(url, data, format='json')
        json_response = json.loads(response.content)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        # A fresh application gets the 'Applied' status automatically.
        self.assertEqual(json_response["id"], 1)
        self.assertEqual(json_response["statuses"][0]["id"], 1)
        self.assertEqual(json_response["statuses"][0]["name"], "Applied")
        self.assertEqual(json_response["job"]["id"], 1)
        self.assertEqual(json_response["job"]["role_title"], "TestRole")
    def test_update_job_application(self):
        """
        Verify we can update a job application.
        """
        # Reuse the creation test to seed application id 1.
        self.test_create_new_job_application()
        url = "/applications/1"
        data = {
            "is_active": False,
            "status": 2,
            "reason": "None Given"
        }
        self.client.credentials(HTTP_AUTHORIZATION='Token ' + self.token)
        response = self.client.put(url, data, format='json')
        json_response = json.loads(response.content)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        # Status history is append-only: 'Applied' stays first, the new
        # 'Did not Move On' status is appended second.
        self.assertEqual(json_response["id"], 1)
        self.assertEqual(json_response["statuses"][0]["id"], 1)
        self.assertEqual(json_response["statuses"][0]["name"], "Applied")
        self.assertEqual(json_response["statuses"][1]["id"], 2)
        self.assertEqual(json_response["statuses"]
                         [1]["name"], "Did not Move On")
        self.assertEqual(json_response["job"]["id"], 1)
        self.assertEqual(json_response["job"]["role_title"], "TestRole")
| [
"[email protected]"
] | |
400af35cb0e7b467098d199040f510e8489b5184 | a1d9d43d110cc50b4007f08777c21cc6a603fd19 | /blog/blog/asgi.py | 3d80cd0c5d96f185ab1fd775db1d01976d7a4596 | [] | no_license | EverydayLearner254/basic_blog | 2f5b05742c8cb92ff32be4bab423288660d8280f | cc4f9a2f06a90ccf3141f41e287adafb730cdd9c | refs/heads/master | 2022-10-03T07:04:24.436981 | 2020-05-05T00:56:20 | 2020-05-05T00:56:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 399 | py | """
ASGI config for Blog_Master project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
# NOTE(review): the module points at 'Blog_Master.settings' although this
# file lives under blog/blog/ — confirm the settings path matches the
# actual project package before deploying.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'Blog_Master.settings')
application = get_asgi_application()
| [
"[email protected]"
] | |
807b224716b7742d4bd308bdf0a7c644e5f29c6c | 6bc77013027222ffc4ecb1524729a2ef580d78b3 | /olfactory/containers/spans/fragment.py | 49e6f1b52e51bf1fef243eb05263776779f2e5fd | [
"Apache-2.0"
] | permissive | OctaveLauby/olfactory | 819ae759637e36df460daa6447ca6f5b990fea2f | 679b67459c12002041a8f77e1bdffe33d776500b | refs/heads/master | 2020-04-30T22:11:04.845805 | 2019-03-22T14:38:39 | 2019-03-22T14:38:39 | 177,112,233 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,094 | py | import numpy as np
from datetime import timedelta
from olutils import float2dt
from .span import SpanBase
class Fragment(np.ndarray, SpanBase):
    """Fragment of series descriptor.

    A float ndarray of (time) values with helpers describing its span.
    NOTE(review): as an ndarray subclass, slices/views are created without
    calling __init__, so ``_is_empty`` may be missing on derived arrays
    (no __array_finalize__ is defined) — confirm intended usage.
    """
    def __new__(cls, a):
        # View the input as a float Fragment without copying when possible.
        obj = np.asarray(a).astype(float).view(cls)
        return obj
    def __init__(self, x):
        """Init a fragment instance of x series
        Args:
            x (np.array)
        """
        self._is_empty = len(x) == 0
    # ----------------------------------------------------------------------- #
    # Quick access
    @property
    def is_empty(self):
        return self._is_empty
    @property
    def fst(self):
        # First value, or NaN for an empty fragment.
        return self[0] if not self.is_empty else np.nan
    @property
    def lst(self):
        # Last value, or NaN for an empty fragment.
        return self[-1] if not self.is_empty else np.nan
    @property
    def n_pts(self):
        return len(self)
    @property
    def span(self):
        return self.lst - self.fst
    @property
    def step(self):
        # Average spacing between consecutive points; NaN when the
        # fragment has fewer than two points (division error is trapped).
        with np.errstate(divide='raise'):
            try:
                return self.span / (self.n_pts-1)
            except (ZeroDivisionError, FloatingPointError):
                return np.nan
    # ----------------------------------------------------------------------- #
    # Processing
    def split(self, indexes, share=False):
        """Split at the given indexes into sub-fragments.

        With share=True the boundary point belongs to both neighbouring
        pieces (the bool is used as an integer slice offset below).
        Index 0 is ignored; a trailing piece is added unless the last
        split index is already the final point.
        """
        result = []
        lst_i = 0
        for i in sorted(set(indexes)):
            if i == 0:
                continue
            result.append(Fragment(self[lst_i:(i+share)]))
            lst_i = i
        if lst_i != self.n_pts - 1:
            result.append(Fragment(self[lst_i:]))
        return result
    # ----------------------------------------------------------------------- #
    # Utils
    def __repr__(self):
        return self.__str__()
    def __str__(self):
        # float2dt renders the boundary values as datetimes, so the values
        # are assumed to be epoch-seconds floats here.
        if self.is_empty:
            return "<Empty Frag>"
        return "<Frag | {fst} to {lst} | {n_pts} pts | step={step}>".format(
            fst=float2dt(self.fst),
            lst=float2dt(self.lst),
            n_pts=self.n_pts,
            step=timedelta(seconds=int(self.step)),
        )
| [
"[email protected]"
] | |
282a44f9438e0c71d27fa942358378c13ac33b5d | d0e5a7485dc77cb36f2e18645358615df5452cfe | /demo01/booktest/views.py | fdae35131fe7e8b4fc58c2ce6a01699234f7ddc4 | [] | no_license | 960253116/ggz | 809d3d55b24813a0ed3d20b0618df7cb3d35a7d0 | 2dd5cf562624e6edae865d698355fe851797837e | refs/heads/master | 2023-04-01T11:37:55.370207 | 2019-06-21T12:26:59 | 2019-06-21T12:26:59 | 190,000,241 | 0 | 0 | null | 2023-03-31T14:41:24 | 2019-06-03T12:25:01 | CSS | UTF-8 | Python | false | false | 2,724 | py | # from django.shortcuts import render
# # from django.http import HttpResponse
# # """
# # MVT中的V 视图模块
# # """
# # # Create your views here.
# # def index(req):
# # return HttpResponse("这里是首页")
# # #
# # def list(req):
# # return HttpResponse("这里是投票页")
from django.shortcuts import render,redirect,reverse
from django.http import HttpResponse
from .models import topicInfo,NameInfo
# Create your views here.
from django.views.generic import View
# class LoginView(View):
# def get(self,req):
# return render(req,"polls/login.html")
#
# def post(self, req):
# username = req.POST.get("username")
# pwd = req.POST.get("password")
# # cookie实在response里设置
# res = redirect(reverse("polls:index"))
# res.set_cookie("username", username)
# return res
# Decorator that requires a logged-in session before running a view.
def checklogin(fun):
    """Decorator: run the view only when the session has a username;
    otherwise redirect to the booktest login page."""
    def check(req, *args):
        if req.session.get("username"):
            return fun(req, *args)
        return redirect(reverse('booktest:login'))
    return check
@checklogin
def index(req):
    """Home page; greets a hard-coded username."""
    context = {"username": "ggz"}
    return render(req, 'booktest/index.html', context)
@checklogin
def detail(req, id):
    """GET: render the vote form for topic *id*; POST: record one vote.

    POST reads the chosen option's pk from the 'ppt' form field, increments
    that option's vote counter and redirects to the result page.
    """
    # return HttpResponse("detail %s"% id)
    if req.method == "GET":
        top = topicInfo.objects.get(pk=id)
        # locals() hands the template every local name, in particular `top`,
        # so the local variable names here are load-bearing.
        return render(req, 'booktest/detail.html', locals())
    elif req.method == "POST":
        c_id=req.POST.get('ppt')
        res=NameInfo.objects.get(pk=c_id)
        res.vote+=1
        res.save()
        return redirect(reverse('booktest:result',args=(id,)))
    # (older experimental code kept for reference)
    # res = NameInfo.objects.get(name=req.POST.get('ppt'))
    # res.option += 1
    # res.save()
    # top=topicInfo.objects.get(pk=id)
    # return render(req, 'booktest/result.html', locals())
    # return HttpResponse('qqq')
@checklogin
def list(req):
    """List every poll topic.  (The name shadows the builtin `list`, but it
    is referenced by the URLconf, so it must stay.)"""
    all_topics = topicInfo.objects.all()
    return render(req, 'booktest/list.html', {"vote1": all_topics})
@checklogin
def result(req, id):
    """Vote-result page for topic *id*."""
    topic = topicInfo.objects.get(pk=id)
    return render(req, 'booktest/result.html', {'top': topic})
def login(req):
    """GET: show the login form; POST: store the username in the session
    and redirect to the index page."""
    if req.method == "GET":
        return render(req, "booktest/login.html")
    if req.method == "POST":
        username = req.POST.get("username")
        pwd = req.POST.get("password")  # read but never validated
        req.session["username"] = username
        return redirect(reverse("booktest:index"))
"[email protected]"
] | |
7d84b61ff15c53184676e0777e146997b46c1174 | 40369438872d1de956d877012211452dfee408a4 | /4_3buildEmotionMatrix.py | ef68cfeaccda7c75c6e0051acab2244e4ef4c5c1 | [] | no_license | changshuai5/Emotion_analysis | 1187889a4e5aa3ad37683f50f811b09417cd741b | 73795ee2188c4657281a9b4ce82ed663597e5353 | refs/heads/master | 2020-03-25T02:00:59.033489 | 2018-08-02T09:09:26 | 2018-08-02T09:09:26 | 143,269,665 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,199 | py | import nltk
import nltk.data
from nltk.tokenize import WordPunctTokenizer
import xlwt
import xlrd
import numpy as np
import gensim
import numpy as np
import numpy.matlib
import codecs, sys, string, re
def CosineDistance(x, y):
    """Cosine similarity between vectors x and y (despite the name)."""
    denom = np.linalg.norm(x) * np.linalg.norm(y)
    return np.dot(x, y) / denom
def getAllWords(filename):
    """Load lexicon rows (first 7 columns) from sheet 0 of the workbook,
    keeping only entries whose intensity (column 5) is at least 5."""
    book = xlrd.open_workbook(filename)
    sheet = book.sheet_by_index(0)
    selected = []
    for row_idx in range(1, sheet.nrows):  # skip the header row
        row = sheet.row_values(row_idx)
        if int(row[5]) >= 5:
            selected.append([row[i] for i in range(7)])
    return selected
def getEmotionWords(words):
    """Bucket lexicon rows into the emotion categories.

    Column 4 is the category code, column 6 the polarity.  A row is kept
    when its polarity is 0 (neutral) or matches the expected polarity of
    the category: 1 for the positive classes (PA/PE/PC), 2 for the
    negative ones (NA/NB/NJ/NI/NC).

    Returns the buckets in the fixed order:
    (PA, PE, NA, NB, NJ, NI, NC, PC) — joy, joy, anger, sadness, sadness,
    fear, fear, surprise.
    """
    order = ("PA", "PE", "NA", "NB", "NJ", "NI", "NC", "PC")
    positive = {"PA", "PE", "PC"}
    buckets = {code: [] for code in order}
    for row in words:
        category = row[4]
        if category not in buckets:
            continue
        polarity = int(row[6])
        expected = 1 if category in positive else 2
        if polarity == 0 or polarity == expected:
            buckets[category].append(row)
    return (buckets["PA"], buckets["PE"], buckets["NA"], buckets["NB"],
            buckets["NJ"], buckets["NI"], buckets["NC"], buckets["PC"])
def getNeighborVec(wordList,model):
    """Map each lexicon word to its neighbours within the same list.

    A neighbour is a word whose word2vec cosine similarity is strictly
    between 0.6 and 0.99 (the upper bound excludes the word itself).
    Entries missing from the model vocabulary are skipped; words with
    fewer than 3 neighbours are dropped from the result dict.
    """
    b = {}
    for word1 in wordList:
        a = []
        for word2 in wordList:
            try:
                distance = CosineDistance(model[word1[0]], model[word2[0]])
                if distance > 0.6 and distance < 0.99:
                    a.append(word2[0])
            except KeyError:
                # word2 (or word1) not in the embedding vocabulary — skip
                # wordList.remove(word2)
                continue
            except TypeError:
                continue
        if len(a) < 3:
            continue
        else:
            b[word1[0]] = a
    return b
def dfp(fun,gfun,hess,x0):
    """DFP quasi-Newton solver for the unconstrained problem min fun(x).

    Args:
        fun, gfun, hess: objective, gradient and Hessian callables
            (hess is evaluated once, to seed the inverse-Hessian Hk).
        x0: starting point.
    Returns:
        (x, fun(x), k): approximate minimiser, its objective value and
        the iteration count.
    """
    maxk = 1e5
    rho = 0.05
    sigma = 0.4
    epsilon = 1e-12  # stopping tolerance on the gradient norm
    k = 0
    n = np.shape(x0)[0]
    # Seed the inverse-Hessian approximation from the true Hessian at x0.
    Hk = np.linalg.inv(hess(x0))
    while k < maxk:
        gk = gfun(x0)
        if np.linalg.norm(gk) < epsilon:
            break
        dk = -1.0*np.dot(Hk,gk)
        # print dk
        m = 0
        mk = 0
        while m < 20:  # Armijo backtracking search for the step length
            if fun(x0 + rho**m*dk) < fun(x0) + sigma*rho**m*np.dot(gk,dk):
                mk = m
                break
            m += 1
        #print mk
        # DFP update of the inverse-Hessian approximation.
        x = x0 + rho**mk*dk
        print("第"+str(k)+"次的迭代结果为:"+str(x))
        sk = x - x0
        yk = gfun(x) - gk
        if np.dot(sk,yk) > 0:  # curvature condition: only update when s·y > 0
            Hy = np.dot(Hk,yk)
            sy = np.dot(sk,yk)  # scalar s·y
            yHy = np.dot(np.dot(yk,Hk),yk)  # scalar yᵀ·H·y
            Hk = Hk - 1.0*Hy.reshape((n,1))*Hy/yHy + 1.0*sk.reshape((n,1))*sk/sy
        k += 1
        x0 = x
    return x0,fun(x0),k
def getVec(T,V):
    """Return the point minimising the total squared distance to the rows
    of V, found with the DFP solver starting from T.

    fun is the sum of squared row distances; gfun its gradient; hess the
    constant Hessian 2*m*I (m = number of rows of V).
    """
    fun = lambda x: np.sum(np.diag(np.dot(x-V,(x-V).T)))
    gfun = lambda x: 2 * np.sum(x-V,axis=0)
    dem=V.shape[1]
    a = numpy.matlib.identity(dem)
    hess = lambda x: np.array((2 * V.shape[0]) * a)
    x0, fun0, k = dfp(fun, gfun, hess,T)
    return x0
# Returns the cluster-centre vector of each word group.
def getWordVecs(wordDict,model):
    """For each {centre word: neighbour list} entry, gather the word2vec
    embeddings and solve for their centroid via getVec; returns the list
    of centroid vectors (200-dim, matching the model)."""
    T = np.zeros(200)
    vecList=[]
    for wordC,wordlist in wordDict.items():
        vecs = []
        for word in wordlist:
            try:
                vecs.append(model[word])
            except KeyError:
                continue
        # NOTE(review): this lookup is outside the try and may raise
        # KeyError if the centre word itself is missing from the model.
        vecs.append(model[wordC])
        if len(vecs) > 0:
            vecsArray = getVec(T, np.array(vecs))
            vecList.append(vecsArray)
    return vecList
if __name__ == '__main__':
    # print(splitSentence("我爱你我的家My name is Tom."))
    # Pre-trained 200-dimension word2vec model.
    model = gensim.models.Word2Vec.load('data/word2vec/rs200.hy.text.model')
    File = "data/情感词汇本体.xlsx"
    words=getAllWords(File)
    pa_list, pe_list, na_list, nb_list, nj_list, ni_list, nc_list, pc_list=getEmotionWords(words)
    T=np.zeros(200)
    # For each emotion class: build neighbour groups, then compute the
    # centroid vector of every group.
    pa_dict=getNeighborVec(pa_list,model)
    pa_vecList=getWordVecs(pa_dict,model)
    pe_dict = getNeighborVec(pe_list, model)
    pe_vecList = getWordVecs(pe_dict, model)
    na_dict = getNeighborVec(na_list, model)
    na_vecList = getWordVecs(na_dict, model)
    nb_dict = getNeighborVec(nb_list, model)
    nb_vecList = getWordVecs(nb_dict, model)
    nj_dict = getNeighborVec(nj_list, model)
    nj_vecList = getWordVecs(nj_dict, model)
    ni_dict = getNeighborVec(ni_list, model)
    ni_vecList = getWordVecs(ni_dict, model)
    nc_dict = getNeighborVec(nc_list, model)
    nc_vecList = getWordVecs(nc_dict, model)
    pc_dict = getNeighborVec(pc_list, model)
    pc_vecList = getWordVecs(pc_dict, model)
    # Stack all class centroids row-wise and persist the matrix.
    EmotionMatrix = np.concatenate((pa_vecList, pe_vecList, na_vecList, nb_vecList, nj_vecList, ni_vecList, nc_vecList, pc_vecList), axis=0)
    # print("type:",type(EmotionMatrix))
    # print("len:",len(EmotionMatrix))
    # print("shape:",EmotionMatrix.shape)
    np.save("data/vecs/EmotionMatrix_3.npy", EmotionMatrix)
    print("well done")
| [
"[email protected]"
] | |
59f19f70e646a12fbb19d865006751979f8bfb86 | a186054a721093ec7f2780066f3bfb778592da5a | /menus/serializers.py | 1b7c70208d6ec1d73b625d8de2e97f06dbd589aa | [] | no_license | franciscosuca/mensaonline | 24e00d346a61c34f071c9fe27c9e31d52776a183 | 1c99bf85cbb092eff58e338d493c5415654285e5 | refs/heads/master | 2022-12-17T06:56:54.898932 | 2019-11-18T11:13:16 | 2019-11-18T11:13:16 | 195,218,986 | 1 | 0 | null | 2022-12-10T21:21:00 | 2019-07-04T10:14:03 | CSS | UTF-8 | Python | false | false | 266 | py | """
2. Create the serializer
Import the serializers module from rest_framework
"""
from rest_framework import serializers
from menus.models import Menu
class MenuSerializer(serializers.ModelSerializer):
class Meta:
model=Menu
fields="__all__"
| [
"[email protected]"
] | |
485e91adc05a83f24c85aec372f80aacde7c33d4 | 97f8ebfb63501cc62a88c2dfab6018c41663a287 | /stopwatch/stopwatch.py | 2fa936f18d98495aa7f526df89e70356e8a78ced | [] | no_license | a-bobade/GUI-projects | 09589299b8116fee58c02daf0c89d64a4652e37d | 44e63ffac85ab67b61b9650fd0cd881cb87e5d65 | refs/heads/master | 2022-12-20T04:46:11.966749 | 2020-10-14T01:50:26 | 2020-10-14T01:50:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,462 | py | from tkinter import *
count = -1
run = False
def variable(mark):
    """Drive the once-per-second counter update shown on *mark* (a Label).

    Relies on the module globals `run` (watch state) and `count` (elapsed
    seconds, -1 before the first tick)."""
    def value():
        if run:
            global count
            # first tick after pressing Start
            if count == -1:
                show = "Starting"
            else:
                show = str(count)
            mark['text'] = show
            # re-schedule this callback in 1000 ms, then bump the counter
            mark.after(1000, value)
            count += 1
    value()
# While Running
def Start(mark):
    """Start (or resume) counting and flip the button states."""
    global run
    run = True
    variable(mark)
    for widget, state in ((start, 'disabled'), (stop, 'normal'), (reset, 'normal')):
        widget['state'] = state
# While stopped
def Stop():
    """Pause the watch and flip the button states."""
    global run
    run = False
    for widget, state in ((start, 'normal'), (stop, 'disabled'), (reset, 'normal')):
        widget['state'] = state
# For Reset
def Reset(label):
    """Reset the counter shown on *label*.

    If the watch is stopped, show the welcome text and disable Reset;
    if it is running, restart the display from 'Start'.

    Fix: the original ignored the *label* parameter and wrote to the
    global `mark` instead; the caller passes `mark`, so behaviour for the
    existing caller is unchanged, but the function now works for any label.
    """
    global count
    count = -1
    if not run:
        reset['state'] = 'disabled'
        label['text'] = 'Welcome'
    else:
        label['text'] = 'Start'
# --- UI construction -------------------------------------------------------
bobzy = Tk()
bobzy.title("StopWatch")
bobzy.config(bg="green")
bobzy.geometry("300x200")
bobzy.resizable(0, 0)
# Counter display label.
mark = Label(bobzy, text="Welcome", fg="black", font="Times 25 bold", bg="green")
mark.pack()
# Control buttons; Stop and Reset start disabled until the watch runs.
start = Button(bobzy, text='Start', width=25, command=lambda: Start(mark))
stop = Button(bobzy, text='Stop', width=25, state='disabled', command=Stop)
reset = Button(bobzy, text='Reset', width=25, state='disabled', command=lambda: Reset(mark))
start.pack()
stop.pack()
reset.pack()
bobzy.mainloop()
| [
"[email protected]"
] | |
0fd05317963e944006939aeb4981389ca6df3d24 | aed0850065dd467c0d0650c41987b61e94cad9c6 | /day 20/hackerrank/more_exceptions.py | 811828d4f33cb91f3b0e70bee5cd20e21f080a2c | [] | no_license | parmarjh/100day-coding-challenge | 96c79cc86a8f1e0b062b72dd5992610597e289e8 | 8b3e1f6654e4a55a08b4f938f13626fcc2aa8468 | refs/heads/master | 2023-02-18T19:51:22.200057 | 2020-12-31T12:54:10 | 2020-12-31T12:54:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 167 | py | class Calculator:
def power(self,n,p):
if (n >= 0 and p >= 0):
return n ** p
else:
return("n and p should be non-negative") | [
"[email protected]"
] | |
ee1da487ea2bbedb0fff666ec55825cd633044e8 | 19dd37a05c9ca77b830a9ce2edd2bffdfe81efbb | /viewNeutronLimits.py | af6db0cb61ecfdbe299cc03d9ec32a3003af2c13 | [
"Apache-2.0"
] | permissive | Krenair/wmcs-misc-scripts | 43828aa6fd8ce414d63d5c842981e7c76c5031a4 | ae91b5756d73a9405634df9d6c32e98f21b97d5f | refs/heads/master | 2021-03-16T23:09:50.654245 | 2020-04-18T22:40:43 | 2020-04-18T22:40:43 | 246,951,811 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,161 | py | import collections
from keystoneclient.session import Session as KeystoneSession
from keystoneclient.auth.identity.v3 import Password as KeystonePassword
from keystoneclient.v3 import Client as KeystoneClient
from neutronclient.v2_0 import client as neutronclient
def get_keystone_session(project):
    """Build a Keystone v3 session for the novaobserver user, scoped to
    *project*.

    Fix: the password file is now opened with a context manager — the
    original `open(...).read()` leaked the file handle.
    """
    with open('novaobserver_password') as pw_file:
        password = pw_file.read()
    return KeystoneSession(auth=KeystonePassword(
        auth_url="http://cloudcontrol1003.wikimedia.org:5000/v3",
        username="novaobserver",
        password=password,
        project_name=project,
        user_domain_name='default',
        project_domain_name='default'
    ))
# Keystone client scoped to the 'bastion' project, talking to the public
# v3 endpoint; used only to enumerate projects.
keystone_client = KeystoneClient(
    session=get_keystone_session('bastion'),
    endpoint="http://cloudcontrol1003.wikimedia.org:5000/v3",
    interface='public'
)
# For every project except 'admin', print its Neutron quota set for the
# eqiad1-r region.
for project in keystone_client.projects.list():
    if project.name != 'admin':
        session = get_keystone_session(project.name)
        client = neutronclient.Client(session=session, region_name='eqiad1-r')
        print(client)
        # print(dir(client))
        print(client.list_quotas())
        # for s in client.servers.list():
        #     migrated.append(s.name)
"[email protected]"
] | |
2411d8bbf7bf81082013425c9a63c33464876975 | 258055a409918da163120ff4807c5497713ccb54 | /python/TTJets_Hadronic/TTJets_Hadronic_11_cfi.py | ce58cd07df5008c9ec9935b32b741ac67cda4fce | [] | no_license | juifa-tsai/BprimeTobHAnalysis | d9fe65d7389c01936abd361b150796d8f395eccf | 3b1755b2c2bde07433d6c030fcce61b863ee5318 | refs/heads/master | 2021-01-01T17:28:59.334540 | 2018-01-26T00:49:58 | 2018-01-26T00:49:58 | 98,080,612 | 0 | 0 | null | 2017-07-23T07:03:09 | 2017-07-23T07:03:09 | null | UTF-8 | Python | false | false | 2,613 | py | FileNames = ['dcache:/pnfs/cms/WAX/11/store/user/devdatta/NtuplesBprimeTobH/TTJets_HadronicMGDecays_8TeV-madgraph/BprimeTobH_247_1_i9f.root',
'dcache:/pnfs/cms/WAX/11/store/user/devdatta/NtuplesBprimeTobH/TTJets_HadronicMGDecays_8TeV-madgraph/BprimeTobH_248_1_0at.root',
'dcache:/pnfs/cms/WAX/11/store/user/devdatta/NtuplesBprimeTobH/TTJets_HadronicMGDecays_8TeV-madgraph/BprimeTobH_249_1_vjb.root',
'dcache:/pnfs/cms/WAX/11/store/user/devdatta/NtuplesBprimeTobH/TTJets_HadronicMGDecays_8TeV-madgraph/BprimeTobH_24_1_sG5.root',
'dcache:/pnfs/cms/WAX/11/store/user/devdatta/NtuplesBprimeTobH/TTJets_HadronicMGDecays_8TeV-madgraph/BprimeTobH_250_1_GH1.root',
'dcache:/pnfs/cms/WAX/11/store/user/devdatta/NtuplesBprimeTobH/TTJets_HadronicMGDecays_8TeV-madgraph/BprimeTobH_251_1_rxG.root',
'dcache:/pnfs/cms/WAX/11/store/user/devdatta/NtuplesBprimeTobH/TTJets_HadronicMGDecays_8TeV-madgraph/BprimeTobH_252_1_yfu.root',
'dcache:/pnfs/cms/WAX/11/store/user/devdatta/NtuplesBprimeTobH/TTJets_HadronicMGDecays_8TeV-madgraph/BprimeTobH_253_1_lmd.root',
'dcache:/pnfs/cms/WAX/11/store/user/devdatta/NtuplesBprimeTobH/TTJets_HadronicMGDecays_8TeV-madgraph/BprimeTobH_254_1_BrJ.root',
'dcache:/pnfs/cms/WAX/11/store/user/devdatta/NtuplesBprimeTobH/TTJets_HadronicMGDecays_8TeV-madgraph/BprimeTobH_255_1_B9d.root',
'dcache:/pnfs/cms/WAX/11/store/user/devdatta/NtuplesBprimeTobH/TTJets_HadronicMGDecays_8TeV-madgraph/BprimeTobH_256_1_jl4.root',
'dcache:/pnfs/cms/WAX/11/store/user/devdatta/NtuplesBprimeTobH/TTJets_HadronicMGDecays_8TeV-madgraph/BprimeTobH_257_1_5WX.root',
'dcache:/pnfs/cms/WAX/11/store/user/devdatta/NtuplesBprimeTobH/TTJets_HadronicMGDecays_8TeV-madgraph/BprimeTobH_258_1_h5i.root',
'dcache:/pnfs/cms/WAX/11/store/user/devdatta/NtuplesBprimeTobH/TTJets_HadronicMGDecays_8TeV-madgraph/BprimeTobH_259_1_W0C.root',
'dcache:/pnfs/cms/WAX/11/store/user/devdatta/NtuplesBprimeTobH/TTJets_HadronicMGDecays_8TeV-madgraph/BprimeTobH_25_1_StG.root',
'dcache:/pnfs/cms/WAX/11/store/user/devdatta/NtuplesBprimeTobH/TTJets_HadronicMGDecays_8TeV-madgraph/BprimeTobH_260_1_Ay0.root',
'dcache:/pnfs/cms/WAX/11/store/user/devdatta/NtuplesBprimeTobH/TTJets_HadronicMGDecays_8TeV-madgraph/BprimeTobH_261_1_lI8.root',
'dcache:/pnfs/cms/WAX/11/store/user/devdatta/NtuplesBprimeTobH/TTJets_HadronicMGDecays_8TeV-madgraph/BprimeTobH_262_1_ljs.root',
'dcache:/pnfs/cms/WAX/11/store/user/devdatta/NtuplesBprimeTobH/TTJets_HadronicMGDecays_8TeV-madgraph/BprimeTobH_263_1_mSs.root',
'dcache:/pnfs/cms/WAX/11/store/user/devdatta/NtuplesBprimeTobH/TTJets_HadronicMGDecays_8TeV-madgraph/BprimeTobH_265_1_jT4.root',
]
| [
"[email protected]"
] | |
968c83ccdd77b22ace1a5dd961126c2e9817137a | 9ec5b9fb57b9556eabecdf896d809eb307b729d2 | /dashboard/urls.py | 54460ca794e2e6655f8fe3ebaecc07e362b64864 | [] | no_license | ChandanKumar665/AquaDashboard | edcf1ef5eae0cbb2c35e209bfb0537c0db04569d | cca2053f6340963de261e101d2158d88883a2817 | refs/heads/master | 2023-01-08T03:50:23.290035 | 2019-05-29T06:22:30 | 2019-05-29T06:22:30 | 188,957,581 | 0 | 0 | null | 2023-01-03T22:54:35 | 2019-05-28T05:07:39 | JavaScript | UTF-8 | Python | false | false | 165 | py | from django.urls import path
from django.urls.conf import include
from dashboard.views import DashboardView
# Route the dashboard root URL to the class-based DashboardView.
urlpatterns = [
    path('', DashboardView.as_view())
]
| [
"[email protected]"
] | |
5c2a1d1194206dafe5cbbf17ef06868158e05d82 | 2f56dcfba92c4c82c1de2534fc05f8114c03b7c9 | /model eye13/main_lr2face_nani.py | bd9f27afcf20208f6ba1fc4642485415c22c0710 | [] | no_license | NeuralAction/eyegazemodel | 685ab3ce2073ad7ef83d2581783bcd25e5b1bd39 | b7d23ded344425738ccd0a090bd9c04080ff41f2 | refs/heads/master | 2021-09-07T13:16:25.832727 | 2018-02-23T10:22:17 | 2018-02-23T10:22:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 31,829 | py | # -*- cod ing:CP949 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from pympler import refbrowser
import gc
import os
import datetime
import threading
import time
import math
import numpy as np
import tensorflow as tf
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import array_ops
from tensorflow.python.framework import ops
from tensorflow.python.framework import graph_util
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.layers import utils
import matplotlib
#matplotlib.use('Agg')
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
import random
import matplotlib.pyplot as plt
from vision import image
import freeze_graph
import optimize_for_inference
import multiprocessing
from multiprocessing import Process, Queue
from pympler.tracker import SummaryTracker
# Global switch: use SELU activations (self-normalizing nets) instead of ReLU.
useSELU = False
def _variable_with_weight_decay(shape, wd=None):
    """Create a truncated-normal weight variable for *shape*.

    The stddev follows fan-in scaling: sqrt(1/fan_in) for SELU and
    sqrt(2/fan_in) for ReLU (He init).  When *wd* is given, an L2 penalty
    (wd * l2_loss) is registered in the 'losses' collection.
    """
    fan_in = np.prod(shape[:-1]) if len(shape) == 4 else shape[0]
    stddev = math.sqrt((1 if useSELU else 2) / fan_in)
    var = tf.Variable(tf.truncated_normal(shape=shape, stddev=stddev))
    if wd is not None:
        penalty = tf.reduce_sum(tf.multiply(tf.nn.l2_loss(var), wd))
        tf.add_to_collection('losses', penalty)
    return var
def convWeight(shape):
    # Convolution kernels: fan-in-scaled init, no weight decay.
    return _variable_with_weight_decay(shape=shape)
def fcWeight(shape, weight_decay = 0.001):
    # Fully-connected weights: L2 weight decay applied (default 0.001).
    return _variable_with_weight_decay(shape=shape, wd=weight_decay)
def biasWeight(shape):
    # Biases initialised to zero.
    return tf.Variable(tf.constant(0.0, shape=shape, dtype=tf.float32))
def weight_variable_deactivated_lol(shape):
    """Legacy (unused) weight initializer, superseded by
    _variable_with_weight_decay; kept for reference."""
    base = 1 if useSELU else 2
    dev = 0.1
    if len(shape) == 4:
        dev = math.sqrt(float(base) / float(shape[0] * shape[1] * shape[2]))
    elif len(shape) == 2:
        dev = math.sqrt(float(base) / float(shape[0]))
    print(dev)
    return tf.Variable(tf.random_normal(shape, stddev=dev))
def bias_variable_deactivated_lol(shape):
    """Legacy (unused) bias initializer: zero-stddev normal, i.e. zeros."""
    return tf.Variable(tf.random_normal(shape, stddev=0))
def conv2d(x, W, stride = 1, pad='SAME'):
    # 2-D convolution with equal H/W stride.
    return tf.nn.conv2d(x, W, strides=[1,stride, stride, 1], padding=pad)
def max_pool(x, size=2):
    # size x size max pooling with matching stride.
    return tf.nn.max_pool(x, ksize=[1, size, size, 1], strides=[1, size, size, 1], padding='SAME')
def avg_pool_2x2(x, size=2):
    # Average pooling; despite the name the window is size x size (default 2).
    return tf.nn.avg_pool(x, ksize=[1, size, size, 1], strides=[1, size, size, 1], padding='SAME')
#ref. http://stackoverflow.com/questions/33949786/how-could-i-use-batch-normalization-in-tensorflow
def batch_norm(x, n_out, phase_train, scope='bn'):
    """Batch-normalize conv activations x (NHWC) over axes [0, 1, 2].

    Args:
        x: 4-D activation tensor.
        n_out: channel count (size of the learned beta/gamma).
        phase_train: boolean tensor; True uses batch statistics and updates
            the exponential moving averages, False uses the averaged stats.
    Returns:
        The normalized tensor.
    """
    with tf.variable_scope(scope):
        beta = tf.Variable(tf.constant(0.0, shape=[n_out]), trainable=True)   # learned shift
        gamma = tf.Variable(tf.constant(1.0, shape=[n_out]), trainable=True)  # learned scale
        batch_mean, batch_var = tf.nn.moments(x, [0,1,2])
        ema = tf.train.ExponentialMovingAverage(decay=0.5)
        def mean_var_with_update():
            # Apply the EMA update, then hand back the batch statistics.
            ema_apply_op = ema.apply([batch_mean, batch_var])
            with tf.control_dependencies([ema_apply_op]):
                return tf.identity(batch_mean), tf.identity(batch_var)
        mean, var = tf.cond(phase_train, mean_var_with_update, lambda: (ema.average(batch_mean), ema.average(batch_var)))
        normed = tf.nn.batch_normalization(x, mean, var, beta, gamma, 1e-3)
    return normed
def relu(tensor):
    # Plain ReLU wrapper (counterpart of selu, selected via activate()).
    return tf.nn.relu(tensor)
def selu(x):
    """Scaled exponential linear unit; alpha/scale are the fixed-point
    constants from the SELU paper (Klambauer et al., 2017)."""
    alpha = 1.6732632423543772848170429916717
    scale = 1.0507009873554804934193349852946
    return scale * tf.where(x >= 0.0, x, alpha * tf.nn.elu(x))
def dropout_selu(x, rate, alpha= -1.7580993408473766, fixedPointMean=0.0, fixedPointVar=1.0,
                 noise_shape=None, seed=None, name=None, training=False):
    """Alpha-dropout for SELU networks ("dropout to a value with rescaling").

    Dropped units are set to *alpha* instead of zero, and the output is
    affinely rescaled (a*ret + b) so its mean/variance stay at the
    self-normalizing fixed point (fixedPointMean / fixedPointVar).
    A no-op when *training* is False or the keep probability is 1.
    """
    def dropout_selu_impl(x, rate, alpha, noise_shape, seed, name):
        keep_prob = 1.0 - rate
        x = ops.convert_to_tensor(x, name="x")
        # if isinstance(keep_prob, numbers.Real) and not 0 < keep_prob <= 1:
        #     raise ValueError("keep_prob must be a scalar tensor or a float in the "
        #                      "range (0, 1], got %g" % keep_prob)
        keep_prob = ops.convert_to_tensor(keep_prob, dtype=x.dtype, name="keep_prob")
        keep_prob.get_shape().assert_is_compatible_with(tensor_shape.scalar())
        alpha = ops.convert_to_tensor(alpha, dtype=x.dtype, name="alpha")
        keep_prob.get_shape().assert_is_compatible_with(tensor_shape.scalar())
        if tensor_util.constant_value(keep_prob) == 1:
            return x
        noise_shape = noise_shape if noise_shape is not None else array_ops.shape(x)
        # Bernoulli keep-mask: floor(keep_prob + U[0,1)) equals 1 w.p. keep_prob.
        random_tensor = keep_prob
        random_tensor += random_ops.random_uniform(noise_shape, seed=seed, dtype=x.dtype)
        binary_tensor = math_ops.floor(random_tensor)
        ret = x * binary_tensor + alpha * (1-binary_tensor)
        # Affine correction restoring the target mean/variance.
        a = tf.sqrt(fixedPointVar / (keep_prob *((1-keep_prob) * tf.pow(alpha-fixedPointMean,2) + fixedPointVar)))
        b = fixedPointMean - a * (keep_prob * fixedPointMean + (1 - keep_prob) * alpha)
        ret = a * ret + b
        ret.set_shape(x.get_shape())
        return ret
    with tf.name_scope(name, "dropout", [x]) as name:
        return utils.smart_cond(training,
            lambda: dropout_selu_impl(x, rate, alpha, noise_shape, seed, name),
            lambda: array_ops.identity(x))
def dropout(tensor, rate, training):
    # NOTE(review): callers pass a keep-probability here; tf.nn.dropout's
    # second positional argument in TF1 is keep_prob, while dropout_selu
    # treats `rate` as a drop rate — presumably the SELU branch inverts the
    # meaning.  Confirm the intended semantics before changing either path.
    if(useSELU):
        return dropout_selu(tensor, rate, training=training)
    return tf.nn.dropout(tensor, rate)
def activate(tensor):
    """Apply the configured activation: SELU when useSELU, else ReLU."""
    return selu(tensor) if useSELU else relu(tensor)
def shape(tensor):
    """Return the tensor's static shape as a tuple of ints (None for
    unknown dimensions)."""
    return tuple(dim.value for dim in tensor.get_shape())
def resBlockPool(tensor, poolsize=2):
    # Downsample between residual stages with average pooling.
    return avg_pool_2x2(tensor, size=poolsize)
def conv2dSingle(pool, phase_train, useBnorm, weightShape, stride = 1, poolsize=2):
    """Stem for one stream: conv -> (optional batch-norm) -> activation ->
    max-pool.  weightShape is [filterH, filterW, out_channels]."""
    in_ch = shape(pool)[3]
    out_ch = weightShape[2]
    W = convWeight([weightShape[0], weightShape[1], in_ch, out_ch])
    b = biasWeight([out_ch])
    h = conv2d(pool, W, stride=stride) + b
    if useBnorm:
        h = batch_norm(h, out_ch, phase_train)
    h = activate(h)
    h = max_pool(h, size=poolsize)
    print(h)
    return h
def conv2dShared(pool1, pool2, phase_train, useBnorm, weightShape, stride = 1, poolsize=2):
    """Siamese stem: run the same conv/BN/activation/max-pool on both eye
    streams with SHARED conv weights; returns (left_out, right_out).
    Batch-norm parameters are created per call, i.e. per stream."""
    in_ch = shape(pool1)[3]
    out_ch = weightShape[2]
    W_shared = convWeight([weightShape[0], weightShape[1], in_ch, out_ch])
    b_shared = biasWeight([out_ch])
    def run_branch(inp):
        # One stream through the shared-weight stem.
        h = conv2d(inp, W_shared, stride=stride) + b_shared
        if useBnorm:
            h = batch_norm(h, out_ch, phase_train)
        h = activate(h)
        h = max_pool(h, size=poolsize)
        print(h)
        return h
    return run_branch(pool1), run_branch(pool2)
def resBlock(tensor, ch, phase_train, useBnorm, poolsize = 1):
    """Bottleneck residual block (1x1 -> 3x3 -> 1x1) for a single stream.

    Args:
        tensor: input feature map [N, H, W, preCh].
        ch: output channel count; must be divisible by 4 (bottleneck = ch/4).
        phase_train: bool tensor selecting batch-norm train/test behaviour.
        useBnorm: insert batch normalization after each conv.
        poolsize: if > 1, average-pool the input first (stage transition).
    Returns:
        The activated block output.  The identity shortcut is added only
        when the input channel count already equals *ch*.

    Fix: the progress message previously read "Building ResBlockShared"
    (copy-paste from resBlockShared), which made the build log misleading.
    """
    if not ch % 4 == 0:
        print("input channel assert fail: ch % 4 != true")
        raise Exception()
    print("Building ResBlock")
    prePool = tensor
    pool = tensor
    if(poolsize>1):
        pool = resBlockPool(pool, poolsize=poolsize)
        prePool = pool
    preCh = shape(pool)[3]
    if not (ch == preCh):
        print("not preCh == ch", "preCh:", preCh, "ch:", ch)
    chneck = int(ch / 4)
    # 1x1 reduction conv
    W_conv = convWeight([1, 1, preCh, chneck])
    b_conv = biasWeight([chneck])
    h_conv = conv2d(pool, W_conv) + b_conv
    if(useBnorm):
        h_conv = batch_norm(h_conv, chneck, phase_train)
    h_conv = activate(h_conv)
    pool = h_conv
    print(pool)
    # 3x3 conv at bottleneck width
    W_conv = convWeight([3, 3, chneck, chneck])
    b_conv = biasWeight([chneck])
    h_conv = conv2d(pool, W_conv) + b_conv
    if(useBnorm):
        h_conv = batch_norm(h_conv, chneck, phase_train)
    h_conv = activate(h_conv)
    pool = h_conv
    print(pool)
    # 1x1 expansion conv (no activation before the residual add)
    W_conv = convWeight([1, 1, chneck, ch])
    b_conv = biasWeight([ch])
    h_conv = conv2d(pool, W_conv) + b_conv
    pool = h_conv
    print(pool)
    # identity shortcut when channel counts match, then BN + activation
    if(ch == preCh):
        pool = pool + prePool
    if(useBnorm):
        pool = batch_norm(pool, ch, phase_train)
    pool = activate(pool)
    print(pool)
    return pool
def resBlockShared(tensor, tensorR, ch, phase_train, useBnorm, poolsize=1):
    """Bottleneck residual block applied to two streams with SHARED conv
    weights (siamese left/right eye paths).

    Same structure as resBlock (1x1 -> 3x3 -> 1x1, identity shortcut only
    when the channel count is unchanged); the conv weights/biases are
    shared between the streams, while batch-norm variables are created
    per stream.
    """
    if not ch % 4 == 0:
        print("input channel assert fail: ch % 4 != true")
        raise Exception()
    print("Building ResBlockShared")
    prePool = tensor
    prePoolR = tensorR
    pool = tensor
    poolR = tensorR
    preCh = shape(pool)[3]
    if(poolsize > 1):
        # Downsample both streams before the convs (stage transition).
        pool = resBlockPool(pool, poolsize=poolsize)
        poolR = resBlockPool(poolR, poolsize=poolsize)
        prePool = pool
        prePoolR = poolR
    if not (ch == preCh):
        print("not preCh == ch", "preCh:", preCh, "ch:", ch)
    chneck = int(ch / 4)
    # 1x1 reduction conv (shared weights, applied to both streams)
    W_conv = convWeight([1, 1, preCh, chneck])
    b_conv = biasWeight([chneck])
    h_conv = conv2d(pool, W_conv) + b_conv
    if(useBnorm):
        h_conv = batch_norm(h_conv, chneck, phase_train)
    h_conv = activate(h_conv)
    pool = h_conv
    h_conv = conv2d(poolR, W_conv) + b_conv
    if(useBnorm):
        h_conv = batch_norm(h_conv, chneck, phase_train)
    h_conv = activate(h_conv)
    poolR = h_conv
    print(pool, poolR)
    # 3x3 conv at bottleneck width
    W_conv = convWeight([3, 3, chneck, chneck])
    b_conv = biasWeight([chneck])
    h_conv = conv2d(pool, W_conv) + b_conv
    if(useBnorm):
        h_conv = batch_norm(h_conv, chneck, phase_train)
    h_conv = activate(h_conv)
    pool = h_conv
    h_conv = conv2d(poolR, W_conv) + b_conv
    if(useBnorm):
        h_conv = batch_norm(h_conv, chneck, phase_train)
    h_conv = activate(h_conv)
    poolR = h_conv
    print(pool, poolR)
    # 1x1 expansion conv (no activation before the residual add)
    W_conv = convWeight([1, 1, chneck, ch])
    b_conv = biasWeight([ch])
    h_conv = conv2d(pool, W_conv) + b_conv
    pool = h_conv
    h_conv = conv2d(poolR, W_conv) + b_conv
    poolR = h_conv
    print(pool, poolR)
    # residual add (only when channels match), then BN + activation, per stream
    if(preCh == ch):
        pool = pool + prePool
    if(useBnorm):
        pool = batch_norm(pool, ch, phase_train)
    pool = activate(pool)
    if(preCh == ch):
        poolR = poolR + prePoolR
    if(useBnorm):
        poolR = batch_norm(poolR, ch, phase_train)
    poolR = activate(poolR)
    print(pool, poolR)
    return pool, poolR
def flat(tensor):
    """Flatten a [N, H, W, C] feature map to [N, H*W*C]."""
    _, h, w, c = shape(tensor)
    return tf.reshape(tensor, [-1, h * w * c])
def fc(tensor, nodeNum, keep_prob, phase_train, name=None):
    """Fully-connected layer (weight-decayed) + activation + dropout."""
    in_dim = shape(tensor)[1]
    W = fcWeight([in_dim, nodeNum])
    b = biasWeight([nodeNum])
    pre_act = tf.add(tf.matmul(tensor, W), b, name=name)
    return dropout(activate(pre_act), keep_prob, phase_train)
#evaluate new model
def eval(bsize=20, tbsize=20, ep = 25, lr = 1e-4, debugstep=8, savepath=None, savemodel = False, useBnorm = True, droprate = 0.5, decay_ephoc=1, decay_rate=0.8, loadmodel=False):
testdrop = 1.0
if(useSELU):
testdrop = 0.0
#input vars
x_l = tf.placeholder(tf.float32, shape=[None, data.imagesize, data.imagesize, 3], name='input_image')
x_r = tf.placeholder(tf.float32, shape=[None, data.imagesize, data.imagesize, 3], name='input_image_r')
x_f = tf.placeholder(tf.float32, shape=[None, data.facesize, data.facesize, 3], name='input_image_f')
y_ = tf.placeholder(tf.float32, shape=[None, 2], name='input_label')
keep_prob = tf.placeholder(tf.float32, name='keep_prob')
phase_train = tf.placeholder(tf.bool, name='phase_train')
print("x_l_image=", x_l)
print("x_r_image=", x_r)
print("x_f_image=", x_f)
#eye
h_pool, h_poolR = conv2dShared(x_l, x_r, phase_train, useBnorm, [7, 7, 16])
h_pool, h_poolR = resBlockShared(h_pool, h_poolR, 64, phase_train, useBnorm)
h_pool, h_poolR = resBlockShared(h_pool, h_poolR, 64, phase_train, useBnorm)
h_pool, h_poolR = resBlockShared(h_pool, h_poolR, 64, phase_train, useBnorm)
h_pool, h_poolR = resBlockShared(h_pool, h_poolR, 92, phase_train, useBnorm, poolsize=2)
h_pool, h_poolR = resBlockShared(h_pool, h_poolR, 92, phase_train, useBnorm)
h_pool, h_poolR = resBlockShared(h_pool, h_poolR, 92, phase_train, useBnorm)
h_pool, h_poolR = resBlockShared(h_pool, h_poolR, 120, phase_train, useBnorm, poolsize=2)
h_pool, h_poolR = resBlockShared(h_pool, h_poolR, 120, phase_train, useBnorm)
h_pool, h_poolR = resBlockShared(h_pool, h_poolR, 120, phase_train, useBnorm)
h_pool, h_poolR = resBlockShared(h_pool, h_poolR, 148, phase_train, useBnorm, poolsize=2)
h_pool, h_poolR = resBlockShared(h_pool, h_poolR, 148, phase_train, useBnorm)
h_pool, h_poolR = resBlockShared(h_pool, h_poolR, 148, phase_train, useBnorm)
shape_clast = shape(h_pool)
print(shape_clast)
h_pool = tf.concat([flat(h_pool), flat(h_poolR)],1)
print(h_pool)
# W_fc1 = fcWeight([size_clast * 2, 128])
# b_fc1 = biasWeight([128])
# h_fc1 = activate(tf.matmul(h_flat, W_fc1) + b_fc1)
# h_fc1 = dropout(h_fc1, keep_prob, training = phase_train)
# print(h_fc1)
#face
f_pool = conv2dSingle(x_f, phase_train, useBnorm, [7, 7, 16])
f_pool = resBlock(f_pool, 64, phase_train, useBnorm)
f_pool = resBlock(f_pool, 64, phase_train, useBnorm)
f_pool = resBlock(f_pool, 64, phase_train, useBnorm)
f_pool = resBlock(f_pool, 92, phase_train, useBnorm, poolsize = 2)
f_pool = resBlock(f_pool, 92, phase_train, useBnorm)
f_pool = resBlock(f_pool, 92, phase_train, useBnorm)
f_pool = resBlock(f_pool, 120, phase_train, useBnorm, poolsize = 2)
f_pool = resBlock(f_pool, 120, phase_train, useBnorm)
f_pool = resBlock(f_pool, 120, phase_train, useBnorm)
f_pool = resBlock(f_pool, 148, phase_train, useBnorm, poolsize = 2)
f_pool = resBlock(f_pool, 148, phase_train, useBnorm)
f_pool = resBlock(f_pool, 148, phase_train, useBnorm)
size_fpool = shape(f_pool)
print(size_fpool)
f_pool = flat(f_pool)
print(f_pool)
# Wf_fc1 = fcWeight([size_fpool, 100])
# bf_fc1 = biasWeight([100])
# f_fc1 = activate(tf.matmul(f_flat, Wf_fc1) + bf_fc1)
# f_fc1 = dropout(f_fc1, keep_prob, training = phase_train)
# print(f_fc1)
#final FC
fi_pre = tf.concat([f_pool, h_pool], 1)
print(fi_pre)
fi_pool = fc(fi_pre, 128, keep_prob, phase_train)
print(fi_pool)
#regression
fi_pool_size = shape(fi_pool)[1]
W_fclast = fcWeight([fi_pool_size, 2])
b_fclast = biasWeight([2])
y_nn = tf.add(tf.matmul(fi_pool, W_fclast), b_fclast, name="output")
#accuracy
worst_correct_prediction = tf.reduce_max(tf.sqrt(tf.reduce_sum(tf.square(y_nn - y_), 1)))
best_correct_prediction = tf.reduce_min(tf.sqrt(tf.reduce_sum(tf.square(y_nn - y_), 1)))
mean_correct_prediction = tf.reduce_mean(tf.sqrt(tf.reduce_sum(tf.square(y_nn - y_), 1)))
correct_precent = 100 - mean_correct_prediction / data.anglemul * 100
print(best_correct_prediction)
#trainer
dist = tf.sqrt(tf.reduce_sum(tf.square(y_nn - y_), 1))
#angle diff?
#loss = tf.reduce_mean(tf.atan(dist))
#mean sum sqr
#0.08 test error (wo batch)
loss = tf.reduce_mean(tf.reduce_sum(tf.square(y_nn - y_), 1) / 2)
#mean sqr
#loss = tf.reduce_mean(tf.square(y_nn - y_))
#distance
#loss = dist
#l2 loss
#wb125 0.12 wo weight decay
#loss = tf.nn.l2_loss(y_nn - y_)
global_step = tf.Variable(0, trainable=False)
if(useRateDecay):
starter_learning_rate = lr
decay_r = decay_ephoc * data.size / bsize
print("decay step:", decay_r)
learning_rate = tf.train.exponential_decay(starter_learning_rate, global_step, int(decay_r), decay_rate, staircase=True)
else:
learning_rate = lr
if(useWeightDecay):
tf.add_to_collection('losses', loss)
loss = tf.add_n(tf.get_collection('losses'), name='total_loss')
if(useBnorm):
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS, scope='bn')
with tf.control_dependencies(update_ops):
train_step = tf.train.AdamOptimizer(learning_rate).minimize(loss, global_step=global_step)
else:
train_step = tf.train.AdamOptimizer(learning_rate).minimize(loss, global_step=global_step)
print(loss)
#ready temp vars
ephoc = ep
last_step = 0
last_time = time.time()
step = 0
lastephoc = -1
lastgc = -1
testacc = 0
acc_max = 0
acc_ephoc = []
acc_means = []
acc_test = []
acc_lr = []
acc_steps = []
acc_loss = []
acc_sum = 0.0
acc_count = 0.0
step_per_sec = 0
checkpoint_state_name = "checkpoint_state"
checkpoint_prefix = os.path.join(savedir, "saved_checkpoint")
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
#init saver
if(savemodel or loadmodel):
saver = tf.train.Saver()
#session init
with tf.Session(config=config) as sess:
sess.run(tf.global_variables_initializer())
if(savepath != None and savemodel):
tf.train.write_graph(sess.graph_def, '', os.path.join(savepath, "graph.pb"))
if(loadmodel):
print("restoring model")
saver.restore(sess, checkpoint_prefix+'-0')
#train loop
step_total = int(data.size * ephoc / bsize)
for i in range(0, step_total):
batch_img_l, batch_img_r, batch_img_f, batch_label = data.batch(bsize)
step += bsize
if (i !=0 and i % debugstep == 0) or i == (step_total - 1):
#run train acc
feeding = { x_l:batch_img_l, x_r:batch_img_r, x_f:batch_img_f, y_:batch_label, phase_train:False, keep_prob: testdrop }
req_fetch = [loss, correct_precent, mean_correct_prediction, worst_correct_prediction, best_correct_prediction, y_nn[0], y_[0]]
if(useRateDecay):
req_fetch.append(learning_rate)
fetches = sess.run(req_fetch, feed_dict=feeding)
#acc update
tacc = fetches[2]
if(acc_max < tacc):
acc_max = tacc
acc_means.append(tacc)
acc_steps.append(step)
acc_sum += tacc
acc_count+=1
acc_loss.append(fetches[0])
acc_lr.append(fetches[len(fetches)-1]*1000)
#run test acc
tbatch_img_l, tbatch_img_r, tbatch_img_f, tbatch_label = datatest.batch(tbsize, randomize = False)
tfeeding = { x_l:tbatch_img_l, x_r:tbatch_img_r, x_f:tbatch_img_f, y_:tbatch_label, phase_train:False, keep_prob: testdrop }
tfetches = sess.run([mean_correct_prediction, correct_precent], feed_dict=tfeeding)
testacc = tfetches[0]
acc_test.append(testacc)
#ephoc update
e = math.floor(step/data.size)
if(e!=lastephoc) or (step_total - 1) == i:
millis = int(round(time.time() * 1000))
if(millis - lastgc > 300000):
print("Garbage Colleting...")
gc.collect()
lastgc = millis
lastephoc = e
acc_ephoc.append(acc_sum / acc_count)
acc_sum = 0
acc_count = 0
#save model
print("Graph Saving...")
if(savepath != None and savemodel):
savedpath = saver.save(sess, checkpoint_prefix, global_step=0, latest_filename=checkpoint_state_name)
print("Graph saved in:", savedpath)
#save plot
print("Plot Saving...")
fig = plt.Figure()
canvas = FigureCanvasTkAgg(fig)
ax = fig.add_subplot(111)
ax.plot(acc_steps, acc_means, label="Train")
ax.plot(acc_steps, acc_test, label="Test")
ax.plot(acc_steps, acc_loss, label="Loss")
ax.plot(acc_steps, acc_lr, label="L.Rate*1000")
ax.set_ylim([0, 0.45])
ax.set_xlabel("step")
ax.set_ylabel("mean error")
ax.legend(loc='upper right')
ax.grid(True)
pltname = modeltitle + " ephocs " + str(e) + "-" + str(ep) + " anglemul " + str(data.anglemul) + " lr " + str(lr) + ".png"
pltname = "MEAN ACC " + str((testacc+acc_ephoc[-1])*0.5) + " TEST ACC " + str(testacc) + " TRAIN ACC " + str(acc_ephoc[-1]) + " " + pltname
pltfile = os.path.join(savedir, pltname)
canvas.print_figure(pltfile)
fig.clf()
fig.clear()
plt.clf()
plt.cla()
plt.close()
print("Saved Plot : " + pltname)
del fig, canvas, pltfile, pltname, ax
#print debug msg
time_now = time.time()
step_per_sec = (step - last_step) / (time_now - last_time)
print("Epoch: "+str(e)+" Step: "+str(step)+" Fetches:"+str(fetches)+" TFectches:"+str(tfetches) + " Steps/Second:"+str(step_per_sec))
last_step = step
last_time = time_now
#free mem
for item in fetches:
item = None
fetches.clear()
for item in tfetches:
item = None
tfetches.clear()
for item in feeding:
item = None
feeding.clear()
for item in tfeeding:
item = None
tfeeding.clear()
del tbatch_img_l, tbatch_img_r, tbatch_img_f, tbatch_label, tfetches, fetches, feeding, tfeeding
#train nn
feeding = {x_l: batch_img_l, x_r: batch_img_r, x_f: batch_img_f, y_: batch_label, phase_train:True, keep_prob: droprate }
t = sess.run([train_step], feed_dict=feeding)
for item in t:
item = None
t.clear()
for item in feeding:
item = None
feeding.clear()
del batch_img_l, batch_img_r, batch_img_f, batch_label, t, feeding
#report acc per ephoc
print("Ephoc Accuracies: ")
for ei in range(0, len(acc_ephoc)):
print("Ephoc " + str(ei) + " : " + str(acc_ephoc[ei]))
#save model
if(savepath != None and savemodel):
saver.save(sess, checkpoint_prefix, global_step=0, latest_filename=checkpoint_state_name)
tf.reset_default_graph()
gc.collect()
#return acc
return acc_ephoc[-1] , testacc
class EvalScore:
    """Record of a single hyperparameter trial (learning rate, angle multiplier, accuracy)."""

    def __init__(self, lr, anglemul, accuracy):
        # Trial inputs and its resulting test accuracy.
        self.lr = lr
        self.anglemul = anglemul
        self.accuracy = accuracy

    def print(self):
        """Return a one-line human-readable summary (name kept for existing callers)."""
        return "acc: {} lr: {} anglemul: {}".format(self.accuracy, self.lr, self.anglemul)
def HyperparamatersOpt(datasize = 500):
    """Random-search sweep over the learning rate (angle multiplier fixed at 360).

    Runs 200 trials of the module-level eval() and prints a running report of
    all trials plus the best one seen so far.
    """
    scores = []
    data.size = datasize
    for trial in range(0, 200):
        data.anglemul = 360
        #data.anglemul = random.randrange(1 , 1500)
        # Log-uniform learning rate in 10^-5.5 .. 10^-2.4.
        lr = 10 ** (float(random.randrange(24000, 55000)) / 10000.0 * -1)
        datatest.anglemul = data.anglemul
        print("Randomized LR and Angle: " + str([lr, data.anglemul]))
        evalacc = eval(bsize=20, ep = 15, lr = lr, debugstep=3)
        scores.append(EvalScore(lr, data.anglemul, evalacc[1]))
        print("Eval " + str(trial) + " result: " + str(evalacc))
        # Rebuild the progress report and locate the best trial so far.
        report = ""
        best_acc = -10000000
        best_idx = -1
        for idx, trial_score in enumerate(scores):
            report += trial_score.print() +"\n"
            if trial_score.accuracy > best_acc:
                best_acc = trial_score.accuracy
                best_idx = idx
        print("=======PROGRASS=======")
        print(report)
        if best_idx > -1:
            print("Max Accuracy: " + scores[best_idx].print())
        print("========REPORT========")
def Train(save = False, loadmodel=False, ep=60, useBnorm=True, bsize=20, tbsize=20, debugStep=100, decayEphoc=4):
    """Run one full training session via the module-level eval().

    Uses a fixed learning rate (1e-4) and a dropout keep-rate of 0.75,
    or 0.05 when the SELU variant is enabled. Optionally saves/restores
    checkpoints under the global `savedir`.
    """
    data.anglemul = 1
    datatest.anglemul = data.anglemul
    datatest.imagesize = data.imagesize
    lr = 0.0001
    drop = 0.05 if useSELU else 0.75
    evalacc = eval(bsize=bsize, loadmodel=loadmodel, ep = ep, lr = lr, debugstep=debugStep, savepath=savedir, savemodel = save, useBnorm=useBnorm, droprate=drop, tbsize=tbsize, decay_ephoc=decayEphoc)
    print("result: " + str(evalacc))
def FreezeGraph(usecpu = False):
    """Restore the latest checkpoint from `savedir`, convert all variables to
    constants, and write the frozen graph to savedir/frozen.pb.

    NOTE(review): `usecpu` is currently unused — confirm whether CPU-only
    freezing was ever wired up.
    """
    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    # create a session
    with tf.Session(config=config) as sess:
        # import best model
        saver = tf.train.import_meta_graph(os.path.join(savedir, 'saved_checkpoint-0.meta')) # graph
        saver.restore(sess, os.path.join(savedir, 'saved_checkpoint-0')) # variables
        # get graph definition
        gd = sess.graph.as_graph_def()
        # fix batch norm nodes: rewrite training-time ops (RefSwitch/AssignSub)
        # into their inference-safe forms so freezing does not leave dangling
        # variable references on the moving-average inputs.
        for node in gd.node:
            if node.op == 'RefSwitch':
                node.op = 'Switch'
                for index in range(len(node.input)):
                    if 'moving_' in node.input[index]:
                        node.input[index] = node.input[index] + '/read'
            elif node.op == 'AssignSub':
                node.op = 'Sub'
                if 'use_locking' in node.attr: del node.attr['use_locking']
        # generate protobuf: keep only the subgraph that feeds the "output" tensor.
        converted_graph_def = graph_util.convert_variables_to_constants(sess, gd, ["output"])
        tf.train.write_graph(converted_graph_def, savedir, 'frozen.pb', as_text=False)
#ref. https://blog.metaflow.fr/tensorflow-how-to-freeze-a-model-and-serve-it-with-a-python-api-d4f3596b3adc
def load_graph(frozen_graph_filename):
    """Deserialize a frozen GraphDef file and import it into a fresh tf.Graph.

    The import name is "name", so every tensor ends up under the 'name/'
    scope — which is exactly what ModelTest's get_tensor_by_name lookups use.
    """
    with tf.gfile.GFile(frozen_graph_filename, "rb") as handle:
        graph_def = tf.GraphDef()
        graph_def.ParseFromString(handle.read())
    with tf.Graph().as_default() as graph:
        # Remaining import options are left at their defaults.
        tf.import_graph_def(graph_def, name="name")
    return graph
def ModelTest(filename="frozen.pb", count=100, useBnorm=True, debugOp=False, testdata=None):
    """Run `count` single-sample inferences through a frozen graph and report
    timing and error statistics, then plot predicted vs. true gaze vectors.

    filename -- frozen protobuf inside the global `savedir`
    testdata -- dataset object providing batch(); must be supplied by the caller
    """
    # Test-time dropout keep value; the SELU variant uses 0.0 — presumably its
    # dropout op's no-op rate, confirm against the training graph.
    testdrop = 1.0
    if(useSELU):
        testdrop = 0.0
    datatest.anglemul = 1
    filepath = os.path.join(savedir, filename)
    graph = load_graph(filepath)
    if(debugOp):
        for op in graph.get_operations():
            print(op.name)
    # Tensors live under the 'name/' scope because load_graph imports with name="name".
    x_l = graph.get_tensor_by_name('name/input_image:0')
    x_r = graph.get_tensor_by_name('name/input_image_r:0')
    x_f = graph.get_tensor_by_name('name/input_image_f:0')
    keep_prob = graph.get_tensor_by_name('name/keep_prob:0')
    y = graph.get_tensor_by_name('name/output:0')
    if(useBnorm or useSELU):
        phase_train = graph.get_tensor_by_name('name/phase_train:0')
    errors = []
    pltLabel = []
    pltResult = []
    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    # We launch a Session
    with tf.Session(graph=graph, config=config) as sess:
        start = time.time()
        for i in range(count):
            img_l, img_r, img_f, lb = testdata.batch(1, randomize=False)
            if(useBnorm or useSELU):
                feed = { x_l: img_l, x_r: img_r, x_f: img_f, keep_prob:testdrop, phase_train:False }
            else:
                feed = { x_l: img_l, x_r: img_r, x_f: img_f, keep_prob:testdrop }
            fetch = sess.run(y, feed_dict=feed)
            # Euclidean distance between predicted and true output vectors.
            error = np.average(np.sqrt(np.sum(np.square(lb - fetch), axis=1)))
            pltLabel.append(lb[0])
            pltResult.append(fetch[0])
            # Exclude NaN / absurdly large errors from the statistics.
            if not(math.isnan(error) or error > 10000000):
                errors.append(error)
            print(lb, fetch, error)
            del img_l, img_r, img_f, lb
        end = time.time()
        print("running time(sec)", end-start, "run/s", count/(end-start))
        print("mean", np.mean(errors))
        print("max", np.max(errors))
        print("min", np.min(errors))
        print("std", np.std(errors))
        print("writing plot")
        plt.ylim(-1,1)
        plt.xlim(-1,1)
        # Arrows point from the true position to the prediction; alpha fades
        # with relative error (more opaque red = smaller error).
        errFac = 1/max(errors)
        for i in range(0, len(pltLabel)):
            diff = pltResult[i] - pltLabel[i]
            plt.arrow(pltLabel[i][0], pltLabel[i][1], diff[0], diff[1], head_width=0.013, width=0.003, color=matplotlib.colors.to_rgba((1,0,0,1-max(errors[i]*errFac,0))))
        plt.show()
    print("end")
def __getstate__(self):
    """Pickling helper: copy the instance dict, dropping the multiprocessing
    Pool stored under 'p' (pools cannot be pickled).

    Bug fix: the original declared no parameters yet referenced `self`, so
    any actual call raised NameError. It now takes the instance explicitly,
    matching the standard __getstate__ protocol.
    """
    self_dict = self.__dict__.copy()
    del self_dict['p']
    return self_dict

def __setstate__(self, state):
    """Pickling helper: restore the attributes captured by __getstate__.

    Bug fix: the original referenced bare `__dict__` and `state` with no
    parameters, raising NameError when invoked; it now updates the given
    instance's __dict__ per the standard __setstate__ protocol.
    """
    self.__dict__.update(state)
if __name__ == "__main__":
    #model options: all outputs (checkpoints, plots, frozen graph) live under savedir.
    logdir = "C:\\Users\\AinL\\Documents\\Visual Studio Code\\eyegazemodels\\log\\"
    savedir = "C:\\Users\\AinL\\Documents\\Visual Studio Code\\eyegazemodels\\model eye13\\"
    modeltitle = "face2"
    #use selu options (self-normalizing activations instead of batch norm)
    useSELU = False
    #batch norm
    useBnorm = True
    #weight decaying
    useWeightDecay = True
    #learning rate decaying
    useRateDecay = True
    #load data before training. less cpu use, more training time
    dataPreLoad = False
    #checkpoint save/restore toggles
    loadCheckPoint = True
    saveCheckPoint = True
    #running: pick the pre-reading or lazy dataset implementation.
    if(dataPreLoad):
        from vision import eyemodel_lr2face_preread as eyemodel_lr2face
    else:
        from vision import eyemodel_lr2face as eyemodel_lr2face
    # Worker pool shared by both dataset decoders.
    p = multiprocessing.Pool(processes=multiprocessing.cpu_count())
    basedir = "C:\\Library\\정올 2017\\Source\\GazeDataset\\"
    # Training set: seven capture-session subdirectories.
    data = eyemodel_lr2face.decodeData([basedir + "eyesub1\\", basedir + "eyesub2\\", basedir + "eyesub3\\", basedir + "eyesub4\\", basedir + "eyesub5\\", basedir + "eyesub6\\", basedir + "eyesub7\\"], p)
    data.imagesize = 60
    data.facesize = 60
    data.debug = False
    #data.size = 50000
    # Validation set mirrors the training-set image dimensions.
    datatest = eyemodel_lr2face.decodeData([basedir + "valid1\\"], p)
    datatest.imagesize = data.imagesize
    datatest.facesize = data.facesize
    datatest.debug = False
    #operations: training is currently disabled; freeze the last checkpoint and test it.
    #Train(saveCheckPoint, loadmodel=loadCheckPoint, ep=250, useBnorm=useBnorm, bsize=10, tbsize=10, debugStep=20, decayEphoc=15)
    FreezeGraph()
    ModelTest("frozen.pb", count=200, useBnorm=useBnorm, testdata=datatest)
"[email protected]"
] | |
2980f2e663b0889e908e148788685956b736e69d | 308e293c9f140b82d63684166e08a7ef0c3d6e94 | /udemy_web_scroller.py | 4631d85068a68e6c84b76a4c8bb65f2f33f29cb0 | [] | no_license | El-Tatane/data_viz | 04d0ec930fad317b16eef09f74a2a98cd8d401de | 413f8933f397047658dbc4d69782b17796f6fc95 | refs/heads/master | 2023-03-19T19:31:12.287213 | 2021-03-16T18:23:02 | 2021-03-16T18:23:02 | 348,076,876 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,292 | py | # import HTMLSession from requests_html
from requests_html import HTMLSession
import pandas as pd
import re
import numpy as np
# Load the scraped course list; published_timestamp parsed as datetimes.
df = pd.read_csv("/data/udemy_courses_augmented.csv", parse_dates=["published_timestamp"])
# First run: make sure the target column exists so the .loc writes below succeed.
if "rating-number" not in df.columns:
    df["rating-number"] = np.nan
# create an HTML Session object
session = HTMLSession()
for i, row in df.iterrows():
    # Hard-coded resume point from a previous partial run — TODO confirm / parameterize.
    if i < 3380:
        continue
    # Use the object above to connect to needed webpage
    # resp = session.get(row["url"])
    resp = session.get(row["url"])
    # Run JavaScript code on webpage
    try:
        resp.html.render()
        res = resp.html.html
    except Exception as e:
        # Page failed to render: log the URL and error, skip this row.
        print(row["url"])
        print(e)
        continue
    # Extract the rating badge; two identical matches are also accepted.
    p = re.compile('rating-number">(.*?)</span>')
    result = p.findall(res)
    if len(result) == 1 or len(result) == 2 and result[0] == result[1]:
        df.loc[i, "rating-number"] = result[0]
    else:
        # Ambiguous/missing rating: leave the cell untouched and log it.
        print(i, row["url"], result)
    # Periodic checkpoint every 20 rows so progress survives crashes.
    if i % 20 == 0:
        print(i, row["url"], result)
        df.to_csv("/home/tatane/data/udemy/udemy_courses_augmented.csv", index=False)
print(df["rating-number"].unique())
print(df.head(25))
# Final save of the augmented dataset.
df.to_csv("/home/tatane/data/udemy/udemy_courses_augmented.csv", index=False)
if __name__ == "__main__":
    pass
"[email protected]"
] | |
64b7b2c3d3a294acb23353278928f7e8b719f9dc | 4a17c84869cbd26d96a4b004475c22b66f81af73 | /scallop/migrations/0004_auto_20171102_1540.py | bdc9003fc65f2b6a4e92653668743bf46b1e62ce | [] | no_license | TakeMeHigher/Scallop | 3383dcee0099036fd030dee66580629c760b1074 | 1cc5dfa1b27ec4f74fb3cfa7cc591c6331495bc4 | refs/heads/master | 2021-08-14T09:59:06.475949 | 2017-11-15T09:38:25 | 2017-11-15T09:38:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 533 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-11-02 07:40
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: relax ActivityApply.attachment.

    Makes the upload optional (blank/null allowed) and stores files under
    ./upload/attachment/. Generated code — edit with care.
    """

    dependencies = [
        ('scallop', '0003_auto_20171102_1537'),
    ]

    operations = [
        migrations.AlterField(
            model_name='activityapply',
            name='attachment',
            # verbose_name is Chinese for "upload attachment".
            field=models.FileField(blank=True, null=True, upload_to='./upload/attachment/', verbose_name='上传附件'),
        ),
    ]
| [
"[email protected]"
] | |
aae0e0c40b1e227facedb39b48e89eaaed0ad111 | 1b149802f4f0e5220bda7941fb7c8884d44a7af4 | /Trabajo/demostracion/deteccion_facial/deteccion_facial_video.py | 355d533c4f6e04954763220a64363769538249c2 | [] | no_license | irenchuchu/PDIH | f5a32c3802d4e1056747f13de5fb68cd5050119b | fc68c1c4cf7a01fb916307d09ce21ee7f8cb22e6 | refs/heads/master | 2023-05-08T14:48:42.271553 | 2021-05-30T19:32:03 | 2021-05-30T19:32:03 | 342,271,465 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,788 | py | # -*- coding: latin-1 -*-
"""
Código 1.2 - Detección facial en un vídeo en tiempo real
Este programa detecta rostros con una webcam.
Escrito por Glare y Transductor
www.robologs.net
"""
import cv2
#Cargamos nuestro classificador de Haar:
cascada_rostro = cv2.CascadeClassifier('haarcascade_frontalface_alt.xml')
# Si utilizas otro clasificador o lo tienes guardado en un directorio diferente al de este script python,
# tendrás que cambiar 'haarcascade_frontalface_alt.xml' por el path a tu fichero .xml.
#Iniciar la webcam:
webcam = cv2.VideoCapture(0)
# NOTA 1: Si no funciona puedes cambiar el índice 0 por otro, o cambiarlo por la dirección de tu webcam (p.ej. '/dev/video0')
# NOTA 2: también debería funcionar si en vez de una webcam utilizas un fichero de vídeo.
#Recordamos al usuario cuál es la tecla para salir:
print("\nRecordatorio: pulsa 'ESC' para cerrar.\n")
while(1):
#Capturar una imagen con la webcam:
valido, img = webcam.read()
#Si la imagen es válida (es decir, si se ha capturado correctamente), continuamos:
if valido:
#Convertir la imagen a gris:
img_gris = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
#Buscamos los rostros:
coordenadas_rostros = cascada_rostro.detectMultiScale(img_gris, 1.3, 5)
#Recorremos el array 'coordenadas_rostros' y dibujamos los rectángulos sobre la imagen original:
for (x,y,ancho, alto) in coordenadas_rostros:
cv2.rectangle(img, (x,y), (x+ancho, y+alto), (0,0,255) , 3)
#Abrimos una ventana con el resultado:
cv2.imshow('Output', img)
#Salir con 'ESC':
k = cv2.waitKey(5) & 0xFF
if k == 27:
cv2.destroyAllWindows()
break
webcam.release()
| [
"[email protected]"
] | |
02f70663cdecddec53ac871ad167ec7ab4e8a4d9 | b4b462652e23681811e8828fd7760798c84270ad | /main.py | 64ba781d0b1913cd7b6e971e6c01a3a7d9e50236 | [] | no_license | Xetro/Python-test-game | e0178347dd0fdec8919b1d8ba23bb610b4183d2c | 617137aa6d61ab08d865d326a9b37f1b6e3391d8 | refs/heads/master | 2020-06-03T02:27:31.972977 | 2018-09-14T02:08:21 | 2018-09-14T02:08:21 | 41,674,552 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,071 | py | import pygame
import time
import random
import score
import data
import ai
import move
import collision
import background
# One-time application setup (runs at import time).
pygame.init()
pygame.display.set_caption('Goblin gre v nebesa')
# Basic RGB palette used by the drawing helpers below.
black = (0,0,0)
white = (255,255,255)
red = (255,0,0)
sky_blue = (172,225,255)
# Main display surface sized from the data module, frame clock, and score tracker.
igraDisplay = pygame.display.set_mode((data.display_width,data.display_height))
ura = pygame.time.Clock()
game_score = score.Score()
# Build and load the procedurally generated background world.
background.generate_world()
background.load_world()
print(len(data.background_objects))
def draw(x, y, img):
    """Blit *img* onto the main display surface at pixel position (x, y)."""
    position = (x, y)
    igraDisplay.blit(img, position)
def text_objects(text, font):
    """Render *text* (any object; str() is applied) in red with *font*.

    Returns a (surface, rect) pair ready for positioning and blitting.
    """
    rendered = font.render(str(text), True, red)
    return rendered, rendered.get_rect()
def message_display(text, size, opcija):
    """Draw *text* centered on screen at font *size* and flip the display.

    opcija == 1 shifts the message 40 px below center; any other value
    centers it exactly.
    """
    font = pygame.font.SysFont('verdana', size)
    surface, rect = text_objects(text, font)
    y_offset = 40 if opcija == 1 else 0
    rect.center = (data.display_width / 2, data.display_height / 2 + y_offset)
    igraDisplay.blit(surface, rect)
    pygame.display.update()
def score_display(score):
    """Render the current score in a fixed 30 pt font anchored near the top-left.

    Note: the parameter deliberately keeps its original name even though it
    shadows the imported `score` module inside this function.
    """
    font = pygame.font.SysFont('verdana', 30)
    surface, rect = text_objects(score, font)
    rect.center = (50, 50)
    igraDisplay.blit(surface, rect)
def crash():
    """Show the crash screen, reset the run, then restart the game loop.

    NOTE(review): this calls game_loop() instead of returning to it, so every
    crash adds a stack frame; a long session with many crashes will eventually
    hit Python's recursion limit. Consider signalling the caller instead.
    """
    message_display('CRASHED!', 144, 0)
    score_display(game_score.score)
    pygame.display.update()
    time.sleep(2)
    # Reset the run: score to zero, all characters back to their spawn points.
    game_score.score = 0
    for enemy in data.list_of_chars:
        enemy.respawn()
    data.goblin.respawn()
    data.is_crashed = False
    game_loop()
# Splash screens shown before the game starts: title card for 2 s, year stamp for 5 s.
message_display('Goblin gre v nebo PC igra', 24, 0)
time.sleep(2)
message_display('2015', 30, 1)
time.sleep(5)
def game_loop():
    """Main game loop: poll input, advance AI/physics, redraw at ~60 FPS.

    Runs until gameExit becomes True (never set in the current code, so the
    loop effectively ends only via crash()/quit()).
    """
    start_time = time.time()
    gameExit = False
    while not gameExit:
        # Frame time in seconds; used for frame-rate-independent movement.
        data.since_last_frame = ura.tick(60) / 1000.0
        ###########################DEBUGGING###########################
        #print(data.since_last_frame)
        #print(ura.get_fps())
        #print(data.goblin.x_speed * data.since_last_frame)
        ###############################################################
        # Update phase: input, enemy AI, movement, then collision checks.
        move.scan_input()
        ai.calc_enemies()
        move.move_goblin()
        move.move_enemies()
        collision.update_coll()
        collision.update_goblin_coll()
        collision.check_coll()
        # Render phase: clear to sky color, then draw background, goblin, enemies.
        igraDisplay.fill(sky_blue)
        if data.is_flying == True:
            # While flying, the background scrolls; draw the moved slice.
            moved = background.move_background()
            for obj in moved:
                draw(obj.x, obj.y, obj.img)
        else:
            # Grounded: draw the first 20 background objects back-to-front.
            objects = data.background_objects[0:20]
            objects.reverse()
            for obj in objects:
                draw(obj.x, obj.y, obj.img)
        draw(data.goblin.x, data.goblin.y, data.goblin.img)
        for obj in data.list_of_chars:
            draw(obj.x, obj.y, obj.img)
        ###########################################################DEBUGGING##################################
        #pygame.draw.rect(igraDisplay, red, [data.goblin_box[0].x, data.goblin_box[0].y, data.goblin_box[0].width, data.goblin_box[0].height])
        #pygame.draw.rect(igraDisplay, red, [data.goblin_box[1].x, data.goblin_box[1].y, data.goblin_box[1].width, data.goblin_box[1].height])
        #pygame.draw.rect(igraDisplay, red, [data.goblin_box[2].x, data.goblin_box[2].y, data.goblin_box[2].width, data.goblin_box[2].height])
        #pygame.draw.rect(igraDisplay, red, [data.goblin_box[3].x, data.goblin_box[3].y, data.goblin_box[3].width, data.goblin_box[3].height])
        #pygame.draw.rect(igraDisplay, red, [data.goblin_box[4].x, data.goblin_box[4].y, data.goblin_box[4].width, data.goblin_box[4].height])
        ##########################################################################################################
        score_display(game_score.get_score(start_time))
        pygame.display.update()
        if data.is_crashed == True:
            crash()
# Enter the main loop; when it returns, shut pygame down and exit the process.
game_loop()
pygame.quit()
quit()
"[email protected]"
] | |
f59baf7de71031e6500f015ec01ae53c367407fb | b4cdde95fddba78d797510e74aacdeb8ef29038b | /cleaner/views.py | 1b343660e86a1dc4e0be41574ecd1742bf5712fc | [] | no_license | GwendolynYang/survival_api | 043530bfe8d3e6e286fd28089f14194cdb962eff | f9dc3d8c8c0fee41594a1d502ce4dc69d8178e7f | refs/heads/master | 2022-12-14T04:33:46.849870 | 2019-08-03T20:43:21 | 2019-08-03T20:43:21 | 200,399,319 | 0 | 1 | null | 2022-06-21T22:28:31 | 2019-08-03T16:50:22 | Python | UTF-8 | Python | false | false | 6,282 | py | from django.shortcuts import render
import numpy as np
import pandas as pd
#from cleaner.apps import mhv_dict
# Create your views here.
def get_street(soup):
    """Return the street-address headline text from a listing page."""
    node = soup.find("span", class_="Text__TextBase-sc-1cait9d-0 dhOdUy")
    return node.text
def get_city(soup):
    """Return the 'City, State ZIP' summary line from a listing page."""
    node = soup.find(
        "span",
        class_="HomeSummaryShared__CityStateAddress-vqaylf-0 fyHNRA Text__TextBase-sc-1cait9d-0 hUlhgk")
    return node.text
def get_dom(soup):
    """Return days-on-market as an int, parsed from the home-features list.

    Looks for the first <li> whose text reads "<N> Days on Trulia"; returns
    None implicitly when no such entry exists.
    """
    features = soup.find("ul", attrs={"data-testid": "home-features"})
    for entry in features.find_all('li'):
        words = entry.text.strip().split(' ')
        if words[1:4] == ['Days', 'on', 'Trulia']:
            return int(words[0])
def get_price_from_h3(soup):
    """Return the price (int) shown in the page's first <h3>.

    This is the *current* price, which may differ from the original
    listing price.
    """
    raw = soup.find('h3').text
    return int(raw.strip('$').replace(',', ''))
def get_price_from_history(priceHistory):
    """Return (price_str, price, date) of the most recent 'Listed For Sale' event.

    Returns (0, 0, 0) when the history is empty, when the most recent event
    is a sale (i.e. the listing price is not in the table), or when no
    listing event exists at all.

    Bug fix: the original returned a 2-tuple (0, 0) on the empty/Sold paths
    and fell off the end (None) when no matching event existed, while its
    caller (featPrep) unpacks three values — both cases raised at the call
    site. Every path now returns a 3-tuple.
    """
    if not priceHistory:
        return 0, 0, 0
    for row in priceHistory:
        if row[2] == 'Sold':
            return 0, 0, 0
        if row[2] == 'Listed For Sale':
            date = row[0]
            price_str = row[1]
            price = int(price_str.strip('$').replace(',', ''))
            return price_str, price, date
    return 0, 0, 0
def get_price_history_2(soup):
    """Return the price-history table as [[date, price, event], ...].

    Rows that do not have exactly three cells are skipped. An empty table
    yields a single all-NaN placeholder row.

    Bug fix: the original tested `find_all('tr') is None`, but find_all
    returns a (possibly empty) list and never None, so the NaN branch was
    dead code; it also called find_all twice. The emptiness check now
    actually fires, and the row list is fetched once.
    """
    table = soup.find('div', attrs={"data-testid": "price-history-container"})
    rows = table.find_all('tr')
    priceHistory = []
    if not rows:
        priceHistory.append([np.nan, np.nan, np.nan])
    else:
        for tr in rows:
            cells = [td.text for td in tr.find_all('td')]
            if len(cells) == 3:
                priceHistory.append(cells)
    return priceHistory
def get_zipcode(soup):
    """Return the zipcode: the last whitespace-separated token of the <h1> text."""
    heading = soup.find('h1').get_text()
    return heading.split()[-1]
def get_lotsize(soup):
    """Return the lot size in square feet (int), or None when unavailable.

    Parses the 'Lot Size:' entry of the home-features list; values given in
    acres are converted (1 acre = 43560 sqft).

    Bug fix: the original raised UnboundLocalError when the unit was neither
    'sqft' nor 'acres' (it reached `return lotsize` with the name unset).
    Unknown units and a missing 'Lot Size' entry now both return None.
    """
    features = soup.find("ul", attrs={"data-testid": "home-features"})
    for entry in features.find_all('li'):
        words = entry.text.strip().split(' ')
        if words[:2] == ['Lot', 'Size:']:
            if words[-1] == 'sqft':
                return int(words[2].replace(',', ''))
            if words[-1] == 'acres':
                return int(float(words[2]) * 43560)
            return None
    return None
def get_sqft(soup):
    """Return the interior square footage (int) from the summary media blocks.

    Scans each media-content block for text shaped like "<N> sqft"; returns
    None implicitly when no block matches.
    """
    container = soup.find(class_="StyledSectionContainer__Container-hjriq0-0 jtfHO")
    blocks = container.find_all('div', class_="MediaBlock__MediaContent-skmvlj-1 dCsAgE")
    for block in blocks:
        words = block.get_text().strip().split(' ')
        if words[1] == 'sqft':
            return int(words[0].replace(',', ''))
def get_eventMonth(priceHistory):
    """Return (listing_month, sold_month) as ints; 0 means "not found".

    Scans rows in order and stops at the first 'Listed For Sale'; until then
    the most recent 'Sold' row seen sets sold_month. Dates are assumed to be
    MM/... strings (first two characters are the month).
    """
    month_listed = 0
    month_sold = 0
    for row in priceHistory:
        event = row[2]
        if event == 'Listed For Sale':
            month_listed = int(row[0][:2])
            break
        if event == 'Sold':
            month_sold = int(row[0][:2])
    return month_listed, month_sold
def get_eventCount(priceHistory):
    """Count (listings, price changes, sales) in the history, post-1989 only.

    Records dated 1989 or earlier are ignored; dates are assumed to end in a
    four-digit year.
    """
    counts = {'Listed For Sale': 0, 'Price Change': 0, 'Sold': 0}
    for row in priceHistory:
        event = row[2]
        if event in counts and int(row[0][-4:]) > 1989:
            counts[event] += 1
    return counts['Listed For Sale'], counts['Price Change'], counts['Sold']
def get_r2m(price, zipcode):
    """Return price / median-home-value for *zipcode*, rounded to 2 decimals.

    Unknown zipcodes fall back to a 950,000.0 median. Performance fix: the
    original re-read and re-built the median-home-value CSV on every call;
    the lookup dict is now built once and cached on the function object.
    """
    # Zipcode bug workaround: the table's keys lost their leading '0'
    # (presumably the CSV zipcode column was read as int — TODO confirm),
    # so strip a leading '0' here to match.
    if zipcode[0] == '0':
        zipcode = zipcode[1:]
    if not hasattr(get_r2m, '_mhv_dict'):
        mhv = pd.read_csv('./survival_api_data/MedianHomeValue.csv')
        get_r2m._mhv_dict = dict(zip(mhv.zipcode.astype(str), mhv.MedianHomeValue))
    medianHV = get_r2m._mhv_dict.get(zipcode, 950000.0)
    return round(price / medianHV, 2)
def featPrep(soup):
    """Assemble the feature dict for one listing page.

    Keys are named after the downstream dataframe's column names.
    """
    history = get_price_history_2(soup)
    price_str, listPrice, date = get_price_from_history(history)
    cPrice = get_price_from_h3(soup)
    mList, mSold = get_eventMonth(history)
    nList, nPC, nSold = get_eventCount(history)
    zipcode = get_zipcode(soup)
    # Prefer the last listing price; when absent, fall back to the current
    # header price with no discount.
    if listPrice == 0:
        price = cPrice
        discount = 0
    else:
        price = listPrice
        discount = round((listPrice - cPrice) / listPrice, 2)
    return {
        "address": get_street(soup) + ', ' + get_city(soup),
        "days": get_dom(soup),                # int
        "discount": discount,                 # float %2f
        "listingPrice": price_str,
        "price": price,                       # int
        "r2M": get_r2m(price, zipcode),
        "MonthList": mList,
        "MonthSold": mSold,
        "NumList": nList,
        "NumPC": nPC,
        "NumSold": nSold,
        "zipcode": zipcode,
        "sqft": get_sqft(soup),
        "lotsize": get_lotsize(soup)
    }
| [
"[email protected]"
] | |
114be21a34ff2ddbff7b7d182d8613f4cb5770c8 | 15e6385746ccf4b8eb6c6e302aca236021bb8781 | /Other Data Structures_qstackheap/le84_largest_rectangle_inhistogram.py | 10fae62df2e1404369481b8502e83c61180612a5 | [] | no_license | akb46mayu/Data-Structures-and-Algorithms | 11c4bbddc9b4d286e1aeaa9481eb6a620cd54746 | de98494e14fff3e2a468da681c48d60b4d1445a1 | refs/heads/master | 2021-01-12T09:51:32.618362 | 2018-05-16T16:37:18 | 2018-05-16T16:37:18 | 76,279,268 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,319 | py | """
Given n non-negative integers representing the histogram's bar height where the width of each bar is 1, find the area of largest rectangle in the histogram.
Above is a histogram where width of each bar is 1, given height = [2,1,5,6,2,3].
The largest rectangle is shown in the shaded area, which has area = 10 unit.
For example,
Given heights = [2,1,5,6,2,3],
return 10.
"""
class Solution(object):
    """LeetCode 84 — Largest Rectangle in Histogram."""

    def largestRectangleArea(self, heights):
        """Return the area of the largest rectangle under the histogram.

        Monotonic-stack solution, O(n) time / O(n) space: the stack holds
        indices of bars with increasing heights; popping a bar fixes both
        the left boundary (new stack top) and right boundary (current index)
        of the best rectangle that uses that bar's full height.

        :type heights: List[int]
        :rtype: int
        """
        if not heights:
            return 0
        total = len(heights)
        best = 0
        pending = []
        for idx in range(total + 1):
            # A sentinel height of -1 past the end flushes the whole stack.
            bar = heights[idx] if idx < total else -1
            while pending and bar <= heights[pending[-1]]:
                height = heights[pending.pop()]
                # Width spans from just right of the previous lower bar up to idx-1;
                # with an empty stack the bar extends all the way to the left edge.
                width = (idx - 1 - pending[-1]) if pending else idx
                best = max(best, height * width)
            pending.append(idx)
        return best
| [
"[email protected]"
] | |
de296b43759f75677af22028ff6c48ec9ce092e3 | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/1/c2g.py | 1ac6e76798c081ec5b0fc6446fdee8536f2d29f3 | [] | no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
    # Execute a c2G "print" statement (Python 2 file — bare print statements).
    # lineRemaining: whitespace-split tokens following the 'c2G' keyword.
    # Only lines whose first and last token are a bare double quote print anything.
    if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
        if len(lineRemaining) > 2:
            #data to print: drop the opening and closing quote tokens,
            # then re-join the payload with single spaces.
            lineRemaining = lineRemaining[1:-1]
            print ' '.join(lineRemaining)
        else:
            # Only the two quote tokens: print an empty line.
            print
def main(fileName):
    # Interpret the toy "c2G" language: every line must start with the keyword
    # 'c2G'; the remaining tokens are handed to printFunction. Any other
    # leading token prints ERROR and stops interpretation.
    with open(fileName) as f:
        for line in f:
            data = line.split()
            if data[0] == 'c2G':
                printFunction(data[1:])
            else:
                print 'ERROR'
                return
# Script entry point: the program file to interpret is the first CLI argument.
if __name__ == '__main__':
    main(sys.argv[1])
"[email protected]"
] | |
b84396f84f4bfec99aa7625e8d2b39f43db47ca0 | b394a56502a46b51d7db05cd237e26d10686415e | /python_mysql/below_average.py | 94d4a1a947722b2f3d0b78e8a2c006333157c3f9 | [] | no_license | shishi5089/PyCharm_Projects | 092ffca117f8c864d0b510285e0c6ef42593754b | 4e05fdf3b70e6290a76d93f69b7d36a1e780a3c9 | refs/heads/master | 2022-04-22T22:53:51.110296 | 2020-04-28T10:16:25 | 2020-04-28T10:16:25 | 259,600,407 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 335 | py | from mysql.connector import connect
# Connect to the local MySQL instance (root with empty password — dev setup only).
db = connect(host='localhost', user='root', passwd='', database='python_db')
cursor = db.cursor()
# Select students whose height is below the class average.
sql = "select names , height from students where height < (SELECT AVG(height)FROM students)"
cursor.execute(sql)
data = cursor.fetchall()
print(data)
# Print one "name height" pair per matching row.
for item in data:
    print(item[0],item[1])
"[email protected]"
] | |
786bd6edd4e6462b480795388da60ddb8cff1acb | 6117c4df6b0c382064c187c49d861efaf7e757ab | /75/69.py | 819c63367cab9fd8cec1e80b5e501162ea851eff | [
"MIT"
] | permissive | ElyKar/Euler | 8bc4050870275d649199dbab5c930ffeb7b5d708 | 38744b553b22565ac30ece06e2e3fbf3408068e2 | refs/heads/master | 2021-01-10T17:13:20.291206 | 2016-02-21T16:16:40 | 2016-02-21T16:16:40 | 52,213,916 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 914 | py | #!/bin/python
from math import sqrt
from fractions import gcd
import time
def div(x):
    """Return True when x is prime (trial division; name kept for callers).

    Fixes over the original:
    - x == 0 returned True (the loop body never ran and only x != 1 was
      checked); everything below 2 is now rejected up front.
    - sqrt(x) was recomputed on every loop iteration; the bound is now
      hoisted out of the loop.
    """
    if x < 2:
        return False
    limit = int(sqrt(x))
    cur = 2
    while cur <= limit:
        if x % cur == 0:
            return False
        cur += 1
    return True
def firstDiv(n):
    """Return the smallest prime from the global `primes` list dividing n, or 0."""
    for prime in primes:
        if n % prime == 0:
            return prime
    return 0
def totient(x):
    # Euler's totient, memoised in the module-level `tots` table: tots[i]
    # caches phi(i+1), 0 meaning "not computed yet". Python 2 file: `/`
    # below is integer division on ints.
    if tots[x-1] != 0: return tots[x-1]
    first = firstDiv(x)
    if first == 2:
        # Even x: phi(2m) = 2*phi(m) when m is even, phi(m) when m is odd.
        # (local `div` shadows the module-level primality helper; unused here)
        div = x/2
        if div%2 == 0: tots[x-1] = 2*totient(div)
        else: tots[x-1] = totient(div)
    else:
        # Split x = first * (x/first). phi is multiplicative only for coprime
        # factors; when gcd(first, x/first) = d > 1 correct by d/phi(d).
        d = gcd(first, x/first)
        if d != 1:
            tots[x-1] = totient(first)*totient(x/first)*d/totient(d)
        else:
            tots[x-1] = totient(first)*totient(x/first)
    return tots[x-1]
# All primes below 1,000,000 via trial division (div() is the primality test).
primes = [x for x in range(1, 1000000) if div(x)]
# Memo table for totient(); seed phi(p) = p - 1 for every prime p.
tots = [0 for i in range(1000000)]
for p in primes: tots[p-1] = p-1
start = time.time()
maxTot = 0
maxN = 0
# Project Euler 69: find n maximising n/phi(n).
# NOTE(review): `tots` holds only 1,000,000 entries, so totient(x) indexes out
# of range once x exceeds 1,000,000 — confirm the intended search bound.
for x in range(2,10000001):
    tot = totient(x)
    if (x*1./tot) > maxTot:
        maxTot = x*1./tot
        maxN = x
print maxN
print time.time()-start
"[email protected]"
] | |
64a783b6691013ba8e0860999d123c8b6a50963e | dc0f18ead291ffd1404b4504aaccb0eaa36426fc | /build/drivers/camera_calibration/image_pipeline/depth_image_proc/catkin_generated/pkg.installspace.context.pc.py | 4ebdc84640d3fb0adb07f845a5e120c8277881ba | [] | no_license | adityavgupta/Intro_to_robotics | cb80fd708270b413bf1dca7171fcd013840292b0 | a4095832c215e7ba9b3bab0cf50389db88d249b9 | refs/heads/master | 2023-01-21T22:37:32.797024 | 2020-12-08T21:20:41 | 2020-12-08T21:20:41 | 303,027,662 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 454 | py | # generated from catkin/cmake/template/pkg.context.pc.in
# Values below are substituted by catkin at configure time; do not edit by hand.
CATKIN_PACKAGE_PREFIX = ""
# Exported include directories (';'-separated template expanded against ${prefix}).
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "${prefix}/include".split(';') if "${prefix}/include" != "" else []
# Run-time catkin package dependencies (space-separated; empty for this package).
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
# Linker flags for this package's exported libraries.
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "-ldepth_image_proc".split(';') if "-ldepth_image_proc" != "" else []
PROJECT_NAME = "depth_image_proc"
# Install-space root of the workspace this context was generated for.
PROJECT_SPACE_DIR = "/home/ur3/catkin_avgupta3/install"
PROJECT_VERSION = "1.13.0"
| [
"[email protected]"
] | |
9fab134b6befbb4b28932dde056364591c7e0d39 | 930c207e245c320b108e9699bbbb036260a36d6a | /BRICK-RDFAlchemy/generatedCode/brick/brickschema/org/schema/_1_0_2/Brick/Occupied_Heating_Min_Supply_Air_Flow.py | 0424ebc569799a5c4ee9349b05b742169e3a8d68 | [] | no_license | InnovationSE/BRICK-Generated-By-OLGA | 24d278f543471e1ce622f5f45d9e305790181fff | 7874dfa450a8a2b6a6f9927c0f91f9c7d2abd4d2 | refs/heads/master | 2021-07-01T14:13:11.302860 | 2017-09-21T12:44:17 | 2017-09-21T12:44:17 | 104,251,784 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 610 | py | from rdflib import Namespace, Graph, Literal, RDF, URIRef
from rdfalchemy.rdfSubject import rdfSubject
from rdfalchemy import rdfSingle, rdfMultiple, rdfList
from brick.brickschema.org.schema._1_0_2.Brick.Occupied_Heating_Supply_Air_Flow import Occupied_Heating_Supply_Air_Flow
from brick.brickschema.org.schema._1_0_2.Brick.Heating_Min_Supply_Air_Flow import Heating_Min_Supply_Air_Flow
class Occupied_Heating_Min_Supply_Air_Flow(Occupied_Heating_Supply_Air_Flow,Heating_Min_Supply_Air_Flow):
    """RDFAlchemy mapping for the Brick 1.0.2 concept of the same name.

    Combines its two parent concepts (occupied heating supply air flow and
    heating minimum supply air flow); the only mapping detail needed is the
    RDF type URI below, which rdfalchemy uses to (de)serialize instances.
    """
    rdf_type = Namespace('https://brickschema.org/schema/1.0.2/Brick#').Occupied_Heating_Min_Supply_Air_Flow
| [
"[email protected]"
] | |
2cff202abc4ccbf9f70b3a725e734553ac9a8edb | 8419eaa22e58a2efbb7bdf1bccfc66a9e3288d75 | /tensorflow/python/debug/wrappers/local_cli_wrapper.py | c46a4e7d1aa6dc23eaaad4a0920f649ca8624272 | [
"Apache-2.0"
] | permissive | PipelineAI/tensorflow | f539227fd5d3f304b4f246877e35303dbd388a0c | 5d8e69768230ea8765a7c78cf1fa22c3ab2a4757 | refs/heads/master | 2021-05-05T21:54:02.830548 | 2018-01-15T04:30:05 | 2018-01-15T04:30:05 | 115,791,564 | 0 | 1 | Apache-2.0 | 2018-01-15T05:38:46 | 2017-12-30T11:08:37 | C++ | UTF-8 | Python | false | false | 25,911 | py | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Debugger Wrapper Session Consisting of a Local Curses-based CLI."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import os
import shutil
import sys
import tempfile
# Google-internal import(s).
from tensorflow.python.debug.cli import analyzer_cli
from tensorflow.python.debug.cli import cli_shared
from tensorflow.python.debug.cli import command_parser
from tensorflow.python.debug.cli import debugger_cli_common
from tensorflow.python.debug.cli import profile_analyzer_cli
from tensorflow.python.debug.cli import stepper_cli
from tensorflow.python.debug.cli import ui_factory
from tensorflow.python.debug.lib import common
from tensorflow.python.debug.lib import debug_data
from tensorflow.python.debug.wrappers import framework
_DUMP_ROOT_PREFIX = "tfdbg_"
class LocalCLIDebugWrapperSession(framework.BaseDebugWrapperSession):
  """Concrete subclass of BaseDebugWrapperSession implementing a local CLI.

  This class has all the methods that a `session.Session` object has, in order
  to support debugging with minimal code changes. Invoking its `run()` method
  will launch the command-line interface (CLI) of tfdbg.
  """
  def __init__(self,
               sess,
               dump_root=None,
               log_usage=True,
               ui_type="curses",
               thread_name_filter=None):
    """Constructor of LocalCLIDebugWrapperSession.

    Args:
      sess: The TensorFlow `Session` object being wrapped.
      dump_root: (`str`) optional path to the dump root directory. Must be a
        directory that does not exist or an empty directory. If the directory
        does not exist, it will be created by the debugger core during debug
        `run()` calls and removed afterwards. If `None`, the debug dumps will
        be at tfdbg_<random_string> under the system temp directory.
      log_usage: (`bool`) whether the usage of this class is to be logged.
      ui_type: (`str`) requested UI type. Currently supported:
        (curses | readline)
      thread_name_filter: Regular-expression white list for thread name. See
        the doc of `BaseDebugWrapperSession` for details.

    Raises:
      ValueError: If dump_root is an existing and non-empty directory or if
        dump_root is a file.
    """
    if log_usage:
      pass  # No logging for open-source.
    framework.BaseDebugWrapperSession.__init__(
        self, sess, thread_name_filter=thread_name_filter)
    if not dump_root:
      # NOTE(review): tempfile.mktemp only reserves a *name*, not the
      # directory; the path is created later by the debugger core. This is
      # race-prone in principle — consider mkdtemp if the "must not exist"
      # precondition of the core can be relaxed. TODO confirm.
      self._dump_root = tempfile.mktemp(prefix=_DUMP_ROOT_PREFIX)
    else:
      if os.path.isfile(dump_root):
        raise ValueError("dump_root path points to a file: %s" % dump_root)
      elif os.path.isdir(dump_root) and os.listdir(dump_root):
        raise ValueError("dump_root path points to a non-empty directory: %s" %
                         dump_root)
      self._dump_root = dump_root
    self._initialize_argparsers()
    # Registered tensor filters.
    self._tensor_filters = {}
    # Register frequently-used filter(s).
    self.add_tensor_filter("has_inf_or_nan", debug_data.has_inf_or_nan)
    # Below are the state variables of this wrapper object.
    # _active_tensor_filter: what (if any) tensor filter is in effect. If such
    #   a filter is in effect, this object will call run() method of the
    #   underlying TensorFlow Session object until the filter passes. This is
    #   activated by the "-f" flag of the "run" command.
    # _run_through_times: keeps track of how many times the wrapper needs to
    #   run through without stopping at the run-end CLI. It is activated by the
    #   "-t" option of the "run" command.
    # _skip_debug: keeps track of whether the current run should be executed
    #   without debugging. It is activated by the "-n" option of the "run"
    #   command.
    #
    # _run_start_response: keeps track what OnRunStartResponse the wrapper
    #   should return at the next run-start callback. If this information is
    #   unavailable (i.e., is None), the run-start CLI will be launched to ask
    #   the user. This is the case, e.g., right before the first run starts.
    self._active_tensor_filter = None
    self._active_tensor_filter_run_start_response = None
    self._run_through_times = 1
    self._skip_debug = False
    self._run_start_response = None
    self._is_run_start = True
    self._ui_type = ui_type
  def _initialize_argparsers(self):
    """Build the argparse parsers backing the CLI commands.

    One parser each for the "run", "invoke_stepper", "run_info" and
    "print_feed" commands, stored in self._argparsers keyed by command name.
    """
    self._argparsers = {}
    ap = argparse.ArgumentParser(
        description="Run through, with or without debug tensor watching.",
        usage=argparse.SUPPRESS)
    ap.add_argument(
        "-t",
        "--times",
        dest="times",
        type=int,
        default=1,
        help="How many Session.run() calls to proceed with.")
    ap.add_argument(
        "-n",
        "--no_debug",
        dest="no_debug",
        action="store_true",
        help="Run through without debug tensor watching.")
    ap.add_argument(
        "-f",
        "--till_filter_pass",
        dest="till_filter_pass",
        type=str,
        default="",
        help="Run until a tensor in the graph passes the specified filter.")
    ap.add_argument(
        "--node_name_filter",
        dest="node_name_filter",
        type=str,
        default="",
        help="Regular-expression filter for node names to be watched in the "
        "run, e.g., loss, reshape.*")
    ap.add_argument(
        "--op_type_filter",
        dest="op_type_filter",
        type=str,
        default="",
        help="Regular-expression filter for op type to be watched in the run, "
        "e.g., (MatMul|Add), Variable.*")
    ap.add_argument(
        "--tensor_dtype_filter",
        dest="tensor_dtype_filter",
        type=str,
        default="",
        help="Regular-expression filter for tensor dtype to be watched in the "
        "run, e.g., (float32|float64), int.*")
    ap.add_argument(
        "-p",
        "--profile",
        dest="profile",
        action="store_true",
        help="Run and profile TensorFlow graph execution.")
    self._argparsers["run"] = ap
    ap = argparse.ArgumentParser(
        description="Invoke stepper (cont, step, breakpoint, etc.)",
        usage=argparse.SUPPRESS)
    self._argparsers["invoke_stepper"] = ap
    ap = argparse.ArgumentParser(
        description="Display information about this Session.run() call.",
        usage=argparse.SUPPRESS)
    self._argparsers["run_info"] = ap
    self._argparsers["print_feed"] = command_parser.get_print_tensor_argparser(
        "Print the value of a feed in feed_dict.")
  def add_tensor_filter(self, filter_name, tensor_filter):
    """Add a tensor filter.

    Args:
      filter_name: (`str`) name of the filter.
      tensor_filter: (`callable`) the filter callable. See the doc string of
        `DebugDumpDir.find()` for more details about its signature.
    """
    self._tensor_filters[filter_name] = tensor_filter
  def on_session_init(self, request):
    """Overrides on-session-init callback.

    Args:
      request: An instance of `OnSessionInitRequest`.

    Returns:
      An instance of `OnSessionInitResponse`.
    """
    # This wrapper never vetoes session creation; always proceed.
    return framework.OnSessionInitResponse(
        framework.OnSessionInitAction.PROCEED)
  def on_run_start(self, request):
    """Overrides on-run-start callback.

    Invoke the CLI to let user choose what action to take:
      `run` / `invoke_stepper`.

    Args:
      request: An instance of `OnRunStartRequest`.

    Returns:
      An instance of `OnRunStartResponse`.
    """
    self._is_run_start = True
    self._update_run_calls_state(
        request.run_call_count, request.fetches, request.feed_dict,
        is_callable_runner=request.is_callable_runner)
    if self._active_tensor_filter:
      # If we are running until a filter passes, we just need to keep running
      # with the previous `OnRunStartResponse`.
      return self._active_tensor_filter_run_start_response
    self._exit_if_requested_by_user()
    if self._run_call_count > 1 and not self._skip_debug:
      if self._run_through_times > 0:
        # Just run through without debugging.
        return framework.OnRunStartResponse(
            framework.OnRunStartAction.NON_DEBUG_RUN, [])
      elif self._run_through_times == 0:
        # It is the run at which the run-end CLI will be launched: activate
        # debugging.
        return (self._run_start_response or
                framework.OnRunStartResponse(
                    framework.OnRunStartAction.DEBUG_RUN,
                    self._get_run_debug_urls()))
    if self._run_start_response is None:
      # No decision has been cached: ask the user via the run-start CLI.
      self._prep_cli_for_run_start()
      self._run_start_response = self._launch_cli()
      if self._active_tensor_filter:
        self._active_tensor_filter_run_start_response = self._run_start_response
      if self._run_through_times > 1:
        self._run_through_times -= 1
    self._exit_if_requested_by_user()
    return self._run_start_response
  def _exit_if_requested_by_user(self):
    # Maps the CLI's explicit "exit" token to a hard process exit.
    if self._run_start_response == debugger_cli_common.EXPLICIT_USER_EXIT:
      # Explicit user "exit" command leads to sys.exit(1).
      print(
          "Note: user exited from debugger CLI: Calling sys.exit(1).",
          file=sys.stderr)
      sys.exit(1)
  def _prep_cli_for_run_start(self):
    """Prepare (but not launch) the CLI for run-start."""
    self._run_cli = ui_factory.get_ui(self._ui_type)
    help_intro = debugger_cli_common.RichTextLines([])
    if self._run_call_count == 1:
      # Show logo at the onset of the first run.
      help_intro.extend(cli_shared.get_tfdbg_logo())
    help_intro.extend(debugger_cli_common.RichTextLines("Upcoming run:"))
    help_intro.extend(self._run_info)
    self._run_cli.set_help_intro(help_intro)
    # Create initial screen output detailing the run.
    self._title = "run-start: " + self._run_description
    self._init_command = "run_info"
    self._title_color = "blue_on_white"
  def on_run_end(self, request):
    """Overrides on-run-end callback.

    Actions taken:
      1) Load the debug dump.
      2) Bring up the Analyzer CLI.

    Args:
      request: An instance of OnSessionInitRequest.

    Returns:
      An instance of OnSessionInitResponse.
    """
    self._is_run_start = False
    if request.performed_action == framework.OnRunStartAction.DEBUG_RUN:
      partition_graphs = None
      if request.run_metadata and request.run_metadata.partition_graphs:
        partition_graphs = request.run_metadata.partition_graphs
      elif request.client_graph_def:
        partition_graphs = [request.client_graph_def]
      if request.tf_error and not os.path.isdir(self._dump_root):
        # It is possible that the dump root may not exist due to errors that
        # have occurred prior to graph execution (e.g., invalid device
        # assignments), in which case we will just raise the exception as the
        # unwrapped Session does.
        raise request.tf_error
      debug_dump = debug_data.DebugDumpDir(
          self._dump_root, partition_graphs=partition_graphs)
      debug_dump.set_python_graph(self._sess.graph)
      passed_filter = None
      if self._active_tensor_filter:
        if not debug_dump.find(
            self._tensor_filters[self._active_tensor_filter], first_n=1):
          # No dumped tensor passes the filter in this run. Clean up the dump
          # directory and move on.
          self._remove_dump_root()
          return framework.OnRunEndResponse()
        else:
          # Some dumped tensor(s) from this run passed the filter.
          passed_filter = self._active_tensor_filter
          self._active_tensor_filter = None
      self._prep_debug_cli_for_run_end(
          debug_dump, request.tf_error, passed_filter)
      self._run_start_response = self._launch_cli()
      # Clean up the dump generated by this run.
      self._remove_dump_root()
    elif request.performed_action == framework.OnRunStartAction.PROFILE_RUN:
      self._prep_profile_cli_for_run_end(self._sess.graph, request.run_metadata)
      self._run_start_response = self._launch_cli()
    else:
      # No debug information to show following a non-debug run() call.
      self._run_start_response = None
    # Return placeholder response that currently holds no additional
    # information.
    return framework.OnRunEndResponse()
  def _remove_dump_root(self):
    # Best-effort cleanup: delete the dump directory if it exists.
    if os.path.isdir(self._dump_root):
      shutil.rmtree(self._dump_root)
  def _prep_debug_cli_for_run_end(self, debug_dump, tf_error, passed_filter):
    """Prepare (but not launch) CLI for run-end, with debug dump from the run.

    Args:
      debug_dump: (debug_data.DebugDumpDir) The debug dump directory from this
        run.
      tf_error: (None or OpError) OpError that happened during the run() call
        (if any).
      passed_filter: (None or str) Name of the tensor filter that just passed
        and caused the preparation of this run-end CLI (if any).
    """
    if tf_error:
      help_intro = cli_shared.get_error_intro(tf_error)
      self._init_command = "help"
      self._title_color = "red_on_white"
    else:
      help_intro = None
      self._init_command = "lt"
      self._title_color = "black_on_white"
      if passed_filter is not None:
        # Some dumped tensor(s) from this run passed the filter.
        self._init_command = "lt -f %s" % passed_filter
        self._title_color = "red_on_white"
    self._run_cli = analyzer_cli.create_analyzer_ui(
        debug_dump, self._tensor_filters, ui_type=self._ui_type,
        on_ui_exit=self._remove_dump_root)
    # Get names of all dumped tensors.
    dumped_tensor_names = []
    for datum in debug_dump.dumped_tensor_data:
      dumped_tensor_names.append("%s:%d" %
                                 (datum.node_name, datum.output_slot))
    # Tab completions for command "print_tensors".
    self._run_cli.register_tab_comp_context(["print_tensor", "pt"],
                                            dumped_tensor_names)
    # Tab completion for commands "node_info", "list_inputs" and
    # "list_outputs". The list comprehension is used below because nodes()
    # output can be unicodes and they need to be converted to strs.
    self._run_cli.register_tab_comp_context(
        ["node_info", "ni", "list_inputs", "li", "list_outputs", "lo"],
        [str(node_name) for node_name in debug_dump.nodes()])
    # TODO(cais): Reduce API surface area for aliases vis-a-vis tab
    # completion contexts and registered command handlers.
    self._title = "run-end: " + self._run_description
    if help_intro:
      self._run_cli.set_help_intro(help_intro)
  def _prep_profile_cli_for_run_end(self, py_graph, run_metadata):
    """Prepare (but not launch) the profiler CLI for run-end."""
    self._init_command = "lp"
    # Carry over the config of the existing run CLI into the profiler UI.
    self._run_cli = profile_analyzer_cli.create_profiler_ui(
        py_graph, run_metadata, ui_type=self._ui_type,
        config=self._run_cli.config)
    self._title = "run-end (profiler mode): " + self._run_description
  def _launch_cli(self):
    """Launch the interactive command-line interface.

    Returns:
      The OnRunStartResponse specified by the user using the "run" command.
    """
    self._register_this_run_info(self._run_cli)
    response = self._run_cli.run_ui(
        init_command=self._init_command,
        title=self._title,
        title_color=self._title_color)
    return response
  def _run_info_handler(self, args, screen_info=None):
    """Command handler for "run_info": describe the upcoming/current run."""
    output = debugger_cli_common.RichTextLines([])
    if self._run_call_count == 1:
      output.extend(cli_shared.get_tfdbg_logo())
    output.extend(self._run_info)
    if (not self._is_run_start and
        debugger_cli_common.MAIN_MENU_KEY in output.annotations):
      # At run-end, make sure the main menu offers a "list_tensors" entry.
      menu = output.annotations[debugger_cli_common.MAIN_MENU_KEY]
      if "list_tensors" not in menu.captions():
        menu.insert(
            0, debugger_cli_common.MenuItem("list_tensors", "list_tensors"))
    return output
  def _print_feed_handler(self, args, screen_info=None):
    """Command handler for "print_feed": print a value from feed_dict."""
    np_printoptions = cli_shared.numpy_printoptions_from_screen_info(
        screen_info)
    if not self._feed_dict:
      return cli_shared.error(
          "The feed_dict of the current run is None or empty.")
    parsed = self._argparsers["print_feed"].parse_args(args)
    tensor_name, tensor_slicing = (
        command_parser.parse_tensor_name_with_slicing(parsed.tensor_name))
    feed_key = None
    feed_value = None
    for key in self._feed_dict:
      key_name = common.get_graph_element_name(key)
      if key_name == tensor_name:
        feed_key = key_name
        feed_value = self._feed_dict[key]
        break
    if feed_key is None:
      return cli_shared.error(
          "The feed_dict of the current run does not contain the key %s" %
          tensor_name)
    else:
      return cli_shared.format_tensor(
          feed_value,
          feed_key + " (feed)",
          np_printoptions,
          print_all=parsed.print_all,
          tensor_slicing=tensor_slicing,
          highlight_options=cli_shared.parse_ranges_highlight(parsed.ranges),
          include_numeric_summary=parsed.numeric_summary)
  def _run_handler(self, args, screen_info=None):
    """Command handler for "run" command during on-run-start."""
    del screen_info  # Currently unused.
    parsed = self._argparsers["run"].parse_args(args)
    # Empty-string filters mean "no filter"; normalize them to None.
    parsed.node_name_filter = parsed.node_name_filter or None
    parsed.op_type_filter = parsed.op_type_filter or None
    parsed.tensor_dtype_filter = parsed.tensor_dtype_filter or None
    if parsed.profile:
      raise debugger_cli_common.CommandLineExit(
          exit_token=framework.OnRunStartResponse(
              framework.OnRunStartAction.PROFILE_RUN, []))
    self._skip_debug = parsed.no_debug
    self._run_through_times = parsed.times
    if parsed.times > 1 or parsed.no_debug:
      # If requested -t times > 1, the very next run will be a non-debug run.
      action = framework.OnRunStartAction.NON_DEBUG_RUN
      debug_urls = []
    else:
      action = framework.OnRunStartAction.DEBUG_RUN
      debug_urls = self._get_run_debug_urls()
    run_start_response = framework.OnRunStartResponse(
        action,
        debug_urls,
        node_name_regex_whitelist=parsed.node_name_filter,
        op_type_regex_whitelist=parsed.op_type_filter,
        tensor_dtype_regex_whitelist=parsed.tensor_dtype_filter)
    if parsed.till_filter_pass:
      # For the run-till-filter-pass (run -f) mode, use the DEBUG_RUN
      # option to access the intermediate tensors, and set the corresponding
      # state flag of the class itself to True.
      if parsed.till_filter_pass in self._tensor_filters:
        action = framework.OnRunStartAction.DEBUG_RUN
        self._active_tensor_filter = parsed.till_filter_pass
        self._active_tensor_filter_run_start_response = run_start_response
      else:
        # Handle invalid filter name.
        return debugger_cli_common.RichTextLines(
            ["ERROR: tensor filter \"%s\" does not exist." %
             parsed.till_filter_pass])
    # Raise CommandLineExit exception to cause the CLI to exit.
    raise debugger_cli_common.CommandLineExit(exit_token=run_start_response)
  def _register_this_run_info(self, curses_cli):
    """Register this wrapper's command handlers and tab completions on a CLI."""
    curses_cli.register_command_handler(
        "run",
        self._run_handler,
        self._argparsers["run"].format_help(),
        prefix_aliases=["r"])
    curses_cli.register_command_handler(
        "invoke_stepper",
        self._on_run_start_step_handler,
        self._argparsers["invoke_stepper"].format_help(),
        prefix_aliases=["s"])
    curses_cli.register_command_handler(
        "run_info",
        self._run_info_handler,
        self._argparsers["run_info"].format_help(),
        prefix_aliases=["ri"])
    curses_cli.register_command_handler(
        "print_feed",
        self._print_feed_handler,
        self._argparsers["print_feed"].format_help(),
        prefix_aliases=["pf"])
    if self._tensor_filters:
      # Register tab completion for the filter names.
      curses_cli.register_tab_comp_context(["run", "r"],
                                           list(self._tensor_filters.keys()))
    if self._feed_dict:
      # Register tab completion for feed_dict keys.
      feed_keys = [common.get_graph_element_name(key)
                   for key in self._feed_dict.keys()]
      curses_cli.register_tab_comp_context(["print_feed", "pf"], feed_keys)
  def _on_run_start_step_handler(self, args, screen_info=None):
    """Command handler for "invoke_stepper" command during on-run-start."""
    _ = screen_info  # Currently unused.
    # No parsing is currently necessary for invoke_stepper. This may change
    # in the future when the command has arguments.
    # Raise CommandLineExit exception to cause the CLI to exit.
    raise debugger_cli_common.CommandLineExit(
        exit_token=framework.OnRunStartResponse(
            framework.OnRunStartAction.INVOKE_STEPPER, []))
  def _get_run_debug_urls(self):
    """Get the debug_urls value for the current run() call.

    Returns:
      debug_urls: (list of str) Debug URLs for the current run() call.
        Currently, the list consists of only one URL that is a file:// URL.
    """
    return ["file://" + self._dump_root]
  def _update_run_calls_state(self,
                              run_call_count,
                              fetches,
                              feed_dict,
                              is_callable_runner=False):
    """Update the internal state with regard to run() call history.

    Args:
      run_call_count: (int) Number of run() calls that have occurred.
      fetches: a node/tensor or a list of node/tensor that are the fetches of
        the run() call. This is the same as the fetches argument to the run()
        call.
      feed_dict: None of a dict. This is the feed_dict argument to the run()
        call.
      is_callable_runner: (bool) whether a runner returned by
        Session.make_callable is being run.
    """
    self._run_call_count = run_call_count
    self._feed_dict = feed_dict
    self._run_description = cli_shared.get_run_short_description(
        run_call_count,
        fetches,
        feed_dict,
        is_callable_runner=is_callable_runner)
    self._run_through_times -= 1
    self._run_info = cli_shared.get_run_start_intro(
        run_call_count,
        fetches,
        feed_dict,
        self._tensor_filters,
        is_callable_runner=is_callable_runner)
  def invoke_node_stepper(self,
                          node_stepper,
                          restore_variable_values_on_exit=True):
    """Overrides method in base class to implement interactive node stepper.

    Args:
      node_stepper: (`stepper.NodeStepper`) The underlying NodeStepper API
        object.
      restore_variable_values_on_exit: (`bool`) Whether any variables whose
        values have been altered during this node-stepper invocation should be
        restored to their old values when this invocation ends.

    Returns:
      The same return values as the `Session.run()` call on the same fetches as
        the NodeStepper.
    """
    stepper = stepper_cli.NodeStepperCLI(node_stepper)
    # On exiting the node-stepper CLI, the finalize method of the node_stepper
    # object will be called, ensuring that the state of the graph will be the
    # same as if the stepping did not happen.
    # TODO(cais): Perhaps some users will want the effect of the interactive
    # stepping and value injection to persist. When that happens, make the call
    # to finalize optional.
    stepper_ui = ui_factory.get_ui(
        self._ui_type,
        on_ui_exit=(node_stepper.restore_variable_values if
                    restore_variable_values_on_exit else None))
    stepper_ui.register_command_handler(
        "list_sorted_nodes",
        stepper.list_sorted_nodes,
        stepper.arg_parsers["list_sorted_nodes"].format_help(),
        prefix_aliases=["lt", "lsn"])
    stepper_ui.register_command_handler(
        "cont",
        stepper.cont,
        stepper.arg_parsers["cont"].format_help(),
        prefix_aliases=["ct", "c"])
    stepper_ui.register_command_handler(
        "step",
        stepper.step,
        stepper.arg_parsers["step"].format_help(),
        prefix_aliases=["st", "s"])
    stepper_ui.register_command_handler(
        "print_tensor",
        stepper.print_tensor,
        stepper.arg_parsers["print_tensor"].format_help(),
        prefix_aliases=["pt"])
    stepper_ui.register_command_handler(
        "inject_value",
        stepper.inject_value,
        stepper.arg_parsers["inject_value"].format_help(),
        prefix_aliases=["inject", "override_value", "override"])
    # Register tab completion candidates.
    stepper_ui.register_tab_comp_context([
        "cont", "ct", "c", "pt", "inject_value", "inject", "override_value",
        "override"
    ], [str(elem) for elem in node_stepper.sorted_nodes()])
    # TODO(cais): Tie up register_tab_comp_context to a single alias to shorten
    # calls like this.
    return stepper_ui.run_ui(
        init_command="lt",
        title="Node Stepper: " + self._run_description,
        title_color="blue_on_white")
| [
"[email protected]"
] | |
4af5a1e559601a3fa7639ec0400436956b86aae6 | 66aa96d18eead858d02686311e65abd0d840b107 | /Edureka/urls.py | 787b37800ac39f8568a1792612ad05d29c6592ef | [] | no_license | T-betrand/todolist | 5e0da077a16e75d39b411b9d8cc586714f60667a | d925e2f5c65e0ff534dfa869bfc11ac1651dcc53 | refs/heads/master | 2022-12-11T00:26:54.094103 | 2020-08-28T11:34:53 | 2020-08-28T11:34:53 | 286,319,479 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 815 | py | """Edureka URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
urlpatterns = [
    # Built-in Django admin site.
    path('admin/', admin.site.urls),
    # Delegate all /TasksManager/ URLs to the TasksManager app's urlconf.
    path('TasksManager/', include('TasksManager.urls')),
]
| [
"[email protected]"
] | |
b174b21a400033bea3658e42c0405f2772a218f1 | ac89e5d51d0d15ffdecfde25985c28a2af9c2e43 | /test/test_zebra_team.py | a8bfe0b4f56d71a249fa45e22d97c28ff3ed1b91 | [] | no_license | TBA-API/tba-api-client-python | 20dc4a634be32926054ffc4c52b94027ee40ac7d | 4f6ded8fb4bf8f7896891a9aa778ce15a2ef720b | refs/heads/master | 2021-07-15T16:36:32.234217 | 2020-05-07T00:20:43 | 2020-05-07T00:20:43 | 134,112,743 | 4 | 8 | null | 2019-07-01T03:14:12 | 2018-05-20T02:13:45 | Python | UTF-8 | Python | false | false | 1,961 | py | # coding: utf-8
"""
The Blue Alliance API v3
# Overview Information and statistics about FIRST Robotics Competition teams and events. # Authentication All endpoints require an Auth Key to be passed in the header `X-TBA-Auth-Key`. If you do not have an auth key yet, you can obtain one from your [Account Page](/account). A `User-Agent` header may need to be set to prevent a 403 Unauthorized error. # noqa: E501
The version of the OpenAPI document: 3.8.0
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import datetime
import tbaapiv3client
from tbaapiv3client.models.zebra_team import ZebraTeam # noqa: E501
from tbaapiv3client.rest import ApiException
class TestZebraTeam(unittest.TestCase):
    """Unit tests for the generated ZebraTeam model."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def make_instance(self, include_optional):
        """Build a ZebraTeam instance populated with sample data.

        Args:
            include_optional: kept for API parity with the other generated
                test stubs. Every ZebraTeam field (team_key, xs, ys) is
                required for this model, so both modes construct an
                identical instance.

        Returns:
            A ZebraTeam instance.
        """
        # The original stub duplicated the exact same constructor call in
        # both branches; collapse the duplication into a single call.
        del include_optional
        return ZebraTeam(
            team_key='frc7332',
            xs=[1.337],
            ys=[1.337],
        )

    def testZebraTeam(self):
        """Construct ZebraTeam in both modes and verify construction works."""
        inst_req_only = self.make_instance(include_optional=False)
        inst_req_and_optional = self.make_instance(include_optional=True)
        # The original stub only constructed the instances; assert they
        # actually came back so a constructor failure is reported clearly.
        self.assertIsNotNone(inst_req_only)
        self.assertIsNotNone(inst_req_and_optional)
if __name__ == '__main__':
    # Allow running this test module directly: python test_zebra_team.py
    unittest.main()
| [
"[email protected]"
] | |
5021e900edc1b7d5f8d84d3657fbedd354318667 | eb5ca57075c7e7eadc0d6b85f2f648a934646a08 | /python3/write_text.py | 18ee615a400072a0e664db6a953b447fc80b582b | [] | no_license | lamplusoka/trainning | 9b16573eed1bf28f80399e44065e51cbf2cf98ce | 5bfe6d4470ef4d9775aed1a6193922c38f4140c8 | refs/heads/master | 2020-04-03T06:51:31.487284 | 2018-10-30T11:31:29 | 2018-10-30T11:31:29 | 155,086,013 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 220 | py | #!/usr/bin/python3
file_name = "/mnt/c/users/lamplus/documents/github/trainning/python3/test_library.txt"
file = open(file_name, 'w')
file.write("dd")
file.close()
file = open(file_name)
data = file.read()
print(data) | [
"[email protected]"
] | |
982141be336b876d0e9a0e53560926e86346b3d3 | f13c586b82224c07f28f7bb7d9dd503e64eb5cb2 | /tests/transforms/test_zx.py | e5ee809a8a296678c7f2c78bffa237613d49293e | [
"Apache-2.0"
] | permissive | therooler/pennylane | 095f104e40254be2ed3050bc7be9ea9d2ee11ebd | fde1f24bd784d6ee2af5c980c2d5010b4c2bbe54 | refs/heads/master | 2023-04-29T13:32:43.115108 | 2023-04-18T09:41:42 | 2023-04-18T09:41:42 | 202,356,685 | 0 | 0 | Apache-2.0 | 2019-08-14T13:30:39 | 2019-08-14T13:30:38 | null | UTF-8 | Python | false | false | 24,635 | py | # Copyright 2022 Xanadu Quantum Technologies Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Unit tests for the `pennylane.transforms.zx` folder.
"""
import sys
import numpy as np
import pytest
import pennylane as qml
from pennylane.tape import QuantumScript
pyzx = pytest.importorskip("pyzx")  # skip this whole module if PyZX is absent
pytestmark = pytest.mark.zx
# Non-parametric gates exercised by the round-trip conversion tests.
supported_operations = [
    qml.PauliX(wires=0),
    qml.PauliZ(wires=0),
    qml.Hadamard(wires=0),
    qml.S(wires=0),
    qml.T(wires=0),
    qml.SWAP(wires=[0, 1]),
    qml.CNOT(wires=[0, 1]),
    qml.CZ(wires=[0, 1]),
    qml.CH(wires=[0, 1]),
]
# Parametric gates exercised by the round-trip conversion tests.
supported_operations_params = [
    qml.RX(0.3, wires=0),
    qml.RZ(0.3, wires=0),
    qml.CRZ(0.3, wires=[0, 1]),
]
# Gates that require expansion before conversion (see the corresponding test).
expanded_operations = [qml.PauliY(wires=0), qml.PhaseShift(0.3, wires=0), qml.RY(0.3, wires=0)]
# Three-qubit gates whose ZX graphs are not circuit-like (see tests below).
non_diagram_like_operations = [qml.CCZ(wires=[0, 1, 2]), qml.Toffoli(wires=[0, 1, 2])]
# Boolean parametrizations shared by several tests.
decompose = [True, False]
qscript = [True, False]
def test_import_pyzx(monkeypatch):
    """to_zx must raise ImportError when the pyzx module is unavailable."""
    with monkeypatch.context() as ctx:
        # Simulate a missing PyZX installation for the duration of the test.
        ctx.setitem(sys.modules, "pyzx", None)
        circuits = (
            qml.PauliX(wires=0),
            QuantumScript([qml.PauliX(wires=0), qml.PauliZ(wires=1)]),
        )
        for circuit in circuits:
            with pytest.raises(ImportError, match="This feature requires PyZX."):
                qml.transforms.to_zx(circuit)
class TestConvertersZX:
"""Test converters to_zx and from_zx."""
@pytest.mark.parametrize("qscript", qscript)
@pytest.mark.parametrize("operation", supported_operations)
def test_supported_operation_no_params(self, operation, qscript):
"""Test to convert the script to a ZX graph and back for supported operations."""
I = qml.math.eye(2 ** len(operation.wires))
if qscript:
qscript = QuantumScript([operation])
else:
qscript = operation
matrix_qscript = qml.matrix(qscript)
zx_g = qml.transforms.to_zx(qscript)
matrix_zx = zx_g.to_matrix()
assert isinstance(zx_g, pyzx.graph.graph_s.GraphS)
# Check whether the two matrices are each others conjugate transposes
mat_product = qml.math.dot(matrix_qscript, qml.math.conj(matrix_zx.T))
# Remove global phase
if not np.allclose(mat_product[0, 0], 1.0):
mat_product /= mat_product[0, 0]
assert qml.math.allclose(mat_product, I)
qscript_back = qml.transforms.from_zx(zx_g)
assert isinstance(qscript_back, qml.tape.QuantumScript)
matrix_qscript_back = qml.matrix(
qscript_back, wire_order=[i for i in range(0, len(qscript.wires))]
)
# Check whether the two matrices are each others conjugate transposes
mat_product = qml.math.dot(matrix_qscript, qml.math.conj(matrix_qscript_back.T))
# Remove global phase
if not np.allclose(mat_product[0, 0], 1.0):
mat_product /= mat_product[0, 0]
assert qml.math.allclose(mat_product, I)
@pytest.mark.parametrize("qscript", qscript)
@pytest.mark.parametrize("operation", supported_operations_params)
def test_supported_operation_params(self, operation, qscript):
"""Test to convert the script to a ZX graph and back for supported operations with parameters."""
if qscript:
qscript = QuantumScript([operation])
else:
qscript = operation
I = qml.math.eye(2 ** len(operation.wires))
matrix_qscript = qml.matrix(qscript)
zx_g = qml.transforms.to_zx(qscript)
matrix_zx = zx_g.to_matrix()
assert isinstance(zx_g, pyzx.graph.graph_s.GraphS)
# Check whether the two matrices are each others conjugate transposes
mat_product = qml.math.dot(matrix_qscript, qml.math.conj(matrix_zx.T))
# Remove global phase
mat_product /= mat_product[0, 0]
assert qml.math.allclose(mat_product, I)
qscript_back = qml.transforms.from_zx(zx_g)
assert isinstance(qscript_back, qml.tape.QuantumScript)
matrix_qscript_back = qml.matrix(
qscript_back, wire_order=[i for i in range(0, len(qscript.wires))]
)
# Check whether the two matrices are each others conjugate transposes
mat_product = qml.math.dot(matrix_qscript, qml.math.conj(matrix_qscript_back.T))
# Remove global phase
mat_product /= mat_product[0, 0]
assert qml.math.allclose(mat_product, I)
@pytest.mark.parametrize("qscript", qscript)
@pytest.mark.parametrize("operation", expanded_operations)
def test_operation_need_expansion(self, operation, qscript):
"""Test to convert the script to a ZX graph and back for operations that needs expansions."""
if qscript:
qscript = QuantumScript([operation])
else:
qscript = operation
I = qml.math.eye(2 ** len(operation.wires))
matrix_qscript = qml.matrix(qscript)
zx_g = qml.transforms.to_zx(qscript)
matrix_zx = zx_g.to_matrix()
assert isinstance(zx_g, pyzx.graph.graph_s.GraphS)
# Check whether the two matrices are each others conjugate transposes
mat_product = qml.math.dot(matrix_qscript, qml.math.conj(matrix_zx.T))
# Remove global phase
mat_product /= mat_product[0, 0]
assert qml.math.allclose(mat_product, I)
qscript_back = qml.transforms.from_zx(zx_g)
assert isinstance(qscript_back, qml.tape.QuantumScript)
matrix_qscript_back = qml.matrix(
qscript_back, wire_order=[i for i in range(0, len(qscript.wires))]
)
# Check whether the two matrices are each others conjugate transposes
mat_product = qml.math.dot(matrix_qscript, qml.math.conj(matrix_qscript_back.T))
# Remove global phase
mat_product /= mat_product[0, 0]
assert qml.math.allclose(mat_product, I)
@pytest.mark.parametrize("operation", non_diagram_like_operations)
def test_non_diagram_like_op(self, operation):
"""Test operations that result in a non diagram like circuit."""
I = qml.math.eye(2 ** len(operation.wires))
qscript = QuantumScript([operation], [], [])
matrix_qscript = qml.matrix(qscript)
zx_g = qml.transforms.to_zx(qscript)
assert isinstance(zx_g, pyzx.graph.graph_s.GraphS)
matrix_zx = zx_g.to_matrix()
# Check whether the two matrices are each others conjugate transposes
mat_product = qml.math.dot(matrix_qscript, qml.math.conj(matrix_zx.T))
# Remove global phase
mat_product /= mat_product[0, 0]
assert qml.math.allclose(mat_product, I)
with pytest.raises(qml.QuantumFunctionError, match="Graph doesn't seem circuit like"):
qml.transforms.from_zx(zx_g)
@pytest.mark.parametrize("decompose", decompose)
@pytest.mark.parametrize("operation", non_diagram_like_operations)
def test_circuit(self, operation, decompose):
"""Test a simple circuit."""
I = qml.math.eye(2**2)
operations = [
qml.RZ(5 / 4 * np.pi, wires=0),
qml.RZ(3 / 4 * np.pi, wires=1),
qml.PauliY(wires=1),
qml.RX(0.1, wires=0),
qml.PauliZ(wires=0),
qml.RY(0.2, wires=1),
qml.RZ(0.3, wires=1),
qml.PauliX(wires=1),
qml.CNOT(wires=[0, 1]),
qml.CNOT(wires=[1, 0]),
qml.SWAP(wires=[0, 1]),
]
qscript = QuantumScript(operations, [], [])
zx_g = qml.transforms.to_zx(qscript)
assert isinstance(zx_g, pyzx.graph.graph_s.GraphS)
matrix_qscript = qml.matrix(qscript)
matrix_zx = zx_g.to_matrix()
# Check whether the two matrices are each others conjugate transposes
mat_product = qml.math.dot(matrix_qscript, qml.math.conj(matrix_zx.T))
# Remove global phase
mat_product /= mat_product[0, 0]
assert qml.math.allclose(mat_product, I)
qscript_back = qml.transforms.from_zx(zx_g, decompose_phases=decompose)
assert isinstance(qscript_back, qml.tape.QuantumScript)
matrix_qscript_back = qml.matrix(
qscript_back, wire_order=[i for i in range(0, len(qscript.wires))]
)
# Check whether the two matrices are each others conjugate transposes
mat_product = qml.math.dot(matrix_qscript, qml.math.conj(matrix_qscript_back.T))
# Remove global phase
mat_product /= mat_product[0, 0]
assert qml.math.allclose(mat_product, I)
def test_circuit_mod_5_4(self):
    """Test the circuit mod 5 4.

    A standard 5-qubit Clifford+T benchmark circuit (CNOT/T/Tdg/H layers);
    verifies the ZX round trip on a realistically sized circuit.
    """
    operations = [
        qml.PauliX(wires=4),
        qml.Hadamard(wires=4),
        qml.CNOT(wires=[3, 4]),
        qml.CNOT(wires=[0, 4]),
        qml.T(wires=4),
        qml.CNOT(wires=[3, 4]),
        qml.adjoint(qml.T)(wires=4),
        qml.CNOT(wires=[0, 4]),
        qml.CNOT(wires=[0, 3]),
        qml.adjoint(qml.T)(wires=3),
        qml.CNOT(wires=[0, 3]),
        qml.CNOT(wires=[3, 4]),
        qml.CNOT(wires=[2, 4]),
        qml.adjoint(qml.T)(wires=4),
        qml.CNOT(wires=[3, 4]),
        qml.T(wires=4),
        qml.CNOT(wires=[2, 4]),
        qml.CNOT(wires=[2, 3]),
        qml.T(wires=3),
        qml.CNOT(wires=[2, 3]),
        qml.Hadamard(wires=4),
        qml.CNOT(wires=[3, 4]),
        qml.Hadamard(wires=4),
        qml.CNOT(wires=[2, 4]),
        qml.adjoint(qml.T)(wires=4),
        qml.CNOT(wires=[1, 4]),
        qml.T(wires=4),
        qml.CNOT(wires=[2, 4]),
        qml.adjoint(qml.T)(wires=4),
        qml.CNOT(wires=[1, 4]),
        qml.T(wires=4),
        qml.CNOT(wires=[1, 2]),
        qml.adjoint(qml.T)(wires=2),
        qml.CNOT(wires=[1, 2]),
        qml.Hadamard(wires=4),
        qml.CNOT(wires=[2, 4]),
        qml.Hadamard(wires=4),
        qml.CNOT(wires=[1, 4]),
        qml.T(wires=4),
        qml.CNOT(wires=[0, 4]),
        qml.adjoint(qml.T)(wires=4),
        qml.CNOT(wires=[1, 4]),
        qml.T(wires=4),
        qml.CNOT(wires=[0, 4]),
        qml.adjoint(qml.T)(wires=4),
        qml.CNOT(wires=[0, 1]),
        qml.T(wires=1),
        qml.CNOT(wires=[0, 1]),
        qml.Hadamard(wires=4),
        qml.CNOT(wires=[1, 4]),
        qml.CNOT(wires=[0, 4]),
    ]
    qscript = QuantumScript(operations, [], [])
    zx_g = qml.transforms.to_zx(qscript)
    assert isinstance(zx_g, pyzx.graph.graph_s.GraphS)
    matrix_qscript = qml.matrix(qscript)
    matrix_zx = zx_g.to_matrix()
    # Check whether the two matrices are each others conjugate transposes
    mat_product = qml.math.dot(matrix_qscript, qml.math.conj(matrix_zx.T))
    # Remove global phase
    mat_product /= mat_product[0, 0]
    I = qml.math.eye(2**5)
    assert qml.math.allclose(mat_product, I)
    # Round trip: graph back to a tape and compare matrices again.
    qscript_back = qml.transforms.from_zx(zx_g)
    assert isinstance(qscript_back, qml.tape.QuantumScript)
    matrix_qscript_back = qml.matrix(
        qscript_back, wire_order=[i for i in range(0, len(qscript.wires))]
    )
    # Check whether the two matrices are each others conjugate transposes
    mat_product = qml.math.dot(matrix_qscript, qml.math.conj(matrix_qscript_back.T))
    # Remove global phase
    mat_product /= mat_product[0, 0]
    assert qml.math.allclose(mat_product, I)
def test_expand_measurements(self):
    """Test with expansion of measurements.

    With ``expand_measurements=True`` the observable's diagonalizing gates
    are appended to the circuit before conversion, so the reference matrix
    is built from the operations plus those rotations.
    """
    I = qml.math.eye(2**2)
    operations = [
        qml.RX(0.1, wires=0),
        qml.PauliZ(wires=0),
        qml.RZ(0.3, wires=1),
        qml.PauliX(wires=1),
        qml.CNOT(wires=[0, 1]),
        qml.CNOT(wires=[1, 0]),
        qml.SWAP(wires=[0, 1]),
    ]
    measurements = [qml.expval(qml.PauliZ(0) @ qml.PauliX(1))]
    qscript = QuantumScript(operations, measurements, [])
    zx_g = qml.transforms.to_zx(qscript, expand_measurements=True)
    assert isinstance(zx_g, pyzx.graph.graph_s.GraphS)
    # Add rotation Hadamard because of PauliX
    operations.append(qml.Hadamard(wires=[1]))
    operations_with_rotations = operations
    qscript_with_rot = QuantumScript(operations_with_rotations, [], [])
    matrix_qscript = qml.matrix(qscript_with_rot)
    matrix_zx = zx_g.to_matrix()
    # Check whether the two matrices are each others conjugate transposes
    mat_product = qml.math.dot(matrix_qscript, qml.math.conj(matrix_zx.T))
    # Remove global phase
    mat_product /= mat_product[0, 0]
    assert qml.math.allclose(mat_product, I)
    # Round trip: graph back to a tape and compare matrices again.
    qscript_back = qml.transforms.from_zx(zx_g)
    assert isinstance(qscript_back, qml.tape.QuantumScript)
    matrix_qscript_back = qml.matrix(
        qscript_back, wire_order=[i for i in range(0, len(qscript.wires))]
    )
    # Check whether the two matrices are each others conjugate transposes
    mat_product = qml.math.dot(matrix_qscript, qml.math.conj(matrix_qscript_back.T))
    # Remove global phase
    mat_product /= mat_product[0, 0]
    assert qml.math.allclose(mat_product, I)
def test_embeddings(self):
    """Test with expansion of prep.

    An ``AngleEmbedding`` is supplied as the tape's preparation; ``to_zx``
    must expand it into supported rotations before building the graph.
    """
    I = qml.math.eye(2**2)
    prep = [qml.AngleEmbedding(features=[1, 2], wires=range(2), rotation="Z")]
    operations = [
        qml.RX(0.1, wires=0),
        qml.PauliZ(wires=0),
        qml.RZ(0.3, wires=1),
        qml.PauliX(wires=1),
        qml.CNOT(wires=[0, 1]),
        qml.CNOT(wires=[1, 0]),
        qml.SWAP(wires=[0, 1]),
    ]
    qscript = QuantumScript(operations, [], prep)
    zx_g = qml.transforms.to_zx(qscript)
    assert isinstance(zx_g, pyzx.graph.graph_s.GraphS)
    matrix_qscript = qml.matrix(qscript)
    matrix_zx = zx_g.to_matrix()
    # Check whether the two matrices are each others conjugate transposes
    mat_product = qml.math.dot(matrix_qscript, qml.math.conj(matrix_zx.T))
    # Remove global phase
    mat_product /= mat_product[0, 0]
    assert qml.math.allclose(mat_product, I)
    # Round trip: graph back to a tape and compare matrices again.
    qscript_back = qml.transforms.from_zx(zx_g)
    assert isinstance(qscript_back, qml.tape.QuantumScript)
    matrix_qscript_back = qml.matrix(
        qscript_back, wire_order=[i for i in range(0, len(qscript.wires))]
    )
    # Check whether the two matrices are each others conjugate transposes
    mat_product = qml.math.dot(matrix_qscript, qml.math.conj(matrix_qscript_back.T))
    # Remove global phase
    mat_product /= mat_product[0, 0]
    assert qml.math.allclose(mat_product, I)
def test_no_decomposition(self):
    """Cross qubit connections is not diagram-like.

    Builds a ZX graph by hand in which a node on qubit 0 is wired to a node
    placed on qubit 1's row; ``from_zx`` must reject it as not circuit-like.
    """
    graph = pyzx.Graph(None)
    q_mapper = pyzx.circuit.gates.TargetMapper()
    c_mapper = pyzx.circuit.gates.TargetMapper()
    inputs = []

    # Create the qubits in the graph and the qubit mapper
    vertex = graph.add_vertex(pyzx.VertexType.BOUNDARY, 0, 0)
    inputs.append(vertex)
    q_mapper.set_prev_vertex(0, vertex)
    q_mapper.set_next_row(0, 1)
    q_mapper.set_qubit(0, 0)

    # Cross qubit connection
    r = q_mapper.next_row(0)
    v1 = graph.add_vertex(pyzx.VertexType.Z, q_mapper.to_qubit(0), r)
    graph.add_edge(graph.edge(q_mapper.prev_vertex(0), v1), pyzx.EdgeType.SIMPLE)
    q_mapper.set_prev_vertex(0, v1)
    q_mapper.set_qubit(1, 1)
    q_mapper.set_next_row(1, r + 1)
    q_mapper.set_next_row(0, r + 1)

    r = max(q_mapper.next_row(1), q_mapper.next_row(0))
    # Second Z node sits on qubit 1's row but its wire chains back to qubit 0.
    v2 = graph.add_vertex(pyzx.VertexType.Z, q_mapper.to_qubit(1), r)
    graph.add_edge(graph.edge(q_mapper.prev_vertex(0), v2), pyzx.EdgeType.SIMPLE)
    q_mapper.set_prev_vertex(0, v2)
    q_mapper.set_next_row(1, r + 1)
    q_mapper.set_next_row(0, r + 1)

    r = max(q_mapper.next_row(1), q_mapper.next_row(0))
    graph.add_edge((v1, v2), edgetype=pyzx.EdgeType.SIMPLE)
    q_mapper.set_next_row(1, r + 1)
    q_mapper.set_next_row(0, r + 1)
    graph.scalar.add_power(1)

    # NOTE(review): ``row`` is computed but never used, and ``outputs`` is
    # left empty, so the graph has no output boundary vertices.
    row = max(q_mapper.max_row(), c_mapper.max_row())
    outputs = []

    graph.set_inputs(tuple(inputs))
    graph.set_outputs(tuple(outputs))

    with pytest.raises(
        qml.QuantumFunctionError,
        match="Cross qubit connections, the graph is not circuit-like.",
    ):
        qml.transforms.from_zx(graph)
def test_no_suitable_decomposition(self):
    """Test that an error is raised when no suitable decomposition is found."""
    # A Sum observable used as an operation has no PyZX-compatible
    # decomposition, so the conversion must abort with QuantumFunctionError.
    unsupported_op = qml.sum(qml.PauliX(0), qml.PauliZ(0))
    tape = QuantumScript([unsupported_op], [], [])
    expected_message = "The expansion of the quantum tape failed, PyZX does not support"
    with pytest.raises(qml.QuantumFunctionError, match=expected_message):
        qml.transforms.to_zx(tape)
def test_same_type_nodes_simple_edge(self):
    """Test that a Green-Green nodes with simple edge has no corresponding circuit.

    Two Z (green) spiders joined by a SIMPLE edge do not correspond to any
    gate, so ``from_zx`` must raise.
    """
    graph = pyzx.Graph(None)
    q_mapper = pyzx.circuit.gates.TargetMapper()
    c_mapper = pyzx.circuit.gates.TargetMapper()
    inputs = []

    # Create the qubits in the graph and the qubit mapper
    for i in range(2):
        vertex = graph.add_vertex(pyzx.VertexType.BOUNDARY, i, 0)
        inputs.append(vertex)
        q_mapper.set_prev_vertex(i, vertex)
        q_mapper.set_next_row(i, 1)
        q_mapper.set_qubit(i, i)

    # Create Green Green with simple Edge
    r = max(q_mapper.next_row(1), q_mapper.next_row(0))

    v1 = graph.add_vertex(pyzx.VertexType.Z, q_mapper.to_qubit(1), r)
    graph.add_edge(graph.edge(q_mapper.prev_vertex(1), v1), pyzx.EdgeType.SIMPLE)
    q_mapper.set_prev_vertex(1, v1)

    v2 = graph.add_vertex(pyzx.VertexType.Z, q_mapper.to_qubit(0), r)
    graph.add_edge(graph.edge(q_mapper.prev_vertex(0), v2), pyzx.EdgeType.SIMPLE)
    q_mapper.set_prev_vertex(0, v2)

    graph.add_edge((v1, v2), edgetype=pyzx.EdgeType.SIMPLE)
    q_mapper.set_next_row(1, r + 1)
    q_mapper.set_next_row(0, r + 1)
    graph.scalar.add_power(1)

    # Close the diagram with one output boundary vertex per qubit.
    row = max(q_mapper.max_row(), c_mapper.max_row())
    outputs = []
    for mapper in (q_mapper, c_mapper):
        for label in mapper.labels():
            qubit = mapper.to_qubit(label)
            vertex = graph.add_vertex(pyzx.VertexType.BOUNDARY, qubit, row)
            outputs.append(vertex)
            pre_vertex = mapper.prev_vertex(label)
            graph.add_edge(graph.edge(pre_vertex, vertex))

    graph.set_inputs(tuple(inputs))
    graph.set_outputs(tuple(outputs))

    with pytest.raises(
        qml.QuantumFunctionError,
        match="Two green or respectively two red nodes connected by a ",
    ):
        qml.transforms.from_zx(graph)
def test_different_type_node_hadamard_edge(self):
    """Test that a Green-Red nodes with Hadamard edge has no corresponding circuit.

    A Z (green) spider and an X (red) spider joined by a HADAMARD edge do
    not correspond to any gate, so ``from_zx`` must raise.
    """
    graph = pyzx.Graph(None)
    q_mapper = pyzx.circuit.gates.TargetMapper()
    c_mapper = pyzx.circuit.gates.TargetMapper()
    inputs = []

    # Create the qubits in the graph and the qubit mapper
    for i in range(2):
        vertex = graph.add_vertex(pyzx.VertexType.BOUNDARY, i, 0)
        inputs.append(vertex)
        q_mapper.set_prev_vertex(i, vertex)
        q_mapper.set_next_row(i, 1)
        q_mapper.set_qubit(i, i)

    # Create Green Red with Hadamard Edge
    r = max(q_mapper.next_row(1), q_mapper.next_row(0))

    v1 = graph.add_vertex(pyzx.VertexType.Z, q_mapper.to_qubit(1), r)
    graph.add_edge(graph.edge(q_mapper.prev_vertex(1), v1), pyzx.EdgeType.SIMPLE)
    q_mapper.set_prev_vertex(1, v1)

    v2 = graph.add_vertex(pyzx.VertexType.X, q_mapper.to_qubit(0), r)
    graph.add_edge(graph.edge(q_mapper.prev_vertex(0), v2), pyzx.EdgeType.SIMPLE)
    q_mapper.set_prev_vertex(0, v2)

    graph.add_edge((v1, v2), edgetype=pyzx.EdgeType.HADAMARD)
    q_mapper.set_next_row(1, r + 1)
    q_mapper.set_next_row(0, r + 1)
    graph.scalar.add_power(1)

    # Close the diagram with one output boundary vertex per qubit.
    row = max(q_mapper.max_row(), c_mapper.max_row())
    outputs = []
    for mapper in (q_mapper, c_mapper):
        for label in mapper.labels():
            qubit = mapper.to_qubit(label)
            vertex = graph.add_vertex(pyzx.VertexType.BOUNDARY, qubit, row)
            outputs.append(vertex)
            pre_vertex = mapper.prev_vertex(label)
            graph.add_edge(graph.edge(pre_vertex, vertex))

    graph.set_inputs(tuple(inputs))
    graph.set_outputs(tuple(outputs))

    with pytest.raises(
        qml.QuantumFunctionError,
        match="A green and red node connected by a Hadamard edge ",
    ):
        qml.transforms.from_zx(graph)
def test_cx_gate(self):
    """Test that CX node is converted to the right tape.

    Two X (red) spiders joined by a HADAMARD edge are a CX in the X basis;
    ``from_zx`` is expected to translate this to H(1) · CNOT(1, 0) · H(1).
    """
    graph = pyzx.Graph(None)
    q_mapper = pyzx.circuit.gates.TargetMapper()
    c_mapper = pyzx.circuit.gates.TargetMapper()
    inputs = []

    # Create the qubits in the graph and the qubit mapper
    for i in range(2):
        vertex = graph.add_vertex(pyzx.VertexType.BOUNDARY, i, 0)
        inputs.append(vertex)
        q_mapper.set_prev_vertex(i, vertex)
        q_mapper.set_next_row(i, 1)
        q_mapper.set_qubit(i, i)

    # Create Green Red with Hadamard Edge
    r = max(q_mapper.next_row(1), q_mapper.next_row(0))

    v1 = graph.add_vertex(pyzx.VertexType.X, q_mapper.to_qubit(1), r)
    graph.add_edge(graph.edge(q_mapper.prev_vertex(1), v1), pyzx.EdgeType.SIMPLE)
    q_mapper.set_prev_vertex(1, v1)

    v2 = graph.add_vertex(pyzx.VertexType.X, q_mapper.to_qubit(0), r)
    graph.add_edge(graph.edge(q_mapper.prev_vertex(0), v2), pyzx.EdgeType.SIMPLE)
    q_mapper.set_prev_vertex(0, v2)

    graph.add_edge((v1, v2), edgetype=pyzx.EdgeType.HADAMARD)
    q_mapper.set_next_row(1, r + 1)
    q_mapper.set_next_row(0, r + 1)
    graph.scalar.add_power(1)

    # Close the diagram with one output boundary vertex per qubit.
    row = max(q_mapper.max_row(), c_mapper.max_row())
    outputs = []
    for mapper in (q_mapper, c_mapper):
        for label in mapper.labels():
            qubit = mapper.to_qubit(label)
            vertex = graph.add_vertex(pyzx.VertexType.BOUNDARY, qubit, row)
            outputs.append(vertex)
            pre_vertex = mapper.prev_vertex(label)
            graph.add_edge(graph.edge(pre_vertex, vertex))

    graph.set_inputs(tuple(inputs))
    graph.set_outputs(tuple(outputs))

    tape = qml.transforms.from_zx(graph)
    expected_op = [qml.Hadamard(wires=[1]), qml.CNOT(wires=[1, 0]), qml.Hadamard(wires=[1])]
    assert np.all([qml.equal(op, op_ex) for op, op_ex in zip(tape.operations, expected_op)])
def test_qnode_decorator(self):
    """Test the QNode decorator.

    Applying ``to_zx`` above ``qnode`` makes the circuit call return a PyZX
    graph instead of an expectation value.
    """
    dev = qml.device("default.qubit", wires=2)

    @qml.transforms.to_zx(expand_measurements=True)
    @qml.qnode(device=dev)
    def circuit(p):
        # NOTE(review): the trailing commas make each line a one-element
        # tuple expression; harmless here because the gates queue themselves
        # on instantiation, but the commas are unnecessary.
        qml.RZ(p[0], wires=1),
        qml.RZ(p[1], wires=1),
        qml.RX(p[2], wires=0),
        qml.PauliZ(wires=0),
        qml.RZ(p[3], wires=1),
        qml.PauliX(wires=1),
        qml.CNOT(wires=[0, 1]),
        qml.CNOT(wires=[1, 0]),
        qml.SWAP(wires=[0, 1]),
        return qml.expval(qml.PauliZ(0) @ qml.PauliZ(1))

    params = [5 / 4 * np.pi, 3 / 4 * np.pi, 0.1, 0.3]
    g = circuit(params)
    assert isinstance(g, pyzx.graph.graph_s.GraphS)
| [
"[email protected]"
] | |
b3200048ee7f01b7d6c856d5342ec001c2a8962b | d1a9ba2b4ffe45d10d277c4433723cb78db1e649 | /asutils/gfk.py | 361c63e6fa069fa918980cf22654bf799c17feb0 | [
"MIT"
] | permissive | scanner/django-asutils | 0d4e166f90e87a54d09f7c1b4aee8721de475e2e | 2f65652f74f530929866cb7c216e1814cd1b3994 | refs/heads/master | 2021-06-03T09:43:10.812702 | 2021-04-28T22:29:32 | 2021-04-28T22:29:32 | 3,261,998 | 1 | 0 | NOASSERTION | 2021-04-28T22:29:32 | 2012-01-25T02:50:38 | Python | UTF-8 | Python | false | false | 2,402 | py | #
# File: $Id: gfk.py 1638 2008-09-27 01:47:23Z scanner $
#
"""
From: http://www.djangosnippets.org/snippets/1079/
his is an improvement on snippet 984. Read it's description and this
blog post for good explanations of the problem this solves -
http://zerokspot.com/weblog/2008/08/13/genericforeignkeys-with-less-queries/
Unlike snippet 984, this version is able to handle multiple generic
foreign keys, generic foreign keys with nonstandard ct_field and
fk_field names, and avoids unnecessary lookups to the ContentType
table.
To use, just assign an instance of GFKManager as the objects attribute
of a model that has generic foreign keys. Then:
MyModelWithGFKs.objects.filter(...).fetch_generic_relations()
The generic related items will be bulk-fetched to minimize the number
of queries.
"""
from django.db.models.query import QuerySet
from django.db.models import Manager
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes.generic import GenericForeignKey
class GFKManager(Manager):
    """
    A manager that returns a GFKQuerySet instead of a regular QuerySet.

    Assign an instance of this manager as ``objects`` on a model with
    generic foreign keys to get ``fetch_generic_relations()`` support.
    """

    def get_query_set(self):
        # Hand back the GFK-aware queryset; all other Manager behavior is
        # inherited unchanged.
        return GFKQuerySet(self.model)
class GFKQuerySet(QuerySet):
    """
    A QuerySet with a fetch_generic_relations() method to bulk fetch
    all generic related items. Similar to select_related(), but for
    generic foreign keys.

    Based on http://www.djangosnippets.org/snippets/984/
    """

    def fetch_generic_relations(self):
        """Prefetch every GenericForeignKey target with one query per
        content type, then attach the fetched objects to the items.

        Returns the cloned queryset with the GFK attributes populated.
        """
        qs = self._clone()

        # All GenericForeignKey descriptors declared on the model.
        gfk_fields = [g for g in self.model._meta.virtual_fields
                      if isinstance(g, GenericForeignKey)]

        # ct_map groups target object ids by (content-type column, ct id);
        # each entry maps target-pk -> (gfk attribute name, source item id).
        ct_map = {}
        item_map = {}

        for item in qs:
            for gfk in gfk_fields:
                ct_id_field = self.model._meta.get_field(gfk.ct_field).column
                ct_map.setdefault(
                    (ct_id_field, getattr(item, ct_id_field)), {}
                )[getattr(item, gfk.fk_field)] = (gfk.name, item.id)
            item_map[item.id] = item

        # One bulk query per distinct content type, then attach the results
        # back onto the originating items so later attribute access is free.
        for (ct_id_field, ct_id), items_ in ct_map.items():
            ct = ContentType.objects.get_for_id(ct_id)
            for o in ct.model_class().objects.select_related().filter(
                id__in=items_.keys()).all():
                (gfk_name, item_id) = items_[o.id]
                setattr(item_map[item_id], gfk_name, o)

        return qs
| [
"[email protected]"
] | |
585dbe7418c0b2a58abbba253e75a7c3ee39fd1e | fa8f98ac20cab0d7bcdc2fd391a18e3bb6abd933 | /rok 2018-2019/inf/Różne/szkola.py | f64342cb2cb98515cb8310158c1cb9b6b5e1d0f0 | [] | no_license | ppawelo1/pawel | ba2b460ba40eead27bc60b65407be98e916398b5 | f64e6e330887b7e792deb05c98cc18b56d51b9ae | refs/heads/master | 2021-06-21T11:35:52.241596 | 2019-09-20T05:49:04 | 2019-09-20T05:49:04 | 104,464,005 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,639 | py |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# dane_uczniow.py
#
import sqlite3
import csv
def dane_z_pliku(nazwa_pliku):
    """Read a ';'-separated text file and return its records.

    Every field is stripped of surrounding whitespace; the result is a
    list of lists of strings, one inner list per line of the file.
    """
    with open(nazwa_pliku, newline='', encoding='utf-8') as plik:
        czytnik = csv.reader(plik, delimiter=';')
        # Strip whitespace from each field while collecting the records.
        return [[pole.strip() for pole in rekord] for rekord in czytnik]
def main(args):
    """Create the SQLite schema and bulk-load pupils, subjects and grades
    from their ';'-separated text files.

    Expects ``szkola.sql`` (schema), ``szkola.txt``, ``przedmioty.txt`` and
    ``oceny.txt`` in the working directory.  Returns 0 on success.
    """
    nazwa_bazy = 'szkola'
    con = sqlite3.connect(nazwa_bazy + '.db')  # connect to the database
    cur = con.cursor()  # create a cursor

    # create the tables in the database from the schema script
    with open(nazwa_bazy + '.sql', 'r') as plik:
        cur.executescript(plik.read())  # create the tables

    # load pupil records into the database
    dane = dane_z_pliku('szkola.txt')
    print(dane)
    dane.pop(0)  # drop the header row
    cur.executemany(
        'INSERT INTO uczniowie VALUES(?, ?, ?, ?, ?, ?)', dane)

    # load subject records into the database
    dane = dane_z_pliku('przedmioty.txt')
    print(dane)
    dane.pop(0)  # drop the header row
    cur.executemany(
        'INSERT INTO przedmioty VALUES(?, ?, ?, ?)', dane)

    # load grade records into the database
    dane = dane_z_pliku('oceny.txt')
    print(dane)
    dane.pop(0)  # drop the header row
    cur.executemany(
        'INSERT INTO oceny VALUES(?, ?, ?, ?)', dane)

    con.commit()  # commit all pending operations
    con.close()  # close the database connection
    return 0
# Run the loader when executed as a script; exit with main()'s return code.
if __name__ == '__main__':
    import sys
    sys.exit(main(sys.argv))
"[email protected]"
] | |
fda661bae5535e64819a9219ffc931d12216dec7 | a68f8f9444376cfa96eced8fd3ec2d2f40c6947e | /KI/migrations/0003_rename_ksiazka_book.py | 5c3a09b788e4192109a953f6aae6270fb6e96097 | [] | no_license | Kaczmarek-M/Python-projekt | c601c8efe9aecfa9076f28c6a3674e2b66114fc1 | 6b3858863bf494ecad62cc01b92d40489cca60c1 | refs/heads/master | 2023-06-09T07:49:11.720850 | 2021-07-03T18:55:28 | 2021-07-03T18:55:28 | 382,643,058 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 328 | py | # Generated by Django 3.2.5 on 2021-07-01 18:50
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated migration: renames the ``Ksiazka`` model to ``Book``.

    dependencies = [
        ('KI', '0002_rename_ksiazki_ksiazka'),
    ]

    operations = [
        migrations.RenameModel(
            old_name='Ksiazka',
            new_name='Book',
        ),
    ]
| [
"[email protected]"
] | |
ba1e6e2172af45d6b15aef80027083855e6859c9 | 6f4abf75355b2d5b1e960841555ce09b07cbfee1 | /admin/IDN.py | b3188de71246d48226b4514d754b99e119cf3a26 | [] | no_license | dedene/postfix-cyrus-mysql | b0bf9e59e82549041c2be5cbf2ec2e8e15ab5324 | e7678a7d3b675cf399035db8d828c5b5f53b482c | refs/heads/master | 2020-12-29T03:07:54.225490 | 2010-02-27T11:37:14 | 2010-02-27T11:37:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,692 | py | #!/usr/bin/env python
from encodings import idna
from types import *
def mightDecode(s):
    """Decode a single IDNA/Punycode label to Unicode.

    Labels carrying the ACE prefix ("xn--") are decoded via
    ``encodings.idna.ToUnicode``; anything else is returned unchanged.
    """
    return idna.ToUnicode(s) if s.startswith("xn--") else s
def applySplit(s, f):
    """Apply *f* to each dot-separated label of *s*.

    If *s* contains "@", everything before the first "@" (the local part)
    is left untouched and the function recurses on each remaining segment.

    NOTE(review): Python 2 era code — ``StringType`` and ``str.decode``
    do not exist on Python 3; porting would break the "@" branch.
    """
    foo = s.split("@")
    if len(foo) > 1:
        # Rebuild "local@domain[@...]", transforming only the domain parts.
        r = foo[0]
        for i in foo[1:]:
            r += "@" + applySplit(i, f)
        if type(r) == StringType:
            return r.decode("iso-8859-15")
        return r
    # No "@": map f over every dot-separated label and rejoin.
    l = []
    for i in s.split("."):
        l.append(f(i))
    return ".".join(l)
def latin12p(s):
    """Convert a Latin-1 domain/address to its Punycode (ACE) form.

    Recurses into tuples/lists; decodes byte strings as ISO-8859-15 before
    applying ``idna.ToASCII`` per label.  Python 2 era code (``StringType``,
    ``str.decode``).
    """
    if s == "":
        return s
    if type(s) == TupleType or type(s) == ListType:
        return map(latin12p, s)
    if type(s) == StringType:
        s = s.decode("iso-8859-15")
    if type(s) == UnicodeType:
        return applySplit(s, idna.ToASCII)
    return s
def utf82p(s):
    """Convert a UTF-8 domain/address to its Punycode (ACE) form.

    Identical to ``latin12p`` except the byte-string input is decoded as
    UTF-8.  Python 2 era code (``StringType``, ``str.decode``).
    """
    if s == "":
        return s
    if type(s) == TupleType or type(s) == ListType:
        return map(utf82p, s)
    if type(s) == StringType:
        s = s.decode("utf-8")
    if type(s) == UnicodeType:
        return applySplit(s, idna.ToASCII)
    return s
def p2latin1(s):
    """Convert a Punycode (ACE) domain/address back to Latin-1.

    Recurses into tuples/lists.  If the decoded Unicode cannot be encoded
    as ISO-8859-15, the Unicode string is returned instead of bytes.
    Python 2 era code (``StringType``, implicit unicode handling).
    """
    if s == "":
        return s
    if type(s) == TupleType or type(s) == ListType:
        return map(p2latin1, s)
    if type(s) == StringType:
        try:
            value = applySplit(s, mightDecode)
            return value.encode("iso-8859-15")
            #return applySplit(s, mightDecode).encode("iso-8859-15")
        except UnicodeEncodeError:
            # Fall back to the Unicode result when it has no Latin-1 form.
            return applySplit(s, mightDecode)
    return s
def p2utf8(s):
    """Convert a Punycode (ACE) domain/address back to UTF-8 bytes.

    Recurses into tuples/lists; unlike ``p2latin1`` no fallback is needed
    since every Unicode string is UTF-8 encodable.  Python 2 era code.
    """
    if s == "":
        return s
    if type(s) == TupleType or type(s) == ListType:
        return map(p2utf8, s)
    if type(s) == StringType:
        return applySplit(s, mightDecode).encode("utf-8")
    return s
| [
"[email protected]"
] | |
80f794aa9ba66b1b969e6c17c2c50a7b1e2cd9b1 | 218508c2fd7ba70b2be45e0934673744b3735839 | /students/PeterMadsen/session05/subclasses.py | 4c9ba5d4be3836dfea69b5d193d39350de86304e | [] | no_license | finddeniseonline/sea-c34-python.old | 7e560502c749cfb7c78bd237400cd0f45efb2aca | 4ae3d9e87dcdce17e44135c2d478f0b023d68b88 | refs/heads/master | 2021-01-18T17:08:09.721529 | 2015-05-12T04:07:42 | 2015-05-12T04:07:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,583 | py | class SpecialList(list):
def __init__(self):
    # Delegate straight to list; kept only as an explicit-initialization demo.
    list.__init__(self)
# Question 1
"""can you override Python default methods to deal with your object"""
def append(self, value):
    # Deliberately breaks the list contract: overwrites index 0 instead of
    # appending.  Raises IndexError when the list is empty.
    self[0] = value
# Question 2
def replace(self, index, value):
    """
    How would you write a method that would allow you to replace
    a value in a list with a different value but would return the original
    value?

    Returns None (after printing the error) when *index* is out of range.
    """
    try:
        temp = self[index]
        self[index] = value
        return temp
    except IndexError as e:
        # Out-of-range index: report and implicitly return None.
        print(e)
# Question 3
def spam(self):
    """
    How would you have a method to overwrite all entries in a
    SpecialList with 'spam'
    """
    # In-place overwrite; iterating by index keeps the list object identity.
    for index in range(len(self)):
        self[index] = 'spam'
# Question 4
def switch(self, a, b):
    """
    How would you write a method that extends the functionality of
    lists so that you can switch the value of the list at 'a' with the
    value of the list at 'b'?
    """
    try:
        # Tuple-unpacking swap; no temporary needed.
        self[a], self[b] = self[b], self[a]
    except IndexError as e:
        print("That index is out of bounds", e)
# Test Code ----- manual demonstration of each SpecialList override.
if __name__ == '__main__':
    test_list = SpecialList()
    test_list.extend([1, 2, 3, 4, 5])
    test_list.append(45)  # overridden append: replaces index 0
    print(test_list)
    test_list.switch(0, 2)
    print(test_list)
    test_list.spam()  # every entry becomes 'spam'
    print(test_list)
    test_var = test_list.replace(4, 12312)  # returns the displaced value
    print(test_list)
    print(test_var)
| [
"[email protected]"
] | |
59788b2cafcd5d7900a6540fc788ae28c818d81d | b0b33c86509c5fb33e6e66e674d6f3bf8f91ef92 | /codes/M3/Music_Mood_Lifter_With_Web_Function/mysite/musicMoodLifter/urls.py | 5ec45ce255ff1016b67409930c4ad745c6dbbb28 | [] | no_license | V3rdant-byte/MusicMoodLifter | c7a6265ffdd8f7d1787efc510780dc78795f8722 | 76f8bd07ec23b43f8d5cbcff7bf530947b16b3fe | refs/heads/main | 2023-05-08T22:56:11.355554 | 2021-06-02T06:28:59 | 2021-06-02T06:28:59 | 368,715,914 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 752 | py | # backend/server/apps/endpoints/urls.py file
from django.conf.urls import url, include
from rest_framework.routers import DefaultRouter
from .views import EndpointViewSet
from .views import MLAlgorithmViewSet
from .views import MLRequestViewSet
from .views import PredictView # import PredictView
router = DefaultRouter(trailing_slash=False)
router.register(r"endpoints", EndpointViewSet, basename="endpoints")
router.register(r"mlalgorithms", MLAlgorithmViewSet, basename="mlalgorithms")
router.register(r"mlrequests", MLRequestViewSet, basename="mlrequests")
urlpatterns = [
url(r"^api/v1/", include(router.urls)),
# add predict url
url(
r"^api/v1/(?P<endpoint_name>.+)/predict$", PredictView.as_view(), name="predict"
),
] | [
"[email protected]"
] | |
4fdb90f42debae152e7675bb4fe6bcec91b00970 | 8f2adbcac084e3e050fbd28d52b67513f8445f60 | /TDD/superlists/TestApp/tests.py | 8f25a46b06b38d6ceec4eda8b91316ce72517b42 | [] | no_license | mattew8/Anything | ce8c9127a5508113f57f02df0f1e24b92a902f0c | 88c1a34a8b53d9a2e4d313e39b40925dfc708b11 | refs/heads/master | 2023-03-11T23:54:25.110567 | 2021-03-04T07:10:27 | 2021-03-04T07:10:27 | 325,308,693 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 369 | py | from django.test import TestCase
# from django.core.urlresolvers import resolve
from django.urls import reverse
from TestApp.views import home
class SmokeTest(TestCase):
    """Smoke tests for the basic URL configuration."""

    def test_root_url(self):
        """The root URL ("/") should be routed to the ``home`` view."""
        # ``resolve`` maps a request path to its view; the original used
        # ``reverse``, which goes the other way and returns a plain string
        # with no ``func`` attribute.  Imported locally so the module's
        # import block stays untouched.
        from django.urls import resolve
        # When "/" is requested, resolving it should yield the home view.
        found = resolve('/')
        # Bug fix: the module imports ``home``; ``home_page`` was undefined
        # and raised NameError when the test ran.
        self.assertEqual(found.func, home)
| [
"[email protected]"
] | |
56926e8b13956deb65a9d46864ce5c986f68d64b | ed84ceb0b017ed7e8fe6fd3bd57a100e448e4cb7 | /setup.py | 6a7b815dbba18241e2f5712c7d8ac45cd6de697a | [] | no_license | rrader/zk_phone | 4ed7d42d5f0bf8a1216d5fb0fabbf91c8585b401 | bd23f0926b94eec4e3f1f0c0bd900d6990883ebd | refs/heads/master | 2020-12-30T15:41:38.026950 | 2017-09-14T10:12:42 | 2017-09-14T10:12:42 | 91,161,441 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 550 | py | #!/usr/bin/env python
from setuptools import setup
# Package metadata and installable entry points for the Zakupki Phone app.
setup(
    name='zk_phone',
    version='1.0',
    description='Zakupki Phone',
    author='Roman Rader',
    author_email='[email protected]',
    url='https://github.com/rrader/zk_phone',
    install_requires=[
        'pad4pi==1.0.0',
        'charlcd==0.4.0',
        'RPi.GPIO==0.6.3',
        'netifaces',
    ],
    entry_points={
        'console_scripts': [
            'zk_phone = zk_phone.main:main',
            # Bug fix: console_scripts targets must be "module:attr"; the
            # previous value 'zk_phone.lib.simulator.main' lacked the colon
            # and was rejected by setuptools at install time.
            'zk_phone_simulator = zk_phone.lib.simulator:main',
        ],
    },
)
| [
"[email protected]"
] | |
89594222799bd5fedf98053e52b981f25a662f15 | 628ab6e412e7c4c755bc42d8137acd3da2d4be0e | /tests/console/test__trace.py | ce46746027d4360cee54b5677735b9922033fe1b | [
"MIT",
"CC-BY-4.0"
] | permissive | TrendingTechnology/apysc | ffd7d9b558707b934c5df127eca817d4f12d619b | 5c6a4674e2e9684cb2cb1325dc9b070879d4d355 | refs/heads/main | 2023-06-01T20:19:20.835539 | 2021-06-20T03:53:33 | 2021-06-20T03:53:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 518 | py | from random import randint
from retrying import retry
from apysc import Stage
from apysc import trace
from apysc.expression import expression_file_util
# NOTE(review): decorator arguments are evaluated once at import time, so
# randint fixes a single retry delay for the whole session rather than
# re-randomizing per attempt.
@retry(stop_max_attempt_number=15, wait_fixed=randint(10, 3000))
def test_trace() -> None:
    """trace() should emit a console.log expression containing the stage's
    variable name and the stringified extra arguments."""
    stage: Stage = Stage()
    trace(stage, 100, 'Hello!')
    expression: str = expression_file_util.get_current_expression()
    expected: str = (
        f'console.log({stage.variable_name}, "100", "Hello!");'
    )
    assert expected in expression
"[email protected]"
] | |
03e066e76180050175f2d916800c45b7b5bdbac5 | 2b99e82d3307ae47fac09db249c1633b5d401de0 | /testapp/testapp/testmain/tests/test_webservices.py | 5c9cd6f24eca6047226d460299a28093467dc94c | [
"ISC",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | nuxion/django-afip | 2b5e097a01e129de74a9a703919d4f70aad79140 | 7ecc1090680e6f5b3cb9f6ee46716b1d1349f0fb | refs/heads/master | 2023-01-04T01:19:17.926243 | 2020-10-30T09:35:24 | 2020-10-30T09:35:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,261 | py | """Tests for AFIP-WS related classes."""
import os
from datetime import datetime
from datetime import timedelta
from unittest import skip
from unittest.mock import patch
import pytest
from django.conf import settings
from django.core import management
from django.test import TestCase
from django.utils.timezone import now
from factory.django import FileField
from django_afip import exceptions
from django_afip import factories
from django_afip import models
from testapp.testmain.tests.testcases import LiveAfipTestCase
from testapp.testmain.tests.testcases import PopulatedLiveAfipTestCase
@pytest.mark.live
class AuthTicketTest(TestCase):
    """Test AuthTicket methods.

    These tests hit the real AFIP sandbox/production services (marked
    ``live``) and assert on the concrete error paths of ticket creation.
    """

    def test_bad_cuit(self):
        """Test using the wrong cuit for a key pair."""
        taxpayer = factories.AlternateTaxpayerFactory(cuit=20329642339)
        taxpayer.create_ticket("wsfe")

        with self.assertRaisesRegex(
            exceptions.AfipException,
            # Note: AFIP apparently edited this message and added a typo:
            "ValidacionDeToken: No apareci[oó] CUIT en lista de relaciones:",
        ):
            models.populate_all()

    def test_bogus_certificate_exception(self):
        """Test that using a junk ceritificates raises as expected."""
        # New TaxPayers will fail to save with an invalid cert, but many
        # systems may have very old TaxPayers, externally created, or other
        # stuff, so this scenario might still be possible.
        # The patch bypasses the save-time certificate validation so the
        # bogus cert reaches the ticket-creation code path.
        with patch(
            "django_afip.models.TaxPayer.get_certificate_expiration",
            spec=True,
            return_value=None,
        ):
            taxpayer = factories.TaxPayerFactory(
                key=FileField(data=b"Blah"),
                certificate=FileField(data=b"Blah"),
            )

        with self.assertRaises(exceptions.CorruptCertificate) as e:
            taxpayer.create_ticket("wsfe")

        self.assertNotIsInstance(e, exceptions.AfipException)

    def test_no_active_taxpayer(self):
        """Test that no TaxPayers raises an understandable error."""
        with self.assertRaisesMessage(
            exceptions.AuthenticationError,
            "There are no taxpayers to generate a ticket.",
        ):
            models.AuthTicket.objects.get_any_active("wsfe")

    def test_expired_certificate_exception(self):
        """Test that using an expired ceritificate raises as expected."""
        # Fixture certificates that are valid but past their expiry date.
        with open(os.path.join(settings.BASE_DIR, "test_expired.key"),) as key, open(
            os.path.join(settings.BASE_DIR, "test_expired.crt"),
        ) as crt:
            taxpayer = factories.TaxPayerFactory(
                key=FileField(from_file=key),
                certificate=FileField(from_file=crt),
            )

        with self.assertRaises(exceptions.CertificateExpired):
            taxpayer.create_ticket("wsfe")

    def test_untrusted_certificate_exception(self):
        """
        Test that using an untrusted ceritificate raises as expected.
        """
        # Note that we hit production with a sandbox cert here:
        taxpayer = factories.TaxPayerFactory(is_sandboxed=False)

        with self.assertRaises(exceptions.UntrustedCertificate):
            taxpayer.create_ticket("wsfe")
class PopulationTest(LiveAfipTestCase):
    """
    Tests models population view.

    As a side effect, also test valid ticket generation.
    """

    def test_population_command(self):
        """Test the afipmetadata command."""
        management.call_command("afipmetadata")

        receipts = models.ReceiptType.objects.count()
        concepts = models.ConceptType.objects.count()
        documents = models.DocumentType.objects.count()
        vat = models.VatType.objects.count()
        tax = models.TaxType.objects.count()
        currencies = models.CurrencyType.objects.count()

        # Only asserts non-emptiness: exact counts depend on live AFIP data.
        self.assertGreater(receipts, 0)
        self.assertGreater(concepts, 0)
        self.assertGreater(documents, 0)
        self.assertGreater(vat, 0)
        self.assertGreater(tax, 0)
        self.assertGreater(currencies, 0)

    def test_metadata_deserialization(self):
        """Test that we deserialize descriptions properly."""
        management.call_command("afipmetadata")

        # This asserting is tied to current data, but it validates that we
        # don't mess up encoding/decoding the value we get.
        # It _WILL_ need updating if the upstream value ever changes.
        fac_c = models.ReceiptType.objects.get(code=11)
        self.assertEqual(fac_c.description, "Factura C")
class TaxPayerTest(LiveAfipTestCase):
    """Exercise TaxPayer instance methods against the live service."""
    def test_fetch_points_of_sale(self):
        """``fetch_points_of_sales`` stores at least one PointOfSales row."""
        models.TaxPayer.objects.first().fetch_points_of_sales()
        self.assertGreater(models.PointOfSales.objects.count(), 0)
class ReceiptQuerySetTestCase(PopulatedLiveAfipTestCase):
    """Test ReceiptQuerySet methods."""
    # Helper: build a receipt AFIP accepts (default document type, one VAT
    # entry of type 5 and one tax of type 3).
    def _good_receipt(self):
        receipt = factories.ReceiptFactory(
            point_of_sales=models.PointOfSales.objects.first(),
        )
        factories.VatFactory(vat_type__code=5, receipt=receipt)
        factories.TaxFactory(tax_type__code=3, receipt=receipt)
        return receipt
    # Helper: identical, except document type 80 with the factory's default
    # document number, which AFIP rejects (see the error literal below).
    def _bad_receipt(self):
        receipt = factories.ReceiptFactory(
            point_of_sales=models.PointOfSales.objects.first(),
            document_type__code=80,
        )
        factories.VatFactory(vat_type__code=5, receipt=receipt)
        factories.TaxFactory(tax_type__code=3, receipt=receipt)
        return receipt
    def test_validate_empty(self):
        # Validating an empty queryset is a no-op even when receipts exist.
        factories.ReceiptFactory()
        errs = models.Receipt.objects.none().validate()
        self.assertEqual(errs, [])
        self.assertEqual(models.ReceiptValidation.objects.count(), 0)
    def test_validation_good(self):
        """Test validating valid receipts."""
        r1 = self._good_receipt()
        r2 = self._good_receipt()
        r3 = self._good_receipt()
        errs = models.Receipt.objects.all().validate()
        self.assertEqual(len(errs), 0)
        # Every receipt gets its own approved validation record.
        self.assertEqual(
            r1.validation.result,
            models.ReceiptValidation.RESULT_APPROVED,
        )
        self.assertEqual(
            r2.validation.result,
            models.ReceiptValidation.RESULT_APPROVED,
        )
        self.assertEqual(
            r3.validation.result,
            models.ReceiptValidation.RESULT_APPROVED,
        )
        self.assertEqual(models.ReceiptValidation.objects.count(), 3)
    def test_validation_bad(self):
        """Test validating invalid receipts."""
        self._bad_receipt()
        self._bad_receipt()
        self._bad_receipt()
        errs = models.Receipt.objects.all().validate()
        # A single error is reported and no validation rows are created.
        self.assertEqual(len(errs), 1)
        self.assertEqual(
            errs[0],
            "Error 10015: Factura B (CbteDesde igual a CbteHasta), DocTipo: "
            "80, DocNro 203012345 no se encuentra registrado en los padrones "
            "de AFIP y no corresponde a una cuit pais.",
        )
        self.assertQuerysetEqual(models.ReceiptValidation.objects.all(), [])
    def test_validation_mixed(self):
        """
        Test validating a mixture of valid and invalid receipts.
        Receipts are validated by AFIP in-order, so all receipts previous to
        the bad one are validated, and nothing else is even parsed after the
        invalid one.
        """
        r1 = self._good_receipt()
        self._bad_receipt()
        self._good_receipt()
        errs = models.Receipt.objects.all().validate()
        self.assertEqual(len(errs), 1)
        self.assertEqual(
            errs[0],
            "Error 10015: Factura B (CbteDesde igual a CbteHasta), DocTipo: "
            "80, DocNro 203012345 no se encuentra registrado en los padrones "
            "de AFIP y no corresponde a una cuit pais.",
        )
        # Only the receipt *before* the failing one got a validation row.
        self.assertQuerysetEqual(
            models.ReceiptValidation.objects.all(),
            [r1.pk],
            lambda rv: rv.receipt_id,
        )
    def test_validation_validated(self):
        """Test validating invalid receipts."""
        # Already-validated receipts are skipped: no new rows, no errors.
        receipt = self._good_receipt()
        models.ReceiptValidation.objects.create(
            result=models.ReceiptValidation.RESULT_APPROVED,
            cae="123",
            cae_expiration=now(),
            receipt=receipt,
            processed_date=now(),
        )
        errs = models.Receipt.objects.all().validate()
        self.assertEqual(models.ReceiptValidation.objects.count(), 1)
        self.assertEqual(errs, [])
    def test_validation_good_service(self):
        """Test validating a receipt for a service (rather than product)."""
        receipt = self._good_receipt()
        # Service receipts additionally carry a service period and an
        # expiration date.
        receipt.concept_id = 2
        receipt.service_start = datetime.now() - timedelta(days=10)
        receipt.service_end = datetime.now()
        receipt.expiration_date = datetime.now() + timedelta(days=10)
        receipt.save()
        errs = models.Receipt.objects.all().validate()
        self.assertEqual(len(errs), 0)
        self.assertEqual(
            receipt.validation.result,
            models.ReceiptValidation.RESULT_APPROVED,
        )
        self.assertEqual(models.ReceiptValidation.objects.count(), 1)
    def test_validation_good_without_tax(self):
        """Test validating valid receipts."""
        # VAT entry only, no tax entry.
        receipt = factories.ReceiptFactory(
            point_of_sales=models.PointOfSales.objects.first(),
            total_amount=121,
        )
        factories.VatFactory(vat_type__code=5, receipt=receipt)
        errs = models.Receipt.objects.all().validate()
        self.assertEqual(len(errs), 0)
        self.assertEqual(
            receipt.validation.result,
            models.ReceiptValidation.RESULT_APPROVED,
        )
        self.assertEqual(models.ReceiptValidation.objects.count(), 1)
    def test_validation_good_without_vat(self):
        """Test validating valid receipts."""
        # Receipt type 11 ("Factura C", per the metadata test above) is
        # validated here with a tax entry but no VAT entry.
        receipt = factories.ReceiptFactory(
            point_of_sales=models.PointOfSales.objects.first(),
            receipt_type__code=11,
            total_amount=109,
        )
        factories.TaxFactory(tax_type__code=3, receipt=receipt)
        errs = models.Receipt.objects.all().validate()
        self.assertEqual(len(errs), 0)
        self.assertEqual(
            receipt.validation.result,
            models.ReceiptValidation.RESULT_APPROVED,
        )
        self.assertEqual(models.ReceiptValidation.objects.count(), 1)
    @skip("Currently not working -- needs to get looked at.")
    def test_validation_with_observations(self):
        # Expects AFIP to approve while attaching exactly one observation.
        receipt = factories.ReceiptFactory(
            document_number=20291144404,
            document_type__code=80,
            point_of_sales=models.PointOfSales.objects.first(),
            receipt_type__code=1,
        )
        factories.VatFactory(vat_type__code=5, receipt=receipt)
        factories.TaxFactory(tax_type__code=3, receipt=receipt)
        errs = models.Receipt.objects.all().validate()
        self.assertEqual(len(errs), 0)
        self.assertEqual(
            receipt.validation.result,
            models.ReceiptValidation.RESULT_APPROVED,
        )
        self.assertEqual(models.ReceiptValidation.objects.count(), 1)
        self.assertEqual(models.Observation.objects.count(), 1)
        self.assertEqual(receipt.validation.observations.count(), 1)
    def test_credit_note(self):
        """Test validating valid a credit note."""
        # Create an invoice (code=6) and validate it...
        invoice = self._good_receipt()
        errs = models.Receipt.objects.filter(pk=invoice.pk).validate()
        self.assertEqual(len(errs), 0)
        self.assertEqual(models.ReceiptValidation.objects.count(), 1)
        # Now create a credit note (code=8) and validate it...
        credit = self._good_receipt()
        # NOTE(review): this assignment is never save()d, and validate() is
        # invoked on a fresh queryset which refetches from the DB — the
        # credit note may actually be submitted with its original receipt
        # type.  Confirm whether a ``credit.save()`` is missing here.
        credit.receipt_type = factories.ReceiptTypeFactory(code=8)
        credit.related_receipts.set([invoice])
        errs = models.Receipt.objects.filter(pk=credit.pk).validate()
        self.assertEqual(len(errs), 0)
        self.assertEqual(models.ReceiptValidation.objects.count(), 2)
| [
"[email protected]"
] | |
b331c79041b99e2b04adb03090428a5e3733b8c3 | ed629bbbc6a76c275db6d9bd62c06b72d3f7d471 | /sockets/client_skeleton.py | bf802a7ec3ce631cc550750860b8710bfa209751 | [] | no_license | rtgfd157/ElevatorControllerProj | b134d3c8035680e0b9c6f758d16be16e28bc55b6 | 11b0b2b623652ebbecc4a5c5606b3843f2696666 | refs/heads/main | 2023-03-04T05:14:59.546424 | 2021-02-07T11:31:39 | 2021-02-07T11:31:39 | 314,550,730 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,428 | py | import socket
import pickle
import struct
import sys
import logging
# Address of the elevator-controller server this client talks to.
SERVER_IP = "127.0.0.1" # Our server will run on same computer as client
SERVER_PORT = 5678
# Emit INFO-level messages from the helper functions below.
logging.basicConfig(level=logging.INFO)
# HELPER SOCKET METHODS
def recieve_message(conn):
    """Receive one pickled message from *conn* and return the deserialized object.

    Note: reads at most 1024 bytes, so larger payloads would be truncated.
    """
    # Bug fix: pickle.loads() requires a bytes-like object; the previous
    # ``.decode()`` turned the payload into str and made loads() raise
    # TypeError.  Keep the raw bytes.
    data = conn.recv(1024)
    data_variable = pickle.loads(data)
    # Bug fix: logging uses lazy %-style formatting; the message had no
    # placeholder for the extra positional argument.
    logging.info("client data received: %s", data_variable)
    return data_variable
def send_message(conn, message):
    """Pickle *message* and send it over the connected socket *conn*."""
    # Bug fix: lazy %-style formatting instead of a stray positional arg;
    # the leftover debug print() was removed.
    logging.info("client message sent: %s", message)
    data_string = pickle.dumps(message)
    conn.send(data_string)
def connect():
    """Open a TCP connection to the configured server and return the socket."""
    client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    client_socket.connect((SERVER_IP, SERVER_PORT))
    return client_socket
if __name__ == '__main__':
    # Renamed from ``socket`` to avoid shadowing the socket module.
    sock = connect()
    command = ''
    if len(sys.argv) >= 2:
        command = sys.argv[1]
    # Build the request dict for the selected sub-command.
    if command == 's':
        d = {'command': 's'}
    elif command == 'floor_up':
        d = {'command': 'floor_up', 'floor': str(sys.argv[2])}
    elif command == 'floor_down':
        # Bug fix: this branch previously re-tested 'floor_up', which made
        # the 'floor_down' command unreachable (it fell through to the
        # "unknown command" case).
        d = {'command': 'floor_down', 'floor': str(sys.argv[2])}
    elif command == 'el_button_press':
        d = {'command': 'el_button_press', 'elevator_number': str(sys.argv[2]), 'button_number': str(sys.argv[3])}
    else:
        d = {'command': 'unknown command'}
    send_message(sock, d)
| [
"[email protected]"
] | |
41570a7ae5f758f2a8e67f2f238b1044736cccc7 | 8400dc347281058481c3b8a926493fde0bd7d5a7 | /src/orienta.py | eb1eb73a05e28b27ed7e0474042214de829aead9 | [] | no_license | lva98/PIM0001 | 926c9b7980d4b324cde88fd68edf89be60cba94a | 09794c3ec8d0a0633e5bae1bc7dfa0756c5d3444 | refs/heads/master | 2021-09-08T20:43:06.475160 | 2018-03-12T04:23:57 | 2018-03-12T04:23:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,012 | py | import numpy as np
import scipy
import matplotlib.pyplot as plt
from mylib.convolucao import *
from scipy import stats
from mylib.saveDataBase import *
#from skimage import data
#from skimage.util import img_as_float
#from skimage.filters import gabor_kernel
def getSobelMapGxGy(img, operador, i, j):
    """Return the sum of the element-wise product of *img* and *operador*.

    ``i`` and ``j`` are unused here, but the signature is dictated by the
    callback convention of ``convoluir``.
    """
    acumulado = 0
    for fila in range(len(img)):
        fila_imagem = img[fila]
        fila_kernel = operador[fila]
        for col in range(len(img[0])):
            acumulado += fila_kernel[col] * fila_imagem[col]
    return acumulado
"""
__GLOBAL_VARS__
"""
# Working buffers filled in by the __MAIN__ section below.
mapa = []
mapaGx = []
mapaGy = []
mapaTheta = []
# 7x7 all-ones kernel built via mylib's initMatrix helper.
gsOperator = initMatrix(7, 7, 1)
# Classic 3x3 Sobel kernels: vertical gradient (Gy) and horizontal
# gradient (Gx).
sobelGy = [(-1, -2, -1),
           ( 0, 0, 0),
           ( 1, 2, 1)]
sobelGx = [(-1, 0, 1),
           (-2, 0, 2),
           (-1, 0, 1)]
"""
__MAIN__
"""
# Command-line driven orientation-map extraction (Python 2 script).
# argv: [1] image extension, [2] image base name, [3] mode ('salvar' to
# store in the DB, anything else to search), [4] DB record name ('salvar'
# mode only).
nomeImg = "img*/" + sys.argv[2]
tipoImg = sys.argv[1]
opc = sys.argv[3]
nome = ''
if(opc == 'salvar'):
    nome = sys.argv[4]
Img = scipy.misc.imread(nomeImg + "." + tipoImg)
Img2 = plt.imread(nomeImg.replace('*', '') + "." + tipoImg)
# Gradient maps obtained by convolving with the Sobel kernels above.
mapaGy = convoluir(Img, sobelGy, getSobelMapGxGy)
mapaGx = convoluir(Img, sobelGx, getSobelMapGxGy)
# Tile size: one orientation estimate per t x t block.
t = 8
mapaTheta = initMatrix(Img.shape[1]/t, Img.shape[0]/t, -1)
print '->Gerando mapa de Orientacoes'
for i in range(0, len(Img), t):
    for j in range(0, len(Img[0]), t):
        sumGsy = 0
        # NOTE(review): initialised to 1 rather than 0 — presumably to keep
        # arctan2 away from the degenerate (0, 0) case; confirm intent.
        sumGsx = 1
        vet = []
        for ii in range(0, t):
            for jj in range(0, t):
                # Squared-gradient accumulation over the tile.
                sumGsy += 2 * mapaGx[ii+i][jj+j] * mapaGy[ii+i][jj+j]
                sumGsx += pow(mapaGx[ii+i][jj+j], 2) - pow(mapaGy[ii+i][jj+j], 2)
                vet.append(Img[ii+i][jj+j])
        # Only tiles with enough contrast (std-dev > 30) get an orientation;
        # flat tiles keep the initMatrix sentinel value (-1).
        if(math.sqrt(np.var(vet)) > 30):
            phi = 0.5 * np.arctan2(sumGsy, sumGsx)
            # k shifts phi by 0, pi/2 or pi depending on the quadrant so the
            # resulting angle lands in a full-circle range.
            k = 0
            if(phi < 0 and sumGsy < 0) or (phi >= 0 and sumGsy > 0):
                k = 0.5
            elif(phi < 0 and sumGsy >= 0):
                k = 1.0
            elif(phi >= 0 and sumGsy <= 0):
                k = 0
            theta = np.rad2deg(phi + k*math.pi)
            # Reflect theta about the nearest multiple of 90 degrees
            # (0/90/180/270/360), mirroring the angle within its 45-degree
            # sector.
            if(theta > 0 and theta < 45):
                theta -= (theta)*2
            elif(theta > 45 and theta < 90):
                theta += (90 - theta)*2
            elif(theta > 90 and theta < 135):
                theta -= (theta - 90)*2
            elif(theta > 135 and theta < 180):
                theta += (180 - theta)*2
            elif(theta > 180 and theta < 225):
                theta -= (theta - 180)*2
            elif(theta > 225 and theta < 270):
                theta += (270 - theta)*2
            elif(theta >270 and theta < 315):
                theta -= (theta - 270)*2
            elif(theta > 315 and theta < 360):
                theta += (360 - theta)*2
            # NOTE(review): hard-coded 8 instead of ``t`` — keep in sync
            # with the tile size above (Python 2 integer division).
            mapaTheta[i/8][j/8] = theta
            if(opc == 'salvar'):
                plot_point((j, i), theta, 3)
if(opc == 'salvar'):
    print '->Salvando info em BD'
    salvarBanco(nome, mapaTheta)
    plt.imshow(Img2, alpha = 0.5)
    plt.show()
else:
    print '->Buscando digital em BD'
    buscarBanco(mapaTheta)
| [
"[email protected]"
] | |
482f361a3f716b88e023c179adbaed97e6a9f030 | e5068b611850e24c4ac614cc0c1d451fa4dff0df | /add two numbers/add2num.py | f43e00e1cb49d843f4ed7616048a93c2ee3695b6 | [] | no_license | nokk3r-mospolytech/py-mat-sorting-logics-4 | 2ec84bc348bbb818361b0be77160894de774fa8f | 57c220fe4ddf23830ecb8ede7e78f61707f6a6b0 | refs/heads/master | 2023-05-23T10:14:25.307181 | 2021-06-09T22:00:03 | 2021-06-09T22:00:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 660 | py | class Solution:
def nextPermutation(self, nums: List[int]) -> None:
"""
Do not return anything, modify nums in-place instead.
"""
index = len(nums) - 2
while index >= 0:
if nums[index] < nums[index + 1]:
break
index -= 1
if index < 0:
nums.sort()
return
nextIndex = index + 1
while nextIndex < len(nums) and nums[nextIndex] > nums[index]:
nextIndex += 1
# swap index and nextIndex
nums[index], nums[nextIndex - 1] = nums[nextIndex - 1], nums[index]
nums[index + 1:] = nums[index + 1:][::-1] | [
"[email protected]"
] | |
ba01ecea1354ffd90c89a9317db387e811dc7e40 | 98b518b8bf4aef1576b146330546acf687a6ab46 | /ipcam/wsgi.py | 2ce2a7024d43bce371d33ad05e9bf573859ab9d0 | [] | no_license | prabhat-g97/IP-Cam-Solutions | d31ea2c595d67120a9cfc8324c3188b2bb88591c | 3a3cf873170774d4da49c02d0f2eecb6a6aa617e | refs/heads/master | 2023-03-06T01:25:08.622570 | 2021-02-21T06:11:50 | 2021-02-21T06:11:50 | 340,638,265 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 387 | py | """
WSGI config for ipcam project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at the project settings before building the application;
# an externally provided DJANGO_SETTINGS_MODULE takes precedence.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ipcam.settings")
# Module-level WSGI callable that WSGI servers import as ``application``.
application = get_wsgi_application()
| [
"[email protected]"
] | |
c7d7acd1729de9136517c52cb740e9fc96bd2ebe | 4e60524362802c3fffd0789b554fda65400b014f | /halo_player/src/player.py | abf079678122e6f307548004acd373adf4c8b28b | [
"Apache-2.0"
] | permissive | John-Thunder/acs-logging-test | d8bfa8534c947ad98f7aec90ce291ef2dd643774 | 185868c5bfc7d1ea8d17ec4c8384e44ac80a39ff | refs/heads/master | 2021-01-11T18:24:47.107282 | 2016-10-01T16:44:54 | 2016-10-01T16:44:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,316 | py | import config
from log import Log
from summaryTable import SummaryTable
import json
class PlayerService:
    """Expose per-gamertag match statistics backed by the Azure summary table."""
    def __init__(self):
        self.log = Log()
        self.summary = SummaryTable(config.AZURE_STORAGE_ACCOUNT_NAME, config.AZURE_STORAGE_ACCOUNT_KEY, config.AZURE_STORAGE_SUMMARY_TABLE_NAME)
    def getPlayer(self, gamertag):
        """Return a dict with the gamertag and its match statistics."""
        return {
            "gamertag": gamertag,
            "stats": self.getStats(gamertag),
        }
    def getStats(self, gamertag):
        """Return match-outcome counters, one single-key dict per outcome,
        always in Wins/Ties/Lost/DNF order."""
        outcome_table = (
            ("matchwon", "Wins"),
            ("matchtied", "Ties"),
            ("matchlost", "Lost"),
            ("matchDNF", "DNF"),
        )
        return [
            {label: self.summary.getCount(gamertag + suffix)}
            for suffix, label in outcome_table
        ]
if __name__ == "__main__":
    # CLI entry point: dump stats for every configured gamertag as JSON.
    service = PlayerService()
    for tag in (entry.strip() for entry in config.HALO_GAMERTAGS.split(',')):
        print(json.dumps(service.getPlayer(tag)))
| [
"[email protected]"
] | |
0761890b6354a438f49f760f4dda99bd8c4cc5cc | 368ec23355481c81f32d62618504cbc8b73bec9b | /Computational Investing/Metrics.py | 5cd66643aa9dd8cc5edb10d37224a93b9141642b | [] | no_license | manuwhs/Coursera-Codes | f6d38d30f55b55b651101319031acb9d18c51125 | 771b208c3645953ba4b93ee279ec73af181a9edd | refs/heads/master | 2021-01-24T01:02:25.175360 | 2018-02-25T01:16:34 | 2018-02-25T01:16:34 | 122,794,790 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 676 | py | # -*- coding: utf-8 -*-
"""
Load AAPL closing prices, plot price and daily returns, and print basic
risk/return metrics (Python 2 script; helpers live in ``functions``).
"""
import functions as fu
import numpy as np
### LOADING FUNCTIONS ###
data = fu.load_dataset("./aapl.csv")
price = np.array(data["Close"]);
labels = ["Apple Price at Close", "Days", "Dollars"]
fu.plot_graph([],price,labels, 1)
# Daily return series derived from the closing prices.
Returns = fu.get_Return(price)
labels = ["Apple Return Price at Close", "Days", "%"]
fu.plot_graph([],Returns,labels, 1)
# Summary statistics of the daily returns.
E_Return = np.mean(Returns)
std_Return = np.std(Returns)
SharpR = fu.get_SharpR(Returns)
SortinoR = fu.get_SortinoR(Returns)
print "Expected Dayly Return: " + str(E_Return)
print "STD of Return: " + str(std_Return)
print "Sharp Ratio: " + str(SharpR)
print "Sortino Ratio: " + str(SortinoR) | [
"[email protected]"
] | |
dfa05cec56fc6d3790362c07eacd29a72d036d24 | ca2f520ba362cbe0919fb91d66c7c6f4300902ba | /graph.py | c2f546b69ad725f75260f573e2e07258eb7237fa | [] | no_license | rbilgil/travelling-tourist | ea1dd0541a406593b1410b601227b42e7551bb88 | 0f03364296fcee17e214d1298b35e08bc52b80a8 | refs/heads/master | 2021-01-10T19:09:01.346848 | 2015-05-08T20:10:00 | 2015-05-08T20:10:00 | 34,953,240 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,822 | py | from robin_queue import Queue
from robin_queue import PriorityQueue
from heapq import *
import json
class Graph:
    """Adjacency-list graph with per-edge weights and property dicts.

    Three parallel structures, keyed by vertex:
      * ``graph[v]``              -- list of neighbour vertices
      * ``weights[v][i]``         -- weight of the edge to ``graph[v][i]``
      * ``edge_properties[v][i]`` -- property dict for that same edge
    """
    # Kept as class-level defaults for backward compatibility; __init__
    # below gives every instance its own copies.
    graph = {}
    weights = {}
    edge_properties = {}
    def __init__(self):
        # Bug fix: state used to live *only* in mutable class attributes, so
        # every Graph instance silently shared the same vertex/edge dicts.
        self.reset()
    def as_json(self):
        """Serialize the three parallel structures as one JSON array."""
        return json.dumps([
            self.graph,
            self.weights,
            self.edge_properties
        ])
    def from_json(self, data):
        """Load state from a JSON string produced by as_json()."""
        obj = json.loads(data)
        self.graph = obj[0]
        self.weights = obj[1]
        self.edge_properties = obj[2]
    def from_json_file(self, file):
        """Load state from a file containing as_json() output."""
        with open(file, "r") as f:
            self.from_json(f.read())
    def reset(self):
        """Discard all vertices and edges."""
        self.graph = {}
        self.weights = {}
        self.edge_properties = {}
    def vertices(self):
        """Return the graph's vertices (dict keys)."""
        return self.graph.keys()
    def edges(self):
        """Return every stored edge as a dict; undirected edges appear twice
        (once per direction)."""
        edges = []
        for vertex in self.vertices():
            for neighbour in self.get_neighbours(vertex):
                edges.append({ "from": vertex, "to": neighbour, "weight": self.get_weight(vertex, neighbour), "properties": self.get_edge_properties(vertex, neighbour)})
        return edges
    def get_weight(self, start, end):
        """Return the weight of the edge start -> end."""
        weight_index = self.graph[start].index(end)
        return self.weights[start][weight_index]
    def get_edge_properties(self, start, end):
        """Return the property dict of the edge start -> end."""
        edge_index = self.graph[start].index(end)
        return self.edge_properties[start][edge_index]
    def add_vertex(self, vertex):
        """Add an isolated vertex (no-op if it already exists)."""
        if not self.has_vertex(vertex):
            self.graph[vertex] = []
            self.weights[vertex] = []
            self.edge_properties[vertex] = []
    def has_vertex(self, vertex):
        return vertex in self.vertices()
    def vertex_empty(self, vertex):
        """True when the vertex has no outgoing edges; also True for
        unknown vertices."""
        if self.has_vertex(vertex):
            return len(self.graph[vertex]) == 0
        else:
            return True
    def add_edge(self, start, end, directed = False, weight = 0, edge_properties = None): # pass in any additional edge info as a hash
        """Add an edge; undirected edges are stored in both directions.

        Self-loops and duplicate edges are silently ignored.  Raises if
        either endpoint is missing.
        """
        # Bug fix: the default used to be a shared mutable ``{}``, so every
        # default-constructed edge aliased one single property dict.
        if edge_properties is None:
            edge_properties = {}
        if self.has_vertex(start) and self.has_vertex(end):
            if not start == end and not self.has_edge(start, end):
                self.graph[start].append(end)
                self.weights[start].append(weight)
                self.edge_properties[start].append(edge_properties)
                if not directed:
                    self.graph[end].append(start)
                    self.weights[end].append(weight)
                    self.edge_properties[end].append(edge_properties)
        else:
            raise Exception("Vertex " + start + " or " + end + " doesn't exist")
    def has_edge(self, start, end):
        return end in self.get_neighbours(start)
    def are_neighbours(self, start, end):
        return self.has_edge(start, end)
    def get_neighbours(self, vertex):
        return self.graph[vertex]
    def are_connected(self, start, end):
        """True when *end* is reachable from *start*."""
        return end in self.bfs(start)
    def is_connected_graph(self):
        """True when every vertex can reach every other vertex."""
        vertices = self.vertices()
        for vertex in vertices:
            traversal = self.bfs(vertex)
            # Bug fix: bfs() includes the start vertex, so a complete
            # traversal has len(vertices) entries.  The old check compared
            # against len(vertices) - 1 and therefore missed most
            # disconnected graphs.
            if len(traversal) != len(vertices):
                return False
        return True
    def bfs(self, current_node):
        """Breadth-first traversal; returns reachable vertices in visit
        order, starting with *current_node* itself."""
        visited = []
        queue = Queue()
        queue.enqueue(current_node)
        visited.append(current_node)
        while not queue.empty():
            current_node = queue.dequeue()
            for neighbour in self.get_neighbours(current_node):
                if not neighbour in visited:
                    queue.enqueue(neighbour)
                    visited.append(neighbour)
        return visited
    def shortest_path(self, start, end): #uses dijkstra
        """Dijkstra shortest path from *start* to *end*.

        Returns ``(total_distance, path)``, where each path element is a
        ``(vertex, weight, line)`` triple taken from the edge's "line"
        property.  Raises KeyError when *end* is unreachable.
        NOTE(review): the returned path stops at the predecessor of *end*
        (the end vertex itself is not appended) — confirm callers expect
        this.
        """
        distances = { start: 0 }
        previous_vertices = {}
        queue = PriorityQueue()
        for v in self.vertices():
            if v != start:
                distances[v] = float("inf")
            queue.queue(v, distances[v])
        while not queue.empty():
            weight, u = queue.dequeue()
            for v in self.get_neighbours(u):
                alternate_distance = distances[u] + self.get_weight(u, v)
                if alternate_distance < distances[v]:
                    distances[v] = alternate_distance
                    previous_vertices[v] = u
                    queue.queue(v, alternate_distance)
        # Walk predecessors back from *end* to rebuild the path.
        trace = previous_vertices[end]
        path = []
        while trace in previous_vertices:
            predecessor = previous_vertices[trace]
            path.insert(0, (trace, self.get_weight(trace, predecessor), self.get_edge_properties(trace, predecessor)["line"]))
            trace = predecessor
        total_dist = sum([x[1] for x in path])
        return total_dist, path
| [
"[email protected]"
] | |
a6ec0da92201d0c47e23a3c07660c006c52bd8e0 | b983144f3fd4512a9d4e2bcdc66be5c46924cd91 | /dp_builder/__main__.py | 5c66ce9bffa31fff70610c4dbbe02346a0f3521b | [
"MIT"
] | permissive | lexover/django_project_builder | 9e1aa53e968f5d95c01b63399f2fc94609e70b4c | 3c74f6f05c5b2d3cbdcc1868fec06ab463e3875f | refs/heads/main | 2023-03-08T22:29:49.706506 | 2021-03-01T14:43:47 | 2021-03-01T15:13:53 | 343,432,463 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 70 | py | from . import dp_builder
# Allow ``python -m dp_builder`` to launch the builder directly.
if __name__ == '__main__':
    dp_builder()
| [
"[email protected]"
] | |
eccb090631518fd4730c8d3997126bfe544be42c | fc28f2bfd212586e33e2abc412e3bf8e7cffef7b | /FlaskApp/Optimization_model.py | 14f2998cab1e61a390862f5526399f92e56d26f0 | [] | no_license | ugan9eds/TEAM_122_DVA | 0feab6c22eca684d486ee30eb7fe754cff0ea3df | ded76196f697ba75f5710b773a633dc928422d88 | refs/heads/master | 2022-12-23T21:07:10.095002 | 2020-04-18T04:03:15 | 2020-04-18T04:03:15 | 236,345,208 | 0 | 0 | null | 2022-12-09T22:12:48 | 2020-01-26T17:06:13 | Jupyter Notebook | UTF-8 | Python | false | false | 2,869 | py | #!/usr/bin/env python
# coding: utf-8
# In[1]:
def optimization_model(data, already_selected=None):
    """Pick an optimal 8-player fantasy-hockey lineup via integer programming.

    Parameters
    ----------
    data : pandas.DataFrame
        Player pool with 'Name', 'Position', 'Salary' and 'lgbm_projection'
        columns.
    already_selected : list of str, optional
        Player names the user has locked in; their counts and salaries are
        subtracted from the roster/budget constraints.

    Returns
    -------
    list of str
        Names of the selected players (locked players included).
    """
    import cvxpy as cp
    import numpy as np
    import pandas as pd
    # Bug fix: the default used to be a mutable ``[]``; use the None idiom.
    if already_selected is None:
        already_selected = []
    # Split the pool into locked players and remaining candidates.
    already_selected_data = data.loc[data['Name'].isin(already_selected)]
    C_selected = already_selected_data.loc[already_selected_data['Position']=='C'].count()['Position']
    D_selected = already_selected_data.loc[already_selected_data['Position']=='D'].count()['Position']
    W_selected = already_selected_data.loc[already_selected_data['Position'].isin(['LW','RW'])].count()['Position']
    G_selected = already_selected_data.loc[already_selected_data['Position']=='G'].count()['Position']
    total_selected = C_selected + W_selected + D_selected + G_selected
    selected_salary = np.sum(already_selected_data['Salary'])
    data = data.loc[~data['Name'].isin(already_selected)]
    # Coefficient vectors and one boolean decision variable per candidate.
    salary = np.array(data['Salary'])
    projected_DFS = np.array(data['lgbm_projection'])
    center = np.array(data['Position']=='C')
    winger = np.array(np.logical_or(data['Position']=='LW', data['Position']=='RW'))
    defense = np.array(data['Position']=='D')
    goalie = np.array(data['Position']=='G')
    selection = cp.Variable(len(salary), boolean=True)
    budget = 50000 - selected_salary
    max_players = 8 - total_selected
    # Roster constraints (2-3 centers, 2-3 wingers, 2-3 defensemen, exactly
    # one goalie), each reduced by the players already locked in.
    budget_constraint = salary*selection <= budget
    player_constraint = sum(selection) == max_players
    center_min = selection*center >= 2-C_selected
    center_max = selection*center <= 3-C_selected
    winger_min = selection*winger >= 2-W_selected
    winger_max = selection*winger <= 3-W_selected
    defender_min = selection*defense >= 2-D_selected
    defender_max = selection*defense <= 3-D_selected
    goalie_constraint = selection*goalie == 1-G_selected
    # Maximize total projected fantasy score subject to all constraints.
    total_projected_value = projected_DFS * selection
    objective = cp.Problem(cp.Maximize(total_projected_value), [budget_constraint, player_constraint, center_min, center_max, winger_min, winger_max, defender_min, defender_max, goalie_constraint])
    objective.solve()
    # Boolean variables come back as floats; >= 0.9 rounds them to True.
    # (Unused opt_positions/opt_salary debug locals were removed.)
    opt_selection = selection.value >= 0.9
    player_list = data['Name'][opt_selection].append(already_selected_data['Name'])
    return player_list.tolist()
| [
"[email protected]"
] | |
76140d045f6413a599080d17142efe2e7cb1b40d | 5f490d281ea385a692e7090a28867c47a82ccf61 | /app/__init__.py | 8c7d2aad3cd244daad69e7d41f32c8fa4b57b182 | [] | no_license | agoe/FlaskLogicBankSFRS_Demo | 7db0a93255ec8c4961ddcd32f030a7e369f0a8c1 | 62857caee4ba22a870f22b5f00da6f8ce1b742bd | refs/heads/master | 2023-02-12T04:56:00.325377 | 2021-01-06T15:46:37 | 2021-01-06T15:46:37 | 327,120,220 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,163 | py | from admin.admin_view_ext import AdminViewExt
from db import db, session
from flask import Flask
from api.json_encoder import SAFRSJSONEncoderExt
try:
from flask_admin import Admin
from flask_admin.contrib import sqla
except:
print("Failed to import flask-admin")
from safrs import SAFRSAPI
from flask_admin.contrib import sqla
import models
import logic
from models import User, Book
def create_app(config_filename=None, host="localhost"):
app = Flask("LogicBank Demo App")
app.config.from_object("config.Config")
# app.config.update(SQLALCHEMY_DATABASE_URI="sqlite://")
db.init_app(app)
with app.app_context():
db.create_all()
# Populate the db with users and a books and add the book to the user.books relationship
# session.commit()
for i in range(1):
user = User(name=f"user{i}", email=f"email{i}@email.com")
book = Book(name=f"test book {i}")
user.books.append(book)
session.commit()
create_api(app, host)
create_admin_ui(app)
return app
# create the api endpointsx
def create_api(app, HOST="localhost", PORT=5000, API_PREFIX="/api"):
api = SAFRSAPI(app, host=HOST, port=PORT, prefix=API_PREFIX, json_encoder=SAFRSJSONEncoderExt)
api.expose_object(models.User)
api.expose_object(models.Book)
api.expose_object(models.StoreModel)
api.expose_object(models.ItemModel)
print("Created API: http://{}:{}{}".format(HOST, PORT, API_PREFIX))
def create_admin_ui(app):
try:
admin = Admin(app, url="/admin")
for model in [models.User, models.Book, models.StoreModel, models.ItemModel]:
# admin.add_view(sqla.ModelView(model, db.session))
admin.add_view(AdminViewExt(model, db.session))
except Exception as exc:
print(f"Failed to add flask-admin view {exc}")
def create_app_for_test(config_filename=None, host="localhost"):
app = Flask("LogicBank Demo App")
app.config.from_object("config.Config")
db.init_app(app)
# https://flask-sqlalchemy.palletsprojects.com/en/2.x/contexts/
app.app_context().push()
return app
| [
"[email protected]"
] | |
b0a102927596daf62fc0288a61c20a0b7fcd64e2 | 67dbf167d4fcd52a2ac6d874502a1cbed8c0300e | /self-driving-car/p3_behavior_clone/load_data.py | 4bf29ecd6c6c7f442120006f5bc0149f6dea7a0a | [] | no_license | yaoyaowd/tensorflow_demo | 9e0c20e971f64d518fbdf3e989270b79ad5e1092 | 969f69b32cb2db6f86f4bf00aa29961b5e9cfc89 | refs/heads/master | 2021-01-17T16:19:08.470495 | 2018-09-10T18:35:46 | 2018-09-10T18:35:46 | 61,096,382 | 3 | 0 | null | 2016-09-30T23:45:01 | 2016-06-14T06:09:16 | Python | UTF-8 | Python | false | false | 3,016 | py | import cv2
import csv
from config import *
import numpy as np
from os.path import join
import random
from sklearn.utils import shuffle
from sklearn.model_selection import train_test_split
DIR = '/Users/dwang/tensorflow_demo/'
def split_train_val(file, test_size=0.2):
with open(file, 'r') as input:
reader = csv.reader(input)
data = [row for row in reader][1:]
train, test = train_test_split(data, test_size=test_size, random_state=1)
return train, test
def preprocess(filename):
frame_bgr = cv2.imread(filename)
frame_cropped = frame_bgr[CROP_HEIGHT, :, :]
frame_resized = cv2.resize(frame_cropped, dsize=(WIDTH, HEIGHT))
if CHANNELS == 1:
frame_resized = np.expand_dims(cv2.cvtColor(frame_resized, cv2.COLOR_BGR2YUV)[:,:,0], 2)
return frame_resized.astype('float32')
def load_data_batch(data, batchsize, augment_data, bias):
"""
:param data: list of training data
:param batchsize:
:param data_dir: directory of frames
:param augment_data:
:param bias: bias for balancing ground truth distribution.
:return:
"""
h, w, c = HEIGHT, WIDTH, CHANNELS
x = np.zeros(shape=(batchsize, h, w, c), dtype=np.float32)
y_steer = np.zeros(shape=(batchsize, ), dtype=np.float32)
shuffled_data = shuffle(data)
cnt = 0
while cnt < batchsize:
ct_path, lt_path, rt_path, steer, _, brake, speed = shuffled_data.pop()
steer = np.float32(steer)
camera = random.choice(['frontal', 'left', 'right'])
if camera == 'frontal':
frame = preprocess(join(DIR, ct_path.strip()))
steer = steer
elif camera == 'left':
frame = preprocess(join(DIR, lt_path.strip()))
steer += DATA_CORRECTION
elif camera == 'right':
frame = preprocess(join(DIR, rt_path.strip()))
steer -= DATA_CORRECTION
if augment_data:
if random.random() < 0.5:
frame = frame[:, ::-1, :]
steer *= -1.
steer += np.random.normal(loc=0, scale=AUGMENT_STEER_SIGMA)
if CHANNELS == 3:
frame = cv2.cvtColor(frame, code=cv2.COLOR_BGR2HSV)
frame[:, :, 2] *= random.uniform(AUGMENT_VALUE_MIN, AUGMENT_VALUE_MAX)
frame[:, :, 2] = np.clip(frame[:, :, 2], a_min=0, a_max=255)
frame = cv2.cvtColor(frame, code=cv2.COLOR_HSV2BGR)
steer_magnitude_thresh = np.random.rand()
if (abs(steer) + bias) < steer_magnitude_thresh:
pass
else:
x[cnt] = frame
y_steer[cnt] = steer
cnt += 1
return x, y_steer
def generate_data_batch(data, batchsize=BATCH_SIZE, augment_data=True, bias=0.5):
while True:
x, y = load_data_batch(data, batchsize, augment_data, bias)
yield x, y
if __name__ == '__main__':
train_data, test_data = split_train_val('/Users/dwang/self-driving-car/project_3_behavioral_cloning/data/driving_log.csv') | [
"[email protected]"
] | |
50f8231b426f4b90e32e818ad07606ec9cd78ad7 | 77f0dc068f1a8353ffc23ffd3457c4af8507b114 | /lang/python/practices/exception_with.py | a4d0bd763ba0a31e90a526fd5c94fc45577e84ae | [] | no_license | hclife/code-base | aa549a2b6a33b3ed1f0932801f0eb85eca3461de | f22ba81b8d363c50c3ac3fa5409e8fb3440cda83 | refs/heads/master | 2021-07-18T19:23:05.828540 | 2021-05-16T10:24:32 | 2021-05-16T10:24:32 | 83,208,177 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 97 | py | #!/usr/bin/env python3
with open("poem.txt") as f:
for line in f:
print(line,end='')
| [
"[email protected]"
] | |
aa1c6f862064385437b98c6ba39a39bb78ef6683 | d3974efafaa04f016d1437cc6e63eec9afea45b7 | /perf/Benchmarks/KV stores/drivers/bdbdriver.py | f951ca17258f556936ba4b81b8ba58c67c56b6a4 | [] | no_license | madthanu/alc-strace | c6a24d7b7cdea866cf599fb24c7029d69682ef86 | b791b5823dea82dbfa4f8d050c77e43c007d0277 | refs/heads/master | 2021-01-02T09:13:58.136533 | 2014-07-26T03:39:14 | 2014-07-26T03:40:50 | 15,639,732 | 1 | 0 | null | 2014-02-27T21:56:03 | 2014-01-04T21:15:44 | C | UTF-8 | Python | false | false | 3,546 | py | from datetime import datetime
import lmdb
from abstractdriver import *
import os
from bsddb3 import db
import shutil
import sys
import time
class BdbDriver(AbstractDriver):
    # Benchmark driver for Berkeley DB (via bsddb3); Python 2 code.
    # Config triplets: parameter name -> (description, default value).
    DEFAULT_CONFIG = {
        "sync": ("The durabilty of the key-value pairs being stored. ",
            "SYNC"),
        "method": ("Storage engine",
            "BTREE"),
        "use_mmap": ("use mmap?",
            "No"),
    }
    def __init__(self, name):
        # NOTE(review): the ``name`` argument is ignored and the driver is
        # always registered as "bdb" — confirm this is intentional.
        super(BdbDriver, self).__init__("bdb")
    def __str__(self):
        return self.driver_name
    def makeDefaultConfig(self):
        """This function needs to be implemented by all sub-classes.
        It should return the items that need to be in your implementation's configuration file.
        Each item in the list is a triplet containing: ( <PARAMETER NAME>, <DESCRIPTION>, <DEFAULT VALUE> )
        """
        return BdbDriver.DEFAULT_CONFIG
    def loadConfig(self, config):
        """Initialize the driver using the given configuration dict"""
        try:
            # Map the 'sync' setting onto the matching DB_TXN_* flag used
            # when beginning transactions (see txBegin below).
            if config['sync'] == 'SYNC':
                self.syncFlag = db.DB_TXN_SYNC
            elif config['sync'] == 'NOSYNC':
                self.syncFlag = db.DB_TXN_NOSYNC
            elif config['sync'] == 'WRITENOSYNC':
                self.syncFlag = db.DB_TXN_WRITE_NOSYNC
            if config['method'] == 'BTREE':
                self.accessMethod = db.DB_BTREE
            elif config['method'] == 'HASH':
                self.accessMethod = db.DB_HASH
            # Start from a clean on-disk store every run.
            self.dbLocation = '/media/K4/home/perfstore'
            if os.path.exists(self.dbLocation):
                os.system('rm -rf '+ self.dbLocation)
            os.mkdir(self.dbLocation)
            self.fileName = 'mydb.db'
            dbvar = self.GetDBInstance()
            dbvar.open(self.dbLocation + '/' + self.fileName, None, self.accessMethod, db.DB_CREATE | db.DB_AUTO_COMMIT | db.DB_NOMMAP)
        except Exception as e:
            # Surface the failure on stdout, then re-raise for the caller.
            print str(e)
            raise
    def GetDBInstance(self):
        # Create the transactional environment and a DB handle inside it;
        # also stored on self (self.env / self.dbInstance) for later use.
        self.dbInstance = None
        self.env = db.DBEnv()
        self.env.set_tx_max(100000)
        self.env.set_flags(db.DB_CREATE | db.DB_NOMMAP | db.DB_CHKSUM, 1)
        self.env.open(self.dbLocation, db.DB_CREATE | db.DB_INIT_MPOOL | db.DB_INIT_LOG | db.DB_INIT_TXN | db.DB_INIT_LOCK | db.DB_THREAD)
        self.dbInstance = db.DB(self.env)
        return self.dbInstance
    def formatConfig(self, config):
        """Return a formatted version of the config dict that can be used with the --config command line argument"""
        ret = "# %s Configuration File\n" % (self.driver_name)
        ret += "# Created %s\n" % (datetime.now())
        ret += "[%s]" % self.name
        for name in config.keys():
            desc, default = config[name]
            if default == None: default = ""
            ret += "\n\n# %s\n%-20s = %s" % (desc, name, default)
        return (ret)
    def readValue(self, key, tx):
        """Read a value in the context of this transaction"""
        # NOTE(review): the fetched value is discarded and None is returned
        # implicitly — fine for a read benchmark, but confirm callers never
        # need the value.
        self.dbInstance.get(str(key), txn=tx)
    def writeValue(self, key, value, tx):
        """Write a value in the context of this transaction"""
        self.dbInstance.put(str(key), str(value),txn=tx)
    def txBegin(self, willWrite):
        # Begin a transaction with the durability flag chosen in loadConfig.
        # ``willWrite`` is unused here (bsddb3 does not distinguish).
        assert self.env is not None
        tx = self.env.txn_begin(flags = self.syncFlag)
        return tx
    def txCommit(self, tx):
        """Commit the given transaction."""
        assert tx is not None
        tx.commit()
    def txEnd(self, tx):
        """End the given transaction."""
        #Do nothing
    def close(self):
        # Close the DB and environment, then wipe and recreate the store
        # directory so the next run starts clean.
        self.dbInstance.close()
        self.env.close()
        os.system('rm -rf /media/K4/home/perfstore')
        os.system('mkdir /media/K4/home/perfstore')
## CLASS
| [
"[email protected]"
] | |
444f54b404ef06bdb9913c5f1d2595544a062699 | d3b4796742e3936d72806c362e177d61d61ec214 | /9.py | 27d92e387f87fa9712160fb4c8c24b827c1f1442 | [] | no_license | YuhuiDai/Project-Euler | 3d6cbf2aad92307a3e0ab6a4c2470d00f86f6092 | 60e74112da6bf0a9e2fce5c98e0bb40fe910810a | refs/heads/master | 2021-01-10T14:39:35.317969 | 2016-04-10T02:26:10 | 2016-04-10T02:26:10 | 55,876,857 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 228 | py | def solver():
for a in range(1, 334):
for b in range (2, 335):
c = 1000 - a - b
if a != b and b != c and a != c and a<b<c:
if a**2+b**2 == c**2:
print (a,b,c)
return a*b*c
result = solver()
print (result) | [
"[email protected]"
] | |
aa867b8442ee4cc96fa8e58208a47dd1f8401733 | 0d7888b73b9aff08979fb6d0ff65ffb681a43645 | /django/tulingxueyuan_views/tulingxueyuan_views/urls.py | bce4b564332123a7cb2cf53f950826a821a7f3b1 | [] | no_license | RyanPeking/PythonPractice | 21a7439de4cddba1dd2dc095d0ff3a6319b21f41 | 34c6873ff99c8cbb06564481ce0237dfebf32732 | refs/heads/master | 2020-05-17T19:36:14.746740 | 2019-08-05T01:37:54 | 2019-08-05T01:37:54 | 183,918,661 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 505 | py | from django.conf.urls import include, url
from django.contrib import admin
from teacher_app import views as v
# URL routing table: maps request paths to view callables.
urlpatterns = [
    # Examples:
    # url(r'^$', 'tulingxueyuan_views.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),
    url(r'^admin/', include(admin.site.urls)),  # Django admin site
    url(r'^teacher/', v.teacher),  # teacher_app views (imported as `v`)
    url(r'^render2_test/', v.render2_test),  # template-rendering demos
    url(r'^render3_test/', v.render3_test),
    url(r'^render4_test/', v.render4_test),
    url(r'^get404/', v.get404)  # deliberate 404 demo view
]
| [
"[email protected]"
] | |
84b44c7ea2a5643257246c034170c1cbf678694f | cced3f59471c78ea68a53f5bf7e875033681ac8d | /benchmark.py | ae7333fe3407b09d43244fdafc2fbd00039dcd4d | [] | no_license | samuel1208/facepp-python-sdk | 31164642a68b95cc20dab4abf7ce499450a61f17 | cfade875bf4b98a5d444f5a2ec89608f207c1937 | refs/heads/master | 2021-01-18T01:51:05.104146 | 2014-03-23T14:04:00 | 2014-03-23T14:04:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,020 | py | #!/usr/bin/env python2
import sys, os
import getopt
import datetime as dt
try:
import xml.etree.cElementTree as ET
except ImportError:
import xml.etree.ElementTree as ET
# Module-level accumulators, mutated in place by cal_detect_ratio().
# "bm_*"  counters: ground-truth (benchmark) totals.
# "res_*" counters: detector results, split into pos(itive)/neg(ative) matches.
bm_total_face_num = 0
bm_total_male_num = 0
bm_total_female_num = 0
res_total_pos_face_num = 0
res_total_neg_face_num = 0
res_total_pos_male_num = 0
res_total_neg_male_num = 0
res_total_pos_female_num = 0
res_total_neg_female_num = 0
def usage():
    """Print the command-line help text for this benchmark script."""
    help_lines = (
        '----------------------------------------------------------------',
        '[[Usage]]::evaluate the face detector',
        '\t./benchmark.py [Paras] benchmark_path_list res_path_list',
        '[[Paras]]::',
        '\t--help/-h : Print usage info',
        '----------------------------------------------------------------',
    )
    for help_line in help_lines:
        print(help_line)
def is_true_face(bm_face_list, rect):
    # Stub: meant to decide whether detection `rect` matches a ground-truth
    # face in `bm_face_list`. Currently always reports "no match", so every
    # detection would be counted as a false positive.
    # Returns (matched: bool, gender: str) -- gender of the matched
    # benchmark face, or the literal string "None" when unmatched.
    return (False, "None")
def cal_detect_ratio(bm_xml_path, res_xml_path):
    # Compare one benchmark XML against one detector-result XML and fold the
    # outcome into the module-level counters.
    # NOTE: this function is visibly unfinished -- the benchmark file is
    # parsed but never analysed, and the "Judge if a false detection"
    # sections below are empty, so no counter is actually updated yet.
    global bm_total_face_num
    global bm_total_male_num
    global bm_total_female_num
    global res_total_pos_face_num
    global res_total_neg_face_num
    global res_total_pos_male_num
    global res_total_neg_male_num
    global res_total_pos_female_num
    global res_total_neg_female_num
    bm_tree = ET.parse(bm_xml_path)
    bm_root = bm_tree.getroot()  # parsed but unused so far
    res_tree = ET.parse(res_xml_path)
    res_root = res_tree.getroot()
    ### Get the benchmark first
    ### Analyse the res file
    for face in res_root:
        x, y, width, height = 0, 0, 0, 0
        gender = ''
        #get pos first
        for attr in face:
            # 'postion' (sic) must match the tag spelling used by the
            # XML writer -- do not "fix" it without checking the producer.
            if 'postion' != attr.tag:
                continue
            for pos in attr:
                # WARNING: eval() on XML text is unsafe for untrusted files
                # and slow; int()/float() would be the safe equivalent.
                if 'x' == pos.tag:
                    x = eval(pos.text)
                elif 'y' == pos.tag:
                    y = eval(pos.text)
                elif 'width' == pos.tag:
                    width = eval(pos.text)
                elif 'height' == pos.tag:
                    height = eval(pos.text)
            break
        ## Judge if a false detection
        ## get gendet info
        for attr in face:
            if 'gender' != attr.tag:
                continue
            for gen in attr:
                if 'value' != gen.tag:
                    continue
                gender = gen.text
                break
        ## Judge if a false detection
def main():
    # Entry point: parse command-line options, read the two path-list files
    # and run cal_detect_ratio() on every benchmark/result pair whose file
    # names match.
    if len(sys.argv) < 2:
        usage()
        return
    try:
        opts, args=getopt.getopt(sys.argv[1:],
                                 "h", ["help"])
    except getopt.GetoptError:
        print("ERROR:: Errors occur in getting option Paras")
        usage()
        return
    bIsHelp = False
    for op, arg in opts:
        if op in ("--help","-h"):
            bIsHelp = True
        else:
            continue
    if bIsHelp:
        usage()
        return
    try:
        bm_path_list = args[0]
        res_path_list = args[1]
        # Bare `raise` with no active exception is used as a crude
        # "goto the except block" -- it raises, and the bare except
        # below turns it into the error message.
        if not os.path.exists(bm_path_list):
            raise
        if not os.path.exists(res_path_list):
            raise
    except:
        print("ERROR::Please input the right 'benchmark_path_list' and 'jpg_path_list' Paras")
        return
    bm_file = open(bm_path_list, 'r')
    bm_list = bm_file.readlines()
    res_file = open(res_path_list, 'r')
    res_list = res_file.readlines()
    bm_file.close()
    res_file.close()
    # De-duplicate both lists; note set() discards the original order.
    bm_list = list(set(bm_list))
    res_list = list(set(res_list))
    total_bm_face_num = 0
    total_res_face_num = 0
    # Pair files by basename; every result file with a matching name is
    # processed (no break, so duplicate basenames are all evaluated).
    for bm in bm_list:
        bm = bm.strip()
        bm_path, bm_name = os.path.split(bm)
        bNotFound = True
        for res in res_list:
            res = res.strip()
            res_path, res_name = os.path.split(res)
            if res_name != bm_name:
                continue
            cal_detect_ratio(bm, res)
            bNotFound = False
        if bNotFound:
            print("WARNNING::No File Match %s"%(bm))
# Run the evaluation only when executed as a script.
if '__main__' == __name__:
    main()
| [
"[email protected]"
] | |
f966c3c04a0ff19a2cee9c8acb70774a26f66891 | 897c7501532e2d9fdd8d9531b7fdb40df6e49e8d | /build_distance_matrix.py | ee63d39040f85c1602e1ed5aef6e72dc81e2a175 | [] | no_license | martinkang93/hpa_image_analysis | 36da612aab06a1cc1ecacd79acacd78ac42f524f | aeb850939444b2e40ca0b6930b9fc0d7604ce869 | refs/heads/master | 2021-04-22T16:36:01.992178 | 2020-05-16T21:30:41 | 2020-05-16T21:30:41 | 249,858,473 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,694 | py | from utils.utils import img_load_aws, save_patch
from utils.image_preproc import patch_generator
from feature_extraction import InceptionV3Vectorizer
import numpy as np
import scipy
from tqdm import tqdm
import pandas as pd
import os
import pickle as pkl
import json
# Top-level pipeline: for every gene/antibody pair in the organ's CSV,
# embed each image with InceptionV3, cache the mean patch embedding on
# disk, and save a pairwise cosine-distance table per antibody.
img_vectorizer = InceptionV3Vectorizer()
organ = 'colon'
# Ensure the per-organ output directories exist.
if not os.path.exists('/data/hpa/{}/distance_matrices'.format(organ)):
    os.makedirs('/data/hpa/{}/distance_matrices'.format(organ))
if not os.path.exists('/data/hpa/{}/embeddings'.format(organ)):
    os.makedirs('/data/hpa/{}/embeddings'.format(organ))
df = pd.read_csv('./data/{}_enhanced.csv'.format(organ))
# error_log persists URLs of images that could not be processed, so reruns
# skip them. NOTE: .format(organ) on a literal with no placeholder is a
# no-op; the path is always './error_log.pkl'. The open() handles passed
# to pkl.load/pkl.dump are never closed explicitly.
if not os.path.exists('./error_log.pkl'):
    error_log = []
else:
    error_log = pkl.load(open('./error_log.pkl'.format(organ), 'rb'))
for gene in tqdm(df['gene'].unique()):
    gene_df = df[df['gene']==gene]
    for antibody in gene_df['antibody'].unique():
        # Skip antibodies whose distance matrix was already computed.
        if os.path.exists('/data/hpa/{}/distance_matrices/{}.npy'.format(organ, gene+'-'+antibody)): continue
        antibody_df = gene_df[gene_df['antibody']==antibody]
        vector_list = []
        file_list = []
        for index, row in antibody_df.iterrows():
            filename = row['s3_aws_urls'].split('/')[-1].split('.')[0]
            if row['s3_aws_urls'] in error_log:
                continue
            # Reuse a cached embedding when present; otherwise download,
            # patch, embed and average.
            if os.path.exists('/data/hpa/{}/embeddings/{}.npy'.format(organ, filename)):
                mean_vector = np.load('/data/hpa/{}/embeddings/{}.npy'.format(organ, filename))
            else:
                img = img_load_aws(row['s3_aws_urls'])
                # Reject missing or non-RGB images and remember the failure.
                if (img is None) or (len(img.shape)!=3):
                    error_log.append(row['s3_aws_urls'])
                    pkl.dump(error_log, open('./error_log.pkl', 'wb'))
                    continue
                patch_list = patch_generator(img, (299, 299))
                if not len(patch_list)>0:
                    error_log.append(row['s3_aws_urls'])
                    # NOTE(review): path here lacks the './' prefix used
                    # elsewhere -- same file only when CWD is stable.
                    pkl.dump(error_log, open('error_log.pkl', 'wb'))
                    continue
                features = img_vectorizer.generate_vectors(patch_list)
                # One embedding per image: mean over its 299x299 patches.
                mean_vector = np.mean(features, axis=0)
            vector_list.append(mean_vector)
            file_list.append(row['s3_aws_urls'])
            if not os.path.exists('/data/hpa/{}/embeddings/{}.npy'.format(organ, filename)):
                np.save('/data/hpa/{}/embeddings/{}.npy'.format(organ, filename), mean_vector)
        if len(vector_list) == 0:
            continue
        # Full symmetric cosine-distance matrix between all images of this
        # antibody, then flattened into [url_i, url_j, distance] triples.
        distance_matrix = scipy.spatial.distance.cdist(np.array(vector_list), np.array(vector_list), metric='cosine')
        distance_pairs = []
        for i in range(len(file_list)):
            for j in range(len(file_list)):
                distance_pairs.append([file_list[i], file_list[j], distance_matrix[i][j]])
        np.save('/data/hpa/{}/distance_matrices/{}.npy'.format(organ, gene+'-'+antibody), distance_pairs)
        # np.save('./data/distance_matrices/{}.npy'.format(gene+'-'+antibody), distance_matrix)
| [
"[email protected]"
] | |
91aa630e5b8992a683ccb7f0b84b1a9309810bf1 | 06e6e2fc2b3755f90e09bece421e1a40b2d79a10 | /D3/TestRequests3_teacher.py | 88633a9eb3ad4ebf44f3b5f65957520aa236b477 | [] | no_license | MrBearTW/TCFST | 5a5942c15ca51754e317f8de3f5acd08eee758f9 | 25d50fc56a13cf8a68e3fd42d5cffc435c255162 | refs/heads/master | 2022-12-11T00:43:09.430322 | 2019-06-01T15:47:18 | 2019-06-01T15:47:18 | 102,337,507 | 0 | 0 | null | 2022-12-07T23:27:56 | 2017-09-04T08:28:14 | Python | UTF-8 | Python | false | false | 1,325 | py | import requests
from pymongo import MongoClient
REST_EU_ROOT_URL = 'http://restcountries.eu/rest/v1'
def get_mongo_database(db_name, host='localhost',port=27017,username=None,password=None):
    """Return a handle to MongoDB database `db_name`.

    When both username and password are supplied the client authenticates
    through a mongodb:// URI; otherwise it connects anonymously to
    host:port.
    """
    if username and password:
        uri = 'mongodb://%s:%s@%s/%s' % (username, password, host, db_name)
        client = MongoClient(uri)
    else:
        client = MongoClient(host, port)
    return client[db_name]
def REST_country_request(field='all',name=None,params=None):
    """Query the REST Countries v1 API and return the raw response.

    field='all' fetches the complete country list; any other field builds
    the request as <root>/<field>/<name>. Raises Exception on any non-200
    HTTP status.
    """
    headers = {'User-Agent':'Mozilla/5.0'}
    params = params or {}
    if field == 'all':
        return requests.get(REST_EU_ROOT_URL+'/all')
    url = '%s/%s/%s'%(REST_EU_ROOT_URL, field, name)
    print('URL : '+url)
    response = requests.get(url, params=params, headers=headers)
    if response.status_code != 200:
        raise Exception('Request failed with status code '+ str(response.status_code))
    return response
# Demo: open the local 'nobel_prize' database and query the cached
# country_data collection. The commented lines show how the collection
# was originally populated from the REST Countries API.
db_nobel = get_mongo_database('nobel_prize')
col = db_nobel['country_data']
#response = REST_country_request()
#col.insert_many(response.json())
#print(list(col.find({'currencies':{'$in':['USD']}})))
#print(list(col.find({'currencies':{'$in':['TWD']}})))
print(list(col.find({'name':{'$in':['Syria']}})))
# MongoDB指令 https://docs.mongodb.com/manual/reference/operator/query-comparison/ | [
"[email protected]"
] | |
08fe9b7806639360d7027be99afefb7df5a948af | 8b669c6c6e1a5c30fe15046e3bb7a1053826fbea | /game.py | e574da9b67750de74c4ecdcecfe06bc88d87d876 | [] | no_license | eetukarvonen/Battleship | 34bfdb6807199b90a3349bde12fe0a4c049d5455 | 573bea1570957118185d6e834ed2fa2e0bb35de1 | refs/heads/master | 2021-04-08T00:54:23.681447 | 2020-03-26T11:05:32 | 2020-03-26T11:05:32 | 248,721,658 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,886 | py | """
Battleship boardgame
@author: Eetu Karvonen
Notes:
Empty square = ' '
Ship square = 'a' or 'b' or 'c'...
orient horizontal = 0
orient vertical = 1
"""
import random
import time
import ai
# Single mutable game state shared by every function in this module.
game = {
    "boards": {
        "boardP": [],  # player's own 10x10 grid: ships + computer's shots
        "boardC": [],  # computer's hidden 10x10 grid (real ship positions)
        "boardC_show": []  # player's "radar": revealed squares of boardC
    },
    "player_ships": {  # ship letter -> unsunk squares left (initially the length)
        "a": 2,
        "b": 3,
        "c": 3,
        "d": 4,
        "e": 5
    },
    "computer_ships": {  # same shape as player_ships, for the computer fleet
        "a": 2,
        "b": 3,
        "c": 3,
        "d": 4,
        "e": 5
    },
    "turn": 0, # Computers turn 0, players turn 1
    "player_last": "", # Used to print last moves, 'missed', 'hit' or 'hit and sunk'
    "com_last": ""
}
def create_board():
    """Return a fresh 10x10 grid of ' ' (empty) squares.

    Each row is a new list, so mutating one square never leaks into
    another row or another board.
    """
    return [[' ' for _ in range(10)] for _ in range(10)]
def ai_place_ships():
    """Randomly place every computer ship on boardC.

    For each ship, keep drawing a random (column, row, orientation) until
    validate_placement() accepts it, then stamp the ship onto the board.
    """
    board = game["boards"]["boardC"]
    for ship_id in game["computer_ships"]:
        length = game["computer_ships"][ship_id]
        while True:
            col = random.randint(0, 9)
            row = random.randint(0, 9)
            direction = random.randint(0, 1)  # 0 = horizontal, 1 = vertical
            if validate_placement(col, row, direction, length, board):
                break
        place_ship(col, row, direction, ship_id, board)
def player_place_ships():
    """Interactively place every player ship on boardP.

    ask_placement() keeps prompting until the player supplies a legal
    spot, so the ship can be stamped directly afterwards; the board is
    reprinted after each placement.
    """
    for ship_id, length in game["player_ships"].items():
        print("Where do you want to place a ship sized ", length)
        col, row, direction = ask_placement(length)  # validated inside
        place_ship(col, row, direction, ship_id, game["boards"]["boardP"])
        print_board()
def place_ship(x, y, orient, ship, board, size=None):
    """Stamp `ship`'s letter onto `board` starting at (x, y).

    Call only after validate_placement() has approved the position.

    Args:
        x, y: 0-based column/row of the ship's first square.
        orient: 0 = horizontal (grows right), 1 = vertical (grows down).
        ship: single-letter ship id ('a'..'e') written into each square.
        board: grid (list of row lists), mutated in place.
        size: ship length. Defaults to game["player_ships"][ship] for
            backward compatibility. NOTE: the original always used the
            *player* ship table even for the computer's board, which only
            worked because both fleets start with identical sizes and
            placement happens before any square is sunk -- pass `size`
            explicitly to avoid that coupling.

    Returns:
        The (mutated) board, for convenience.
    """
    if size is None:
        size = game["player_ships"][ship]
    if orient == 0:
        for i in range(size):
            board[y][x + i] = ship
    elif orient == 1:
        for i in range(size):
            board[y + i][x] = ship
    return board
def validate_placement(x, y, orient, size, board):
    """Return True if a ship of `size` squares fits at (x, y) on `board`.

    A placement is valid when the ship lies fully inside the board and
    every square it would cover is currently empty (' ').

    Args:
        x, y: 0-based column/row of the ship's first square.
        orient: 0 = horizontal, 1 = vertical (anything else: no bounds or
            occupancy check, matching the original behaviour).
        size: ship length in squares.
        board: grid as a list of row lists.

    Generalized: the original hard-coded a 10x10 board; the bounds are now
    derived from the board itself, which is identical for 10x10 callers.
    """
    rows = len(board)
    cols = len(board[0]) if rows else 0
    if x < 0 or y < 0:
        return False
    if orient == 0:
        if x + size > cols:
            return False
        return all(board[y][x + i] == ' ' for i in range(size))
    elif orient == 1:
        if y + size > rows:
            return False
        return all(board[y + i][x] == ' ' for i in range(size))
    return True
def ask_placement(size):
    # Ask player where to place a ship and which direction
    # Loops until a fully valid placement is entered, then returns
    # (x, y, orient) with 0-based coordinates and orient 0=horiz/1=vert.
    # Validation errors are raised as exceptions and reported by the
    # handlers at the bottom, after which the prompt repeats.
    while True:
        try:
            # Player enters 1-based coordinates; convert to 0-based.
            x_coord = int(input("Give x-coordinate: ")) - 1
            y_coord = int(input("Give y-coordinate: ")) - 1
            orient = input("Place ship (v)ertically or (h)orizontally: ")
            print(" ")
            if orient != 'v' and orient != 'h':
                raise Exception("Invalid input, enter v or h")
            if orient == 'v':
                orient = 1
            else:
                orient = 0
            if x_coord > 9 or x_coord < 0 or y_coord > 9 or y_coord < 0:
                raise Exception("Invalid input. Please use values between 1 to 10 only.")
            if validate_placement(x_coord, y_coord, orient, size, game["boards"]["boardP"]):
                return x_coord, y_coord, orient
            else:
                raise Exception("Can't place a ship there, try again")
        except ValueError:
            # int() failed: non-numeric coordinate input.
            print("Invalid input, enter a number between 1 and 10")
            continue
        except Exception as e:
            # One of the deliberate validation exceptions above.
            print(e)
            continue
def computer_guess(board, comp_ai):
    # Let the AI fire one shot at the player's board.
    # The AI works in 1-based coordinates (hence the -1/+1 conversions),
    # the board in 0-based. Marks '*' for a miss and '$' for a hit,
    # updates game["com_last"], and prints the outcome.
    move = comp_ai.pick_move()
    x = move[0] - 1
    y = move[1] - 1
    response = make_move(board, x, y)
    if response == 'miss':
        board[y][x] = '*'
        game["com_last"] = "Missed"
    elif response == 'hit':
        # Tell the AI where it hit so it can target neighbouring squares.
        comp_ai.hit(x+1, y+1)
        if check_sunk(board, x, y, "player_ships"):
            game["com_last"] = "Hit and sunk"
            comp_ai.hits = []  # sunk: clear the AI's follow-up targets
        else:
            game["com_last"] = "Hit"
        board[y][x] = '$'
    print("Computer: ", game["com_last"]) # Prints 'Missed', 'Hit', or 'Hit and sunk'
    return comp_ai
def player_guess(board):
    # Prompt the player for one shot at the computer's board and apply it.
    # Loops until a fresh square is hit or missed; already-fired squares
    # re-prompt. Both the real board and the player's radar
    # (boardC_show) are updated, and game["player_last"] records the result.
    while True:
        try:
            # 1-based input converted to 0-based indices.
            x = int(input("Give x-coordinate you want to hit: ")) - 1
            y = int(input("Give y-coordinate you want to hit: ")) - 1
            if x < 0 or x > 9 or y < 0 or y > 9:
                raise ValueError
        except ValueError:
            print("Invalid input, enter a number between 1 and 10")
            continue
        response = make_move(board, x, y)
        if response == 'again':
            print("You have already fired there, try again")
            continue
        elif response == 'miss':
            board[y][x] = '*'
            game["boards"]["boardC_show"][y][x] = '*'
            game["player_last"] = "Missed"
            break
        elif response == 'hit':
            # check_sunk() must run before the square is overwritten,
            # since it reads the ship letter at (x, y).
            if check_sunk(board, x, y, "computer_ships"):
                game["player_last"] = "Hit and sunk"
            else:
                game["player_last"] = "Hit"
            board[y][x] = '$'
            game["boards"]["boardC_show"][y][x] = '$'
            break
def make_move(board, x, y):
    """Classify a shot at (x, y) without modifying the board.

    Returns 'miss' for an empty square, 'again' for a square already
    fired at ('*' or '$'), and 'hit' for any ship letter.
    """
    square = board[y][x]
    if square in ('*', '$'):
        return 'again'
    return 'miss' if square == ' ' else 'hit'
def check_sunk(board, x, y, opponent):
    """Register a hit on the ship at (x, y) and report whether it sank.

    Decrements the remaining-square count for that ship's letter in
    game[opponent] ("player_ships" or "computer_ships"). Returns True
    when the count reaches zero. Must be called before the board square
    is overwritten with the hit marker.
    """
    ship_id = board[y][x]
    remaining = game[opponent][ship_id] - 1
    game[opponent][ship_id] = remaining
    return remaining == 0
def print_board():
    """Print the player's own grid ("Your board"), one row per line."""
    print("Your board")
    for line in game["boards"]["boardP"]:
        print(line)
    print(" ")
def print_radar():
    """Print the player's radar view of the computer's board."""
    print("Your radar")
    for line in game["boards"]["boardC_show"]:
        print(line)
    print(" ")
def game_on():
    """Return True while both fleets still have at least one unsunk square.

    check_sunk() decrements the per-ship square counts, so a fleet is
    eliminated exactly when all of its counts have reached zero.

    Fix: the original accumulated the counts into a variable named `sum`,
    shadowing the builtin; any() over the (non-negative) remaining counts
    is equivalent and clearer.
    """
    if not any(game["computer_ships"].values()):
        return False
    if not any(game["player_ships"].values()):
        return False
    return True
def play():
    # Main game loop: alternate computer and player turns until one fleet
    # is fully sunk (game_on() returns False). The starting side is drawn
    # at random and stored in game["turn"] (0 = computer, 1 = player).
    game["turn"] = random.randint(0,1) # Randomly choose which start
    comp_ai = ai.Ai() # Init ai
    if game["turn"] == 0:
        print("Computer starts")
    else:
        print("You start")
    while game_on():
        if game["turn"] == 0:
            # Computer's turn
            comp_ai = computer_guess(game["boards"]["boardP"], comp_ai)
            game["turn"] = 1
        else:
            # Player's turn
            print(" ")
            print_board()
            print_radar()
            player_guess(game["boards"]["boardC"])
            game["turn"] = 0
            print("You: ", game["player_last"]) # Prints 'Missed', 'Hit', or 'Hit and sunk'
if __name__ == "__main__":
    # Set up fresh boards, place both fleets, run the game, then report
    # the winner.
    game["boards"]["boardP"] = create_board()
    game["boards"]["boardC"] = create_board()
    game["boards"]["boardC_show"] = create_board()
    print("Welcome to play battleships")
    print(" ")
    ai_place_ships()
    player_place_ships()
    play()
    print("Game over")
    # play() flips "turn" *after* each move, so turn == 0 means the
    # player moved last (and therefore sank the final ship).
    if game["turn"] == 0:
        print("Player won")
        print("")
        print_radar()
    else:
        print("Computer won")
        print("")
        print_board()
| [
"[email protected]"
] | |
787c2041427a7659e5163b4812c8e996d8c7b7f6 | 76c7b5f702f1122a4634298cae86a7f27a11ec7b | /Problem7.py | 4a94da2c209d444e2243545f844acbcf8023545a | [] | no_license | gtonra89/fundamentals-repositry-sheet- | 2ea1ea2460b2b2ba8c7a518a75233de4b7f56ea9 | d8f9efad862c431826efe926efec27b0b69b28aa | refs/heads/master | 2021-05-16T07:56:13.729672 | 2017-11-14T14:30:22 | 2017-11-14T14:30:22 | 103,947,793 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 544 | py | # Program to check if a string is a palindrome or not
# change this value for a different output
def palTest(str):
    """Print whether *str* reads the same forwards and backwards.

    The comparison is case-insensitive (casefold). Prints the string being
    checked, then either "It is palindrome" or "It is not palindrome".
    Returns None.

    NOTE: the parameter name shadows the builtin `str`; it is kept so the
    call signature stays identical for existing callers.
    """
    # casefold() gives a more aggressive caseless form than lower().
    myString = str.casefold()
    print("string to check :", myString)
    # A string is a palindrome iff it equals its own reverse. Slicing with
    # [::-1] replaces the original list(reversed(...)) round-trip, which
    # built two throwaway lists just to compare.
    if myString == myString[::-1]:
        print("It is palindrome")
    else:
        print("It is not palindrome")
myString = 'abaBA' #declare string
palTest(myString) #call function pass string | [
"[email protected]"
] | |
9a66d5b0dedcb4a86a41e85de4805ee83352a0da | e6dab5aa1754ff13755a1f74a28a201681ab7e1c | /.parts/lib/django-1.3/tests/regressiontests/db_typecasts/__init__.py | 5c336d269a796c4bded21eea80d57c2199e00548 | [] | no_license | ronkagan/Euler_1 | 67679203a9510147320f7c6513eefd391630703e | 022633cc298475c4f3fd0c6e2bde4f4728713995 | refs/heads/master | 2021-01-06T20:45:52.901025 | 2014-09-06T22:34:16 | 2014-09-06T22:34:16 | 23,744,842 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 112 | py | /home/action/.parts/packages/googleappengine/1.9.4/lib/django-1.3/tests/regressiontests/db_typecasts/__init__.py | [
"[email protected]"
] | |
2bd7c5539ff61bfdf0ea6874bb9556331e936d1f | 41dc956684872313ed19824453088ea5bffd732c | /support/obstacle.py | 2425d54380a75ffbf139cd2473b13189a1b4d76c | [] | no_license | NicholasM0rris/Canadarm-Continuous-Motion-Planning-of-robot-arm | aefcb25a6e7ebca8f3c49830469b724c9c0022e9 | 2dc6369e2b3c0969eeee721b7d97d5585bb46cf7 | refs/heads/master | 2020-07-26T19:15:12.465134 | 2019-09-30T08:28:02 | 2019-09-30T08:28:02 | 208,742,987 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 680 | py |
class Obstacle:
    """
    Class representing a rectangular obstacle. You may add to this class if you wish, but you should not modify the
    existing functions or variable names.

    COMP3702 2019 Assignment 2 Support Code

    Last updated by njc 24/08/19
    """

    def __init__(self, x1, y1, x2, y2):
        """Build an axis-aligned rectangle from corner (x1, y1) and the
        opposite corner (x2, y2), with x1 < x2 and y1 < y2.

        Raises AssertionError for a degenerate/inverted rectangle. (Note:
        asserts are stripped under `python -O`, so do not rely on them for
        untrusted input validation.)
        """
        self.x1 = x1
        self.y1 = y1
        self.x2 = x2
        self.y2 = y2
        # Fixed typo in the failure messages ("mush" -> "must").
        assert x1 < x2, "For a valid obstacle, must have x1 < x2"
        assert y1 < y2, "For a valid obstacle, must have y1 < y2"
        # Corners walk around the rectangle starting at (x1, y1); edges
        # pair each corner with the next, wrapping back to the start.
        self.corners = [(x1, y1), (x1, y2), (x2, y2), (x2, y1)]
        self.edges = [(self.corners[i], self.corners[(i + 1) % 4]) for i in range(4)]
| [
"[email protected]"
] | |
a17386995cdeb448d6499cc0e565215683fe5218 | 9e694d17b32d60139da026c2d46ef01219eca2b7 | /moderator/views.py | db14ad7067c66c3a6cd8a5c5d9960067458d2a8b | [] | no_license | bronejeffries/AutoResponseSystem-api | f599035f25967bd8015d214c93e39d047c53c99b | aee54deaf0f0879820a7d86e071d935210a7b6b8 | refs/heads/master | 2020-04-25T17:52:48.415587 | 2019-09-13T05:25:45 | 2019-09-13T05:25:45 | 172,964,405 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 804 | py | from django.shortcuts import render
from django.http import HttpResponseRedirect
from django.urls import reverse
from django.contrib.auth.decorators import login_required
from Ars.decorators import Get_check
from Ars.models import Session
# Create your views here.
@Get_check
@login_required
def index(request):
    # Moderator dashboard: list the logged-in user's sessions together
    # with counts of running ("active") and stopped ("inactive") ones.
    context = {}
    current_user_id=request.user.id
    user_sessions = Session.objects.filter(owner=current_user_id)
    context['sessions']=user_sessions
    # Status values match those written elsewhere in the app
    # ("stopped" / "running").
    inactive_session_count = user_sessions.filter(status="stopped").count()
    active_session_count = user_sessions.filter(status='running').count()
    context['active_count']=active_session_count
    context['inactive_count']=inactive_session_count
    # print(context)
    return render(request, 'moderator/index.html', context)
| [
"[email protected]"
] | |
db29248ffcdd3c9a0e1b7178cbd41c0c402288b3 | e15e9160e4627c5e05f87706ffed90a581653a37 | /src/googlenet.py | 705f6d1c4d4d72c3fab7d5b33f31b4ba908327b2 | [
"Apache-2.0"
] | permissive | chhwang/cmcl | eab05d3f070c2a0e4876b99b96f944ba4668a1e9 | 57f41f4b166b8544c56580f9d32ee98b997e3f59 | refs/heads/master | 2021-01-21T19:13:21.781746 | 2018-08-04T05:04:48 | 2018-08-04T05:04:48 | 92,129,458 | 15 | 5 | Apache-2.0 | 2018-08-04T05:04:49 | 2017-05-23T04:34:17 | Python | UTF-8 | Python | false | false | 3,610 | py | # Author: Kimin Lee ([email protected]), Changho Hwang ([email protected])
# GitHub: https://github.com/chhwang/cmcl
# ==============================================================================
import tensorflow as tf
import layers
from feature_sharing import feature_sharing
FLAGS = tf.app.flags.FLAGS
# Per-dataset training schedule:
#   MAX_STEPS  - total number of training steps
#   VAR_LIST   - learning-rate values, one per schedule stage
#   PIVOT_LIST - step at which each learning-rate stage begins
#   WD_FACTOR  - weight-decay coefficient
if FLAGS.dataset == 'cifar':
    MAX_STEPS = 39063
    VAR_LIST = [0.1, 0.02, 0.004, 0.0008]
    PIVOT_LIST = [0, 9766, 19532, 29297]
    WD_FACTOR = 0.0005
elif FLAGS.dataset == 'svhn':
    MAX_STEPS = 58200
    VAR_LIST = [0.1, 0.02, 0.004, 0.0008]
    PIVOT_LIST = [0, 14550, 29100, 43650]
    WD_FACTOR = 0.0005
else:
    raise ValueError('Not supported dataset: %s' % FLAGS.dataset)
def OPTIMIZER(lr):
    """Return the shared optimizer: SGD with Nesterov momentum 0.9."""
    return tf.train.MomentumOptimizer(lr, 0.9, use_nesterov=True)
def inference(images):
    """Build the GoogLeNet-18 multi-model inference graph.

    Constructs FLAGS.num_model parallel model towers over the same input
    batch and (optionally) stochastically shares their hidden features
    right before the first pooling layer.

    Args:
      images: A batch of images to process. Shape [batch_size,32,32,3]

    Returns:
      A list with one logits tensor (shape [batch_size, 10]) per model.
    """
    # Train/eval switch, placed in the graph collection by the caller.
    is_train=tf.get_collection('is_train')[0]
    def inception(name, l, wf):
        """Inception module.

        Four parallel branches (pool / 5x5 / 3x3 / 1x1) are concatenated
        along the channel axis, batch-normalized, and passed through ReLU.

        Args:
          name: Scope name of this function.
          l: Output of previous layer.
          wf: Channel width factor of this module.
        """
        with tf.variable_scope(name):
            # Pool branch: 2x2 max-pool (stride 1) then 1x1 projection.
            branchpool = tf.nn.max_pool(l, [1,2,2,1], [1,1,1,1], 'SAME')
            branchpool = layers.conv('conv_pool', branchpool, 32*wf, kernel_size=1)
            # 5x5 branch: 1x1 bottleneck then 5x5 conv.
            branch5x5 = layers.conv('conv_5x5_0', l, 16*wf, kernel_size=1)
            branch5x5 = tf.nn.relu(branch5x5)
            branch5x5 = layers.conv('conv_5x5_1', branch5x5, 32*wf, kernel_size=5)
            # 3x3 branch: 1x1 bottleneck then 3x3 conv.
            branch3x3 = layers.conv('conv_3x3_0', l, 32*wf, kernel_size=1)
            branch3x3 = tf.nn.relu(branch3x3)
            branch3x3 = layers.conv('conv_3x3_1', branch3x3, 64*wf, kernel_size=3)
            # 1x1 branch.
            branch1x1 = layers.conv('conv_1x1_0', l, 64*wf, kernel_size=1)
            branch1x1 = tf.nn.relu(branch1x1)
            # Concatenate along channels (axis 3: NHWC layout).
            cc = tf.concat([branch1x1,branch3x3,branch5x5,branchpool], 3)
            cc = layers.batchnorm('bn_0', cc, is_train)
            return tf.nn.relu(cc)
    # GoogLeNet-18 inference
    with tf.variable_scope('inference'):
        features = []
        # Stem: each model gets its own conv/bn/relu over the raw images.
        for m in range(FLAGS.num_model):
            l = images
            with tf.variable_scope('model_%d' % m):
                l = layers.conv('conv_init', l, 32, kernel_size=3)
                l = layers.batchnorm('bn_init', l, is_train)
                l = tf.nn.relu(l)
            features.append(l)
        # stochastically share hidden features right before the first pooling
        if FLAGS.feature_sharing:
            features = feature_sharing(features)
        # Body: inception stacks per model, reusing each model's scope.
        for m in range(FLAGS.num_model):
            l = features[m]
            with tf.variable_scope('model_%d' % m):
                l = tf.nn.max_pool(l, [1,2,2,1], [1,2,2,1], 'VALID')
                l = inception('inception_1a', l, 1)
                l = inception('inception_1b', l, 2)
                l = tf.nn.max_pool(l, [1,2,2,1], [1,2,2,1], 'VALID')
                l = inception('inception_2a', l, 2)
                l = inception('inception_2b', l, 2)
                l = inception('inception_2c', l, 2)
                l = inception('inception_2d', l, 4)
                l = tf.nn.max_pool(l, [1,2,2,1], [1,2,2,1], 'VALID')
                l = inception('inception_3a', l, 4)
                l = inception('inception_3b', l, 4)
                # global average pooling
                l = tf.reduce_mean(l, [1, 2])
                l = layers.fully_connected('fc_0', l, 10)
                features[m] = l
    return features
| [
"[email protected]"
] | |
81fe9826eff3fbe41c176a0739e05a9336a8ed00 | 887e3929398db35245dfae711afeba5e7e14d26d | /Projects/python101/projects_/continue_1.py | 026bf61efc7d5311629d69e50307926a3d41a924 | [] | no_license | zmcburrows/RPG | e9d38baf0c0f3a655ed3a1f16de1d9993ef0343c | 629a708cd26ea7dd02933425c02a2f15cc5a9e16 | refs/heads/main | 2023-09-04T18:58:25.441050 | 2021-11-22T11:55:16 | 2021-11-22T11:55:16 | 426,614,872 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 300 | py | from page_424_2 import soonhowever
def continue_2():
    """Story beat: prompt the player, and advance to soonhowever() when
    they just press Enter (empty input); any other input ends the scene.
    """
    # The prompt text below is user-facing narrative; it is shown verbatim
    # (including its punctuation quirks), so it must not be edited lightly.
    forwards = input('''
    "There’s no point in stopping now.”
    When you wake you up,
    you continue downhill, braking for water breaks and to record shots of your surroundings.
    ''')
    if forwards == "":
        soonhowever()
| [
"[email protected]"
] | |
7a0964f5f6b521df5c70f2ffa9bbe478c4c51b81 | f256db5b4f10dc5945076a35fa11f8db6fa97603 | /auto_report/jira_data_aly.py | 5a373c4c9e6b6bd619cb1ee53f75b21ebd6c6757 | [] | no_license | weijie-xiao/test_jira_jenkins_report | 207a069cf842a89f7cc559a21e93b76905517985 | 4647831a30eb9108b122b020b240a908a3b95174 | refs/heads/master | 2023-04-15T14:20:38.702438 | 2021-04-18T06:34:03 | 2021-04-18T06:34:03 | 359,063,175 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,698 | py | #!/usr/bin/python
# coding=utf-8
"""
Created on 2018年08月16日
@author: qyke
jira 接口类
"""
import re
import time
from typing import List
from urllib.parse import quote
from jira import JIRA
from auto_report.logger import Log
tag_re = re.compile(r'(?P<git>[email protected]:[\w\.\/\_]+\.git) (?P<tag>[\w\.\/\_]+)')
class HandleJira:
    """Thin wrapper around the `jira` client used by the weekly-report jobs."""

    def __init__(self, url, name, pwd):
        """
        :param url: jira server url
        :param name: login user name
        :param pwd: login password
        """
        # Keep one authenticated client for the lifetime of this object.
        self.jira = JIRA(server=url,
                         auth=(name, pwd))

    def add_comment(self, key, body):
        """Post `body` as a comment on issue `key`.

        :param body: str
        :return: None
        """
        self.jira.add_comment(key, body)  # write the comment

    def status(self, key):
        """Return the current status name of issue `key` as a plain str."""
        issue = self.jira.issue(key)  # reload the issue
        status = issue.fields.status.name
        return str(status)  # return the issue status

    def fix_version(self, key):
        """Return the fix-version names of issue `key` as a list of str."""
        issue = self.jira.issue(key)  # reload the issue
        version_list = issue.fields.fixVersions
        version_name_list = [str(v.name) for v in version_list]
        return version_name_list  # return the issue fix versions

    def get_tags(self, key):
        """Parse the custom tags field (customfield_10714) with `tag_re`.

        :return: list [{'git':'','tag':''}]
        """
        issue = self.jira.issue(key)  # reload the issue
        tags = issue.fields.customfield_10714
        tags = tag_re.finditer(tags)
        tag_list = [i.groupdict() for i in tags]
        return tag_list

    def transitions(self, key, trans_name, trans_field=None):
        """Fire the workflow transition named `trans_name` on issue `key`.

        :param key: jira key name
        :param trans_name: str, name of the workflow transition to trigger
        :param trans_field: dict, extra fields passed along with the transition
        :return: 1 on success, 0 on failure or unknown transition name
        """
        issue = self.jira.issue(key)
        transitions = self.jira.transitions(issue)  # transitions available on this issue
        action_id = 0
        for t in transitions:
            if t['name'].strip() == trans_name.strip():
                action_id = int(t['id'])  # remember the matching transition id
        if action_id:
            try:
                # change the jira issue status
                self.jira.transition_issue(issue, action_id, fields=trans_field)
                result = 1
            except Exception as e:
                # logger.error("jira issue failed: [%s], fields: [%s]" % (self.key, trans_field))
                # logger.error(e)
                Log.log_message("autoWeeklyData", "[ERROR] jira issue failed: [%s], fields: [%s]", key, trans_field)
                Log.log_message("autoWeeklyData", e)
                result = 0
        else:
            # logger.warning("invalid trans_name: %s, jira: %s" %(trans_name, self.key))
            Log.log_message("autoWeeklyData", "[WARNING] invalid trans_name: %s, jira: %s", trans_name, key)
            result = 0
        return result

    def search_bug_by_assignee(self, assignees: List, start, end=0):
        """Count Bug issues created per assignee and per project in a date range.

        :param assignees: list of assignee user names.
            NOTE: mutated in place -- "root" is removed when present.
        :param start: start date, format YYYY-MM-DD
        :param end: end date, format YYYY-MM-DD; defaults to today when falsy
        :return: ({assignee: {project: {"keys": [...], "total": n}}},
                  url-quoted JQL string)
        """
        if 'root' in assignees:
            assignees.pop(assignees.index("root"))
        assignees_str = ",".join(assignees)
        if not end:
            end = time.strftime("%Y-%m-%d", time.localtime(time.time()))
        jql_str = 'issuetype = Bug ' \
                  'AND assignee in (%s) ' \
                  'AND created >= %s AND created <= %s ' \
                  'AND summary !~ "jenkins PIPELINE" ' \
                  'ORDER BY assignee ASC' % \
                  (assignees_str, start, end)
        result = self.jira.search_issues(jql_str=jql_str, fields="key, assignee", maxResults=500, json_result=True)
        # todo paginate when more than maxResults issues match
        # assignee_bug = {}
        assignee_project_bug = {}
        for issues in result.get("issues", []):
            try:
                key = issues["key"]
                # Project key is the part of the issue key before the dash.
                project = self.get_project_detail(key.split("-")[0])
                assignee = issues["fields"]["assignee"]["key"]
                assignee = assignee.split("@")[0]
            except Exception as e:
                Log.log_message("autoWeeklyData", "[ERROR] jira search_bug_by_assignee failed: %s, issues: %s", e,
                                issues)
                continue
            if assignee not in assignee_project_bug:
                # assignee_bug[assignee] = {}
                assignee_project_bug[assignee] = {}
            if project not in assignee_project_bug[assignee]:
                assignee_project_bug[assignee][project] = {"keys": [], "total": 0}
            # assignee_bug[assignee]["keys"].append(key)
            # assignee_bug[assignee]["total"] += 1
            assignee_project_bug[assignee][project]["keys"].append(key)
            assignee_project_bug[assignee][project]["total"] += 1
        return assignee_project_bug, quote(jql_str)

    def get_project_detail(self, project_key):
        """Return the git repository name recorded in the description of
        the project's first component (".../<name>.git" -> "<name>")."""
        projects = self.jira.project_components(project_key)
        # print(projects[0].raw)
        git_project_name = projects[0].raw.get("description").split(":")[-1]
        # Fix: the original used rstrip(".git"), which strips any trailing
        # run of the characters '.', 'g', 'i', 't' and corrupts repository
        # names such as "audit"; strip the literal ".git" suffix instead.
        if git_project_name.endswith(".git"):
            git_project_name = git_project_name[:-len(".git")]
        return git_project_name
# test
if __name__ == '__main__':
    # Ad-hoc smoke test against the production JIRA instance.
    # SECURITY: real credentials are hard-coded and committed here; they
    # should be moved to environment variables / a secret store and the
    # exposed password rotated.
    JIRA_CONFIG = {
        'url': 'https://jira.mobvista.com',
        'name': 'qa_auto_test',
        'pwd': 'pBlvjyXO7dhSDtUd'
    }
    J = HandleJira(JIRA_CONFIG['url'], JIRA_CONFIG['name'], JIRA_CONFIG['pwd'])
    # struct_time for "30 days ago"; indices [0..2] are year/month/day.
    start = time.localtime(time.time() - 86400 * 30)
    # print(J.search_bug_by_assignee(["zhihao.lin"], "%s-%s-%s" % (start[0], start[1], start[2])))
    print(J.get_project_detail("SSFRONT"))
| [
"[email protected]"
] | |
b8a2899cd37bf41766dddcdfb23036f4d5561f02 | 1a783cb87ecb4b23c9525d7f79cae9e3c669542a | /Analysis/scripts/orderparams.py | 3dff71be8b15f59adf868eda48a79ee238919fb5 | [] | no_license | eshamay/Aqueous-Salt-Analysis | c3837f31287816ef5b09b888ebbad3e79c905de7 | 05998f15fc8b972cd1435dd85ccd03eb4fada4af | refs/heads/master | 2021-03-12T19:59:36.853799 | 2009-04-26T04:10:07 | 2009-04-26T04:10:07 | 184,777 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 156 | py | #!/Library/Frameworks/Python.framework/Versions/Current/bin/python
import sys
from OrderParameters import *
# Build the order-parameter analysis from the data file given on the
# command line and render its plots.
o = OrderParameters(sys.argv[1])
o.PlotData()
| [
"[email protected]"
] | |
7c8b63957868f071e3bdc7e98d7f2b764c95dac4 | 7f3e6f8559b089892eb94554a1451ccf1397f3cc | /paint/apps/servcie/views.py | 4c339f9e465cd885043cf7bce15bcbadfac6cdeb | [] | no_license | ivanguk10/paint | 6b8deb5c100aedd23d36a823acfb47d0bcb129bd | 1acf91fc1cc7f4a0e0662c7c66ef01cfe41d7ab3 | refs/heads/main | 2023-03-17T11:20:13.574373 | 2021-03-12T22:00:32 | 2021-03-12T22:00:32 | 302,305,531 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,157 | py | from django.shortcuts import render, get_object_or_404
from django.http import HttpResponse
from .models import *
from .forms import CommentForm
def main(request):
    """Render the landing page template with no extra context."""
    template = 'sto/main.html'
    return render(request, template)
def main1(request):
    """Render the main page with the first four comments (default ordering)."""
    latest = Comment.objects.all()[:4]
    context = {'comm': latest}
    return render(request, 'sto/main1.html', context)
def remont(request):
    """Body-repair services page: lists all services of the repair category.

    Fix: the original used .filter(...).first(), which returns None when no
    matching Service row exists and then crashed with AttributeError on
    `.services`; get_object_or_404 (already imported at the top of this
    file) turns that case into a proper 404 response.
    """
    category = get_object_or_404(Service, service='Кузовной ремонт')
    return render(request, 'sto/remont/remont.html', {'service': category.services.all()})
def paint(request):
    """Render the painting-services page template with no extra context."""
    template = 'sto/paint/paint.html'
    return render(request, template)
def tunning(request):
    """Body-tuning services page: lists all services of the tuning category.

    Fix: the original used .filter(...).first(), which returns None when no
    matching Service row exists and then crashed with AttributeError on
    `.services`; get_object_or_404 (already imported at the top of this
    file) turns that case into a proper 404 response.
    """
    category = get_object_or_404(Service, service='Кузовной тюнинг')
    return render(request, 'sto/tunning/tunning.html', {'tuning': category.services.all()})
def price(request):
    """Price-list page: services of all three categories in one context.

    Fix: the original used .filter(...).first() for each category, which
    returns None when a category row is missing and then crashed with
    AttributeError on `.services`; get_object_or_404 (already imported at
    the top of this file) yields a proper 404 instead.
    """
    repair = get_object_or_404(Service, service='Кузовной ремонт')
    painting = get_object_or_404(Service, service='Покраска')
    tuning = get_object_or_404(Service, service='Кузовной тюнинг')
    context = {
        'service': repair.services.all(),
        'paints': painting.services.all(),
        'tuning': tuning.services.all(),
    }
    return render(request, 'sto/price.html', context)
def comment(request):
    """Comment page: list all comments; on POST also persist the submitted one.

    Fix: the original duplicated the query + render in both the GET and
    POST branches, and any other HTTP method fell through returning None
    (a 500 in Django, since views must return an HttpResponse). The render
    path is now shared; GET/POST behavior is unchanged.

    NOTE(review): fields are read straight from request.POST, so a missing
    key raises MultiValueDictKeyError; validating through CommentForm
    would be safer but would change current behavior — confirm intent.
    """
    if request.method == "POST":
        new_comment = Comment(
            comment_name=request.POST["comment_name"],
            comment_email=request.POST["comment_email"],
            comment_theme=request.POST["comment_theme"],
            comment_body=request.POST["comment_body"],
        )
        new_comment.save()
    comm = Comment.objects.all()
    return render(request, 'sto/comment.html', {'form': CommentForm(), 'comm': comm})
def navbar(request):
    """Render the navbar template with no extra context."""
    template = 'sto/navbar1.html'
    return render(request, template)
def company(request):
    """Render the company page template with no extra context."""
    template = 'sto/company.html'
    return render(request, template)
def slide(request):
    """Render the slide template with no extra context."""
    template = 'sto/slide.html'
    return render(request, template)
"[email protected]"
] | |
24e6ec345fd84897f8452a4a2c7b65ed1d4e6365 | 2541c644fa91a131b827edf6391b69fc36aec50d | /network_scanner.py | 992381afe0556b1b8c952996cd5da4d4df6dd10f | [] | no_license | RajeshReddyG/PythonScripts | ca49303a1a762510cd6574db780377a37c32187b | 175f4993664609a5bbd30695d6bd4319d7ae889b | refs/heads/master | 2020-04-17T16:40:48.724913 | 2019-04-05T05:11:27 | 2019-04-05T05:11:27 | 166,751,049 | 0 | 0 | null | 2019-01-21T05:01:56 | 2019-01-21T05:01:56 | null | UTF-8 | Python | false | false | 1,651 | py | import socket
import netifaces
import subprocess
'''
Determine your own IP address
Determine your own netmask
Determine the network range
Scan all the addresses (except the lowest, which is your network address and the highest, which is your broadcast address).
Use your DNSs reverse lookup to determine the hostname for IP addresses which respond to your scan.
'''
# Determine the local machine's IP address by "connecting" a UDP socket
# to a public address: no packet is sent for UDP connect, the OS just
# selects the outbound interface, whose address we then read back.
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(("8.8.8.8", 80))
local_ip = s.getsockname()[0]
print("My local ip address is: "+local_ip+" and host name is: "+ socket.gethostname())
s.close()
# Determine the default gateway via netifaces.
gateway = netifaces.gateways()
# gateways()['default'] maps address family -> (gateway_ip, interface);
# index [0] of the IPv4 entry is the gateway's IP string.
default_gateway = gateway['default'][netifaces.AF_INET][0]
print("default gateways is: "+str(default_gateway ))
# Enumerate all network interfaces and print each one's IPv4 addresses.
interfaces = netifaces.interfaces()
for interface in interfaces:
    print("Scanning network: "+ str(interface))
    addrs = netifaces.ifaddresses(str(interface))
    try:
        print(addrs[netifaces.AF_INET])
    except KeyError:
        # Interface has no IPv4 address assigned (e.g. down or IPv6-only).
        print("No address assigned for interface : "+interface)
addrs = default_gateway.split('.')
#print("last device number of subnetwork : {}" + str(int(addrs[3])+1))
# Assumes a /24 network: reuse the gateway's first three octets as the
# host prefix — TODO confirm against the actual netmask.
host_prefix = addrs[0]+"."+addrs[1]+"."+addrs[2]+"."
# NOTE(review): the scan starts at the gateway's own last octet rather
# than at 1, so hosts numerically below the gateway are never probed —
# confirm whether that is intentional.
for host in range(int(addrs[3]), 255):
    ip_addr = host_prefix+str(host)
    # One ping per host: '-c 1' sends a single packet, '-w 1' sets a one
    # second deadline. NOTE(review): these are Linux ping(8) flags; other
    # platforms (e.g. macOS, Windows) use different options.
    ping = subprocess.Popen(['ping', '-c', '1', '-w', '1', ip_addr],stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = ping.communicate()
    # ping exits 0 when at least one reply was received.
    if ping.returncode == 0:
        print(ip_addr + " is available ")
    else:
        print(ip_addr + " is not available")
'''print(error)'''
'''print(error)''' | [
"[email protected]"
] | |
da665bc4ed71047049f74b5eb1a9842b4bed9ac0 | 2f45eddb325b15c8844bcfff594221aa2dda7581 | /Queue/PriorityQueue.py | e649ef7911dc4cfa34dc96bbb7d85f36b55b2f67 | [] | no_license | balandhanka/Data-Structures-using-Python | 3d6a82f700a188c1b292ecf7bc2330ba71b28ca8 | 3a570aa036e092cf50820e550fd47f6e64ec6c52 | refs/heads/master | 2022-12-02T05:07:20.630448 | 2020-07-28T08:18:03 | 2020-07-28T08:18:03 | 260,622,572 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,062 | py | # Author: BALAN DHANKA
class PriorityQueue(object):
    """A simple max-priority queue backed by an unsorted Python list.

    insert() is O(1); delete() scans the whole list, so it is O(n).
    Elements must be mutually comparable with '>'.
    """

    def __init__(self):
        # Backing store; the largest element has the highest priority.
        self.queue = []

    def __str__(self):
        """Space-separated string of the queued elements in storage order."""
        return ' '.join(str(item) for item in self.queue)

    # for checking if the queue is empty
    def isEmpty(self):
        # Bug fix: the original compared len(self.queue) (an int) to []
        # (a list), which is always False, so the queue never reported
        # empty and callers looped until delete() hit IndexError.
        return len(self.queue) == 0

    # for inserting an element in the queue
    def insert(self, data):
        self.queue.append(data)

    # for popping the highest-priority (largest) element
    def delete(self):
        try:
            # Index of the first occurrence of the maximum value.
            # (renamed from 'max', which shadowed the builtin)
            best = 0
            for i, value in enumerate(self.queue):
                if value > self.queue[best]:
                    best = i
            return self.queue.pop(best)
        except IndexError:
            # Preserved behavior: deleting from an empty queue prints a
            # blank line and terminates the program.
            print()
            exit()
if __name__ == '__main__':
    # Demo: fill the queue, show its contents, then drain it in
    # descending priority order.
    pq = PriorityQueue()
    for value in (12, 1, 14, 7):
        pq.insert(value)
    print(pq)  # 12 1 14 7
    while not pq.isEmpty():
        print(pq.delete(), end = ' ')  # 14 12 7 1
| [
"[email protected]"
] |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.