blob_id | directory_id | path | content_id | detected_licenses | license_type | repo_name | snapshot_id | revision_id | branch_name | visit_date | revision_date | committer_date | github_id | star_events_count | fork_events_count | gha_license_id | gha_event_created_at | gha_created_at | gha_language | src_encoding | language | is_vendor | is_generated | length_bytes | extension | content | authors | author
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
577b92f826be294b581186b534f19ed48495f4d5 | 77eea52569b3c2c7e35b3a53735953157fc5683d | /venv/Lib/site-packages/rom/__init__.py | 2bbc5a3cdf950d099fbdbefc9fcf94de37386a8e | [] | no_license | Bezingatron/MathsQuiz.py | d385f8d161782f7ad5b4edfeb63c3fb29962a308 | 1f0ba5a49990bcd43928ef5667f8a221c1101ed0 | refs/heads/master | 2023-04-07T04:51:07.848099 | 2021-04-14T19:42:15 | 2021-04-14T19:42:15 | 340,170,914 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 46 | py | from .rom import rom, rom_generate, rom_parse
| [
"[email protected]"
] | |
b8a905fd52811d0ab8aac11a7a14d94de838ca78 | 5a64aff40b596f2ee5c650811d3009bb7456a16c | /Coursera/Fundamentals_of_Digital_Image_and_Video_Processing/HW2/HW2.py | b752483920a4117935289b49044cc059abe748c2 | [] | no_license | petershen0307/Course | 5c1c901bc9f038306500f824a1daa54c7677624a | 7c6b23556db47eb61c6b926913795e15d706397c | refs/heads/master | 2021-07-20T06:32:27.969483 | 2017-10-29T15:14:28 | 2017-10-29T15:14:28 | 58,867,032 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,748 | py | from scipy import misc
from scipy import ndimage
import numpy as np
import matplotlib.pyplot as plt
def filtering_image(image_path, filter):
origin_image = misc.imread(image_path, mode='L')
# plt.imshow(origin_image, cmap='Greys_r')
# plt.show()
print(origin_image)
origin_image_double = origin_image.astype(np.float64)
print(type(origin_image_double[0][0]))
print(filter)
filter_image = ndimage.convolve(origin_image_double, filter, mode='nearest')
misc.imsave('low_pass_filter_{0}{1}.gif'.format(len(filter), len(filter[0])), filter_image, format='gif')
# plt.imshow(filter_image, cmap='Greys_r')
# plt.show()
print(filter_image)
print('origin_image_double n1: {0}'.format(len(origin_image_double)))
print('origin_image_double n2: {0}'.format(len(origin_image_double[0])))
print('filter_image n1: {0}'.format(len(filter_image)))
print('filter_image n2: {0}'.format(len(filter_image[0])))
return origin_image_double, filter_image
def calc_psnr(image_path, filter):
origin_image_double, filter_image = filtering_image(image_path, filter)
MAXI = 255
    MSE = 0
    # sum the squared error over every pixel of the 256x256 image
    for n1 in range(256):
        for n2 in range(256):
MSE += ((origin_image_double[n1][n2] - filter_image[n1][n2]) ** 2)
MSE /= (256 ** 2)
print('MSE: {0}'.format(MSE))
PSNR = 10 * np.log10(MAXI ** 2 / MSE)
print('PSNR: {0}'.format(PSNR))
return PSNR
if '__main__' == __name__:
image_path = 'digital-images-week2_quizzes-lena.gif'
# question 7
low_pass_filter_33 = np.array([[1 / 9]*3]*3)
calc_psnr(image_path, low_pass_filter_33)
# question 8
low_pass_filter_55 = np.array([[1 / 25]*5]*5)
calc_psnr(image_path, low_pass_filter_55)
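    # A vectorized equivalent of the pixel loop above (a sketch, assuming the
    # same 256x256 8-bit image):
    #     MSE = np.mean((origin_image_double - filter_image) ** 2)
    #     PSNR = 10 * np.log10(255 ** 2 / MSE)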
| [
"[email protected]"
] | |
fabdcb016bb2945ce5a4420e58c20a8cc2070765 | 46afba4407a98ac564ed7a2e08aebfcec4fa1ba3 | /Project Euler/problem_20.py | 83947b08ce6336236edc3cd968c9bdea337af690 | [] | no_license | areebbeigh/CompetitiveProgramming | b28ffe99ac15cadfa3b54f9974beb77c280b2309 | 04044674ad0663181326649d0c14da94108e90da | refs/heads/master | 2021-07-15T07:48:42.338241 | 2021-07-13T10:36:11 | 2021-07-13T10:36:11 | 199,145,494 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 166 | py | #!/usr/bin/python3.6
def factorial(n):
if n == 0:
return 1
return n * factorial(n - 1)
print(sum(map(int, str(factorial(100)))))
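# Note: math.factorial(100) computes the same value without recursion.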
| [
"[email protected]"
] | |
4edc9bed916af82b7eaccb493c7655623dae527d | b77f0f5854169adc500be3cc8466d380eef090aa | /tests/tests_integration/test_api/test_synthetic_time_series.py | 7be5ee77e5efde763eb9dc6ac7d5413728f88631 | [
"Apache-2.0"
] | permissive | lightspin/cognite-sdk-python | 6c59ae04426df92f3b350d8fc48481a9f0cb5153 | a1e32e6ce1ffd8cb66070843dfcb7773d2127689 | refs/heads/master | 2020-09-28T23:41:07.514077 | 2019-12-05T13:13:49 | 2019-12-05T13:13:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,173 | py | import re
from datetime import datetime
from unittest import mock
import pytest
from cognite.client.experimental import CogniteClient
COGNITE_CLIENT = CogniteClient()
@pytest.fixture(scope="session")
def test_time_series():
time_series = {}
for ts in COGNITE_CLIENT.time_series.list(limit=150):
if ts.name in ["test__constant_{}_with_noise".format(i) for i in range(0, 10)]:
value = int(re.match(r"test__constant_(\d+)_with_noise", ts.name).group(1))
time_series[value] = ts
yield time_series
@pytest.fixture
def post_spy():
with mock.patch.object(
COGNITE_CLIENT.datapoints.synthetic, "_post", wraps=COGNITE_CLIENT.datapoints.synthetic._post
) as _:
yield
class TestSyntheticDatapointsAPI:
def test_retrieve(self, test_time_series, post_spy):
query = "ts{id:%d} + ts{id:%d}" % (test_time_series[0].id, test_time_series[1].id)
dps = COGNITE_CLIENT.datapoints.synthetic.retrieve(
expression=query, start=datetime(2017, 1, 1), end="now", limit=23456
)
assert 23456 == len(dps)
assert 3 == COGNITE_CLIENT.datapoints.synthetic._post.call_count
| [
"[email protected]"
] | |
c89ee707547460eac103b503d8800a3db56e5639 | 14c93e9552407b6137fba7d15d9ca99495356b3b | /insert.py | f27c46162fecc8f9a803026a0f3416b60ffc4b79 | [] | no_license | le-phuc-loc/MakeLifeEasier | 741537c4a81b43b889458a6e51fde8160fda64d5 | 61d952ebc602e7e0935baa459afe32e175d38623 | refs/heads/master | 2022-02-24T12:04:53.368373 | 2019-10-26T16:15:55 | 2019-10-26T16:15:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 471 | py | import cv2
f = open("caption1.txt", "a")
c = 0    # caption index within the current image (5 captions per image)
i = 837  # image counter; the first image shown is hinh838.jpg
while i <= 1000:
    if c % 5 == 0:
        # move on to the next image and display it for captioning
        i += 1
        img = './downloads/nienluan2019/hinh' + str(i) + '.jpg'
        frame = cv2.imread(img)
        frame = cv2.resize(frame, (600, 400))
        cv2.imshow("cap", frame)
        cv2.waitKey(300)
        c = 0
    # Flickr8k-style caption line: "<file>#<index> <caption> ."
    temp = "hinh" + str(i) + ".jpg#" + str(c) + " "
    cap = input(temp)
    temp += cap + " .\n"
    f.write(temp)
    c += 1
f.close()
cv2.destroyAllWindows()
| [
"[email protected]"
] | |
7574388080ad99f15f8fdaba68b1877027859752 | 9d69a980eda6a62938f42035e9fce5a9db7426bc | /Training/example10.py | 5d304fd0d558552d07c8e9ec0145eeae3503e201 | [] | no_license | NamNamju/DataAnalysis | f5c8c6cd2aebcd710f747cffcf3e309b8f0999ea | 8fbb67e14fca2bcbeac51a0cc01a2a5857e8e34b | refs/heads/master | 2023-03-04T16:42:42.095428 | 2021-02-11T14:49:50 | 2021-02-11T14:49:50 | 326,737,772 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 557 | py | import cv2
import numpy as np
# Trackbars let you quickly try out different values.
def change_color(x):
r = cv2.getTrackbarPos("R", "Image")
g = cv2.getTrackbarPos("G", "Image")
b = cv2.getTrackbarPos("B", "Image")
image[:] = [b, g, r]
cv2.imshow("Image", image)
image = np.zeros((600, 400, 3), np.uint8)
cv2.namedWindow("Image")
cv2.createTrackbar("R","Image", 0, 255, change_color)
cv2.createTrackbar("G","Image", 0, 255, change_color)
cv2.createTrackbar("B","Image", 0, 255, change_color)
cv2.imshow("Image", image)
cv2.waitKey(0) | [
"[email protected]"
] | |
ef0b6bd74a936ce1e7647ea3fb059c84a30c785f | 16937358e2f9cb173253969719398ddad66b0a94 | /noesis/links.py | c5076633909e4192cf24a5ad3c5baa70fda713c5 | [
"MIT"
] | permissive | fvictor/noesis-python | 99d50b9875a7f283e2bbb13bc5ec5f0fc60c19ad | 84a38398d23baf98f2712ddfe23a88d2d012d5a4 | refs/heads/master | 2021-04-09T15:08:41.944507 | 2018-09-19T10:33:16 | 2018-09-19T10:33:16 | 125,663,095 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,902 | py | import javabridge as jb
from .utils import get_class_wrapper, java_matrix_to_numpy
class LinkTask():
"""Base abstract class for link-related scores."""
__PACKAGES__ = ['noesis.analysis.structure.links.prediction.local',
'noesis.analysis.structure.links.prediction.global']
__SCORE_TAIL__ = 'Score'
class LinkScorer(LinkTask):
"""This class implements the interface for link scorers. These algorithms
compute a score for each link according to certain specific rules.
Parameters
----------
scorer : string
Technique used to compute link scores. Currently supported techniques are:
- Local: 'CommonNeighbors', 'AdamicAdar', 'ResourceAllocation', 'PreferentialAttachment', 'HubDepressed', 'HubPromoted', 'Jaccard', 'LocalLeichtHolmeNewman', 'Salton', and 'Sorensen'.
- Global: 'Katz', 'RandomWalk', 'RandomWalkWithRestart', 'FlowPropagation', 'PseudoinverseLaplacian', 'AverageCommuteTime', 'RandomForestKernel', and 'GlobalLeichtHolmeNewman'.
args: parameters
Parameters for the link scorer. These parameters are specific
for each link scorer and more details are provided in NOESIS documentation.
"""
def __init__(self, scorer, *args):
self.scorer = scorer
self.args = args
def compute(self, network):
"""Compute scores for each link in a given network.
Parameters
----------
network : Network
Network for which the link scores will be computed.
Returns
-------
scores : list of tuples
A list of tuples with the format (source_node, target_node, link_score).
"""
class_wrapper = get_class_wrapper(self.scorer, LinkScorer.__PACKAGES__, LinkScorer.__SCORE_TAIL__)
link_predictor = class_wrapper(network.__o__, *self.args)
scorer_wrapper = get_class_wrapper('LinkScorer', ['noesis.analysis.structure.links.scoring'])
link_scorer = scorer_wrapper(network.__o__, link_predictor)
scores = link_scorer.call()
link_index = link_scorer.getLinkIndex()
result = [(link_index.source(i), link_index.destination(i), scores.get(i)) for i in range(link_index.links())]
return result
class LinkPredictor(LinkTask):
"""This class implements the interface for link predictors. These algorithms
compute a score for each pair of nodes according to certain specific rules.
Parameters
----------
scorer : string
Technique used to compute node pair scores. Currently supported techniques are:
- Local: 'CommonNeighbors', 'AdamicAdar', 'ResourceAllocation', 'PreferentialAttachment', 'HubDepressed', 'HubPromoted', 'Jaccard', 'LocalLeichtHolmeNewman', 'Salton', and 'Sorensen'.
- Global: 'Katz', 'RandomWalk', 'RandomWalkWithRestart', 'FlowPropagation', 'PseudoinverseLaplacian', 'AverageCommuteTime', 'RandomForestKernel', and 'GlobalLeichtHolmeNewman'.
args: parameters
Parameters for the link predictor. These parameters are specific
for each link predictor and more details are provided in NOESIS documentation.
"""
def __init__(self, predictor, *args):
self.predictor = predictor
self.args = args
def compute(self, network):
"""Compute scores for each pair of nodes in a given network.
Parameters
----------
network : Network
Network for which the node pair scores will be computed.
Returns
-------
scores : ndarray, shape (num_nodes,num_nodes)
Matrix of node pair scores.
"""
class_wrapper = get_class_wrapper(self.predictor, LinkPredictor.__PACKAGES__, LinkScorer.__SCORE_TAIL__)
link_predictor = class_wrapper(network.__o__, *self.args)
matrix = link_predictor.call()
return java_matrix_to_numpy(matrix) | [
"[email protected]"
] | |
8a79193a9965797088a1f5b46eadaffaa84fe9ae | 10512ea1aa0f4870dbcf22151743655f80e4119c | /wangyiyun_pl/wangyiyun_pl/settings.py | 1351d41e5341ddb2890cea03bc7171b7738cf668 | [] | no_license | 809069790/-one- | 18f34367c4af0a9a6cd45ee5bc150f15a993cfbc | c7edb25a75094c4faec845fc820623e465c7cbdb | refs/heads/master | 2020-03-28T03:07:08.689242 | 2018-09-06T05:36:35 | 2018-09-06T05:36:35 | 147,621,723 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,331 | py | # -*- coding: utf-8 -*-
# Scrapy settings for wangyiyun_pl project
#
# For simplicity, this file contains only settings considered important or
# commonly used. You can find more settings consulting the documentation:
#
# https://doc.scrapy.org/en/latest/topics/settings.html
# https://doc.scrapy.org/en/latest/topics/downloader-middleware.html
# https://doc.scrapy.org/en/latest/topics/spider-middleware.html
BOT_NAME = 'wangyiyun_pl'
SPIDER_MODULES = ['wangyiyun_pl.spiders']
NEWSPIDER_MODULE = 'wangyiyun_pl.spiders'
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'wangyiyun_pl (+http://www.yourdomain.com)'
# Obey robots.txt rules
ROBOTSTXT_OBEY = False
# Configure maximum concurrent requests performed by Scrapy (default: 16)
CONCURRENT_REQUESTS = 32
# Configure a delay for requests for the same website (default: 0)
# See https://doc.scrapy.org/en/latest/topics/settings.html#download-delay
# See also autothrottle settings and docs
# DOWNLOAD_DELAY = 3
# The download delay setting will honor only one of:
#CONCURRENT_REQUESTS_PER_DOMAIN = 16
#CONCURRENT_REQUESTS_PER_IP = 16
# Disable cookies (enabled by default)
#COOKIES_ENABLED = False
# Disable Telnet Console (enabled by default)
#TELNETCONSOLE_ENABLED = False
# Override the default request headers:
#DEFAULT_REQUEST_HEADERS = {
# 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
# 'Accept-Language': 'en',
#}
# Enable or disable spider middlewares
# See https://doc.scrapy.org/en/latest/topics/spider-middleware.html
#SPIDER_MIDDLEWARES = {
# 'wangyiyun_pl.middlewares.WangyiyunPlSpiderMiddleware': 543,
#}
# Enable or disable downloader middlewares
# See https://doc.scrapy.org/en/latest/topics/downloader-middleware.html
DOWNLOADER_MIDDLEWARES = {
'wangyiyun_pl.middlewares.RandomHttpProxyMiddleware': 543,
# 'scrapy.contrib.downloadermiddleware.httpproxy.HttpProxyMiddleware':543,
}
# Enable or disable extensions
# See https://doc.scrapy.org/en/latest/topics/extensions.html
#EXTENSIONS = {
# 'scrapy.extensions.telnet.TelnetConsole': None,
#}
# Configure item pipelines
# See https://doc.scrapy.org/en/latest/topics/item-pipeline.html
# Write scraped items to storage
ITEM_PIPELINES = {
'wangyiyun_pl.pipelines.WangyiyunPlPipeline': 300,
}
# Enable and configure the AutoThrottle extension (disabled by default)
# See https://doc.scrapy.org/en/latest/topics/autothrottle.html
#AUTOTHROTTLE_ENABLED = True
# The initial download delay
#AUTOTHROTTLE_START_DELAY = 5
# The maximum download delay to be set in case of high latencies
#AUTOTHROTTLE_MAX_DELAY = 60
# The average number of requests Scrapy should be sending in parallel to
# each remote server
#AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0
# Enable showing throttling stats for every response received:
#AUTOTHROTTLE_DEBUG = False
# Enable and configure HTTP caching (disabled by default)
# See https://doc.scrapy.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
#HTTPCACHE_ENABLED = True
#HTTPCACHE_EXPIRATION_SECS = 0
#HTTPCACHE_DIR = 'httpcache'
#HTTPCACHE_IGNORE_HTTP_CODES = []
#HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'
# Database configuration
MYSQL_HOST = '192.168.100.115'
MYSQL_DBNAME = 'wangyiyun'
MYSQL_USER = 'root'
MYSQL_PASSWD = '****'
| [
"[email protected]"
] | |
ba292c32cf83dce0c1b1f6d90d73548a523ad68b | 98e4dc41e3d994dfb55a2553c79d1b61590ecca6 | /LeetCode/Medium/Subarray Sum Equals K/sol.py | aa9bc85d1765a4c74100a8bfe8caf9119a4376d8 | [] | no_license | krohak/Project_Euler | b753c4f3bbf26a5eff3203e27482599d1e089fc6 | 1d8a2326543d69457f1971af9435b3e93ab32f52 | refs/heads/master | 2022-09-02T10:48:59.472111 | 2022-08-18T11:11:16 | 2022-08-18T11:11:16 | 111,204,162 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 517 | py | class Solution:
def subarraySum(self, nums, target_sum):
cumulative_sum = {0:1}
counter = 0
summ = 0
for num in nums:
summ+=num
if (summ-target_sum) in cumulative_sum:
counter+=cumulative_sum[(summ-target_sum)]
cumulative_sum[summ] = cumulative_sum.get(summ, 0)+1
return counter
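# Prefix-sum idea: cumulative_sum counts how many prefixes reached each running
# sum, so every earlier prefix equal to (summ - target_sum) closes a subarray
# that sums to target_sum. For the example below the answer is 6.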
nums = [1,1,1,1,2,2,1,1]
sol = Solution().subarraySum(nums, 2)
print(sol) | [
"[email protected]"
] | |
04b637496bfe20e97493ed1a0315f36073109343 | 14c992d7c53d989fdaebb79848d548985ffa805b | /sifapi/planning/migrations/0005_auto_20210907_1304.py | 5660f0c77400e53b58cc9a954804b8a44fd850fb | [] | no_license | pierrotlemekcho/exaged | b0e699b1bcd04040e94c6c61ac57e6e2c11c5f93 | a64102312b310f80fae9f071b5eda71642b3d402 | refs/heads/master | 2023-03-08T04:23:47.109553 | 2022-03-28T09:48:54 | 2022-03-28T09:48:54 | 81,460,567 | 0 | 1 | null | 2023-03-04T06:37:24 | 2017-02-09T14:49:36 | Python | UTF-8 | Python | false | false | 975 | py | # Generated by Django 3.2.7 on 2021-09-07 13:04
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('planning', '0004_auto_20210907_1249'),
]
operations = [
migrations.AlterField(
model_name='lignedecommande',
name='exact_item',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='planning.article', to_field='exact_id'),
),
migrations.AlterField(
model_name='lignedecommande',
name='exact_order',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='planning.commande', to_field='exact_order_id'),
),
migrations.AlterField(
model_name='lignedecommande',
name='schedule_priority',
field=models.IntegerField(default=1),
),
]
| [
"[email protected]"
] | |
988bc023beff7901de42c542e51eefdcebde947b | 50068689d8e7a9d12715585b935874e0d06cabca | /apns/new_apns.py | afb8c084fc99a715d3760535dce30ca7fe477112 | [] | no_license | Bazinko/Course3-6 | ff87f326dcfcfcac3941ab5b290020ff4fdc2f99 | e73dbb9aedd2bb3e179c65d2cb91b543cb81974a | refs/heads/master | 2021-01-17T20:55:56.265743 | 2015-11-28T22:38:43 | 2015-11-28T22:38:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 885 | py | import time
import json
from apns import APNs, Frame, Payload
import sys
cert_file = 'cert.pem'
json_file = 'payload.json'
tokens = [
'74EFFB57F7D081B88AF222861ACC9BEA45F8B257EB1762D82265F494C6E1178D',
]
if __name__ == '__main__':
try:
custom_payload = json.loads(open(json_file, 'r').read())
except Exception:
sys.exit('Incorrect JSON')
apns = APNs(use_sandbox=True, cert_file=cert_file, enhanced=True)
payload = Payload(custom=custom_payload)
# Send multiple notifications in a single transmission
frame = Frame()
for i, token in enumerate(tokens, start=1):
        # Frame.add_item(token_hex, payload, identifier, expiry, priority)
        frame.add_item(token, payload, i, time.time() + 3600, 10)
apns.gateway_server.send_notification_multiple(frame)
def response_listener(error_response):
print(error_response)
apns.gateway_server.register_response_listener(response_listener)
| [
"[email protected]"
] | |
1b0aae41bad07fa5a652c7824bf87c8f2c6abe40 | c76b5c3e52a0ce66039aba9c741a86f7aa830b3a | /env/Lib/site-packages/pylint_django/tests/input/external_psycopg2_noerror_postgres_fields.py | e5cb0ddedb85334cb148e993e30c40603b45b7e8 | [
"MIT"
] | permissive | GroovyCat/PRView-project | 9aed4b0d864f812cc0e52ae346ad1e65d93665d7 | 28d2e80dcc03f82316c68d8162f2ec96703cb503 | refs/heads/master | 2021-06-22T14:59:48.337129 | 2021-01-20T10:04:05 | 2021-01-20T10:04:05 | 184,058,883 | 1 | 0 | MIT | 2019-05-27T00:21:24 | 2019-04-29T11:32:11 | null | UTF-8 | Python | false | false | 1,626 | py | """
Checks that Pylint does not complain Postgres model fields.
"""
# pylint: disable=C0111,W5101
from __future__ import print_function
from django.contrib.postgres import fields
from django.db import models
class PostgresFieldsModel(models.Model):
arrayfield = fields.ArrayField(models.CharField())
hstorefield = fields.HStoreField()
jsonfield = fields.JSONField()
rangefield = fields.RangeField()
integerrangefield = fields.IntegerRangeField()
bigintegerrangefield = fields.BigIntegerRangeField()
floatrangefield = fields.FloatRangeField()
datetimerangefield = fields.DateTimeRangeField()
daterangefield = fields.DateRangeField()
def arrayfield_tests(self):
sorted_array = self.arrayfield.sort()
print(sorted_array)
def dictfield_tests(self):
print(self.hstorefield.keys())
print(self.hstorefield.values())
print(self.hstorefield.update({'foo': 'bar'}))
print(self.jsonfield.keys())
print(self.jsonfield.values())
print(self.jsonfield.update({'foo': 'bar'}))
def rangefield_tests(self):
print(self.rangefield.lower)
print(self.rangefield.upper)
print(self.integerrangefield.lower)
print(self.integerrangefield.upper)
print(self.bigintegerrangefield.lower)
print(self.bigintegerrangefield.upper)
print(self.floatrangefield.lower)
print(self.floatrangefield.upper)
print(self.datetimerangefield.lower)
print(self.datetimerangefield.upper)
print(self.daterangefield.lower)
print(self.daterangefield.upper)
| [
"[email protected]"
] | |
8f4e84e7264fc334cc8f44ecd296730e66f376d9 | bc4116c418a5136851b9956382d0195c0eec943f | /if, for, while, 실습1, 1차원배열/2439.py | fa8dad49d9df722e1ee090cc8db10b1b681e9fff | [] | no_license | bww7448/baekjoon_solution | 64d29eae8d1c0cc46c948b318c25f70ef4fad804 | a02d17681b268f5825f3f1c8bfa1f329f74e26bd | refs/heads/master | 2022-11-24T09:29:20.338162 | 2020-08-03T12:53:38 | 2020-08-03T12:53:38 | 283,445,402 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 75 | py | N = int(input())
for i in range(1,N+1):
s = "*"*i
print(s.rjust(N)) | [
"[email protected]"
] | |
b4d04353919a7f6071d65606fccf5a77fe9f4b53 | 5bc59a84adc4854cdaa3af02be84fb4a70a85bb2 | /04-DRF-LEVEL-TWO/ebooksapi/ebooks/apps.py | ff3c9cb021a64b9a95e270a20b7cc6aafb2232cb | [] | no_license | furkalokbu/REST_API | be2a0a4c05dfca15b24420d1fa1d22524a851a0b | 55552d59a020ae969d4ef8dfda52207cf5c40c4c | refs/heads/main | 2023-05-05T21:36:16.856267 | 2021-05-25T19:37:18 | 2021-05-25T19:37:18 | 332,436,631 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 87 | py | from django.apps import AppConfig
class EbooksConfig(AppConfig):
name = "ebooks"
| [
"[email protected]"
] | |
d10426187a881b3186c505ac00bd2cca83c83986 | cf6ea609922165d6a4b5af24a6b15e1537d9d901 | /pylgrim/osmdump.py | 3486bde6d8a50caacd8df4aac4ab256e8756c5b9 | [] | no_license | josch/neo1973-germany | bbc9f0909a58e93b82fd515e9f9a066db7812091 | 90eaa81f2d3cdcf7af8f32298d77acfdf050be9a | refs/heads/master | 2021-01-23T11:50:35.611116 | 2009-02-18T14:45:55 | 2009-02-18T14:45:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,605 | py | #!/usr/bin/python
#coding=utf8
import os
import sys
import math
import time
z = 10
def download(x,y,z):
import urllib
try:
webFile = urllib.urlopen("http://a.tile.openstreetmap.org/%d/%d/%d.png"%(z,x,y))
if not os.path.exists("%d"%z):
os.mkdir("%d"%z)
if not os.path.exists("%d/%d"%(z,x)):
os.mkdir("%d/%d"%(z,x))
localFile = open("%d/%d/%d.png"%(z,x,y), 'w')
localFile.write(webFile.read())
webFile.close()
localFile.close()
except Exception, e:
print e
def lon2km(lat):
return math.cos(lat*math.pi/180)*2*math.pi*6378.137/360
def getxy(lat,lon,z):
x = (lon+180)/360 * 2**z
y = (1-math.log(math.tan(lat*math.pi/180) + 1/math.cos(lat*math.pi/180))/math.pi)/2 * 2**z
return int(x),int(y)
lat = 49.009051
lon = 8.402481
r = 10  # radius in km around (lat, lon)
lat1 = lat - r/111.32          # one degree of latitude is ~111.32 km
lon1 = lon - r/lon2km(lat)
lat2 = lat + r/111.32
lon2 = lon + r/lon2km(lat)
tiles = 0
#do not download zoom 18
for z in range(5,18):
x1,y1 = getxy(lat1, lon1, z)
x2,y2 = getxy(lat2, lon2, z)
tiles += (x2+1-x1)*(y1+1-y2)
print "do you really want to download %d tiles? [Y/n]"%tiles,
data = sys.stdin.read(1)
if data in ("y", "Y"):
i = 1;
for z in range(5,18):
x1,y1 = getxy(lat1, lon1, z)
x2,y2 = getxy(lat2, lon2, z)
for x in xrange(x1,x2+1):
for y in xrange(y2,y1+1):
if not os.path.exists("%d/%d/%d.png"%(z,x,y)):
download(x,y,z)
print "\r%i"%i,
sys.stdout.flush()
i+=1
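# Sanity check for the slippy-map tile math above (standard OSM tiling):
# getxy(0.0, 0.0, 1) == (1, 1), i.e. lat/lon (0, 0) falls in tile x=1, y=1 at zoom 1.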
| [
"josch@46df4e5c-bc4e-4628-a0fc-830ba316316d"
] | josch@46df4e5c-bc4e-4628-a0fc-830ba316316d |
efb6c6d184449d74a7b1f9ca6c7912378d14655e | 4dccc7d5d379c152e3644002b57acb02c40a637f | /heap/connect_n_ropes.py | ee71e867692614c4da028cc1b932edfb9b5fd198 | [] | no_license | Niranjan-10/DSA-Practice | e22d37bfb25d24b9e4f9d288f73dbb59f5726ba6 | 9b9af427db2816a49df91dc55d94d3839a4a1fcb | refs/heads/main | 2023-08-21T19:34:49.852987 | 2021-10-05T13:36:18 | 2021-10-05T13:36:18 | 410,946,417 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 457 | py | import heapq
def connectNropes(minheap):
ans = 0
while(len(minheap)>1):
        first = heapq.heappop(minheap)
        second = heapq.heappop(minheap)
        merged = first + second  # cost of joining the two shortest ropes
        ans += merged
        heapq.heappush(minheap, merged)
return ans
if __name__ == "__main__":
arr = [ 4, 3, 2, 6]
minheap =[]
for i in arr:
heapq.heappush(minheap,i)
print(minheap)
print(connectNropes(minheap))
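    # Greedy check: for [4, 3, 2, 6] the merges cost 5, 9 and 15, so this prints 29.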
| [
"[email protected]"
] | |
7af9afcf305e3ff34fd960826dff35f494aca296 | beb93da9233c8c000a5c2e34a0aeb5cc24de6632 | /hackathon/hackathon/challenges/migrations/0005_auto_20201011_0500.py | 9c535982695038c43512ae64d75fcfb75ddc3ed8 | [] | no_license | Treelovah/Django | 58707b3c4aa0c92c46c49f711fbfa8af70c84652 | 1fad7f677f2efa3dba854f22b3bbbe914171ab5a | refs/heads/main | 2023-02-07T03:08:59.006324 | 2020-12-21T01:37:49 | 2020-12-21T01:37:49 | 322,059,417 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 464 | py | # Generated by Django 3.1.1 on 2020-10-11 05:00
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('challenges', '0004_auto_20201011_0449'),
]
operations = [
migrations.AlterField(
model_name='team',
name='team_name',
field=models.CharField(choices=[('kss', 'KryptSec'), ('hsh', 'hashdump'), ('mom', 'HackerMoms')], max_length=3),
),
]
| [
"[email protected]"
] | |
732ed6a1db0038aacd058257115f7fce3e73f3e9 | 53e17ccb4270e31058cc50dd3c5874f6fa619ee1 | /traditional_boxCounting.py | e135fc8196b67f0cb9bb65ca364cd923dbd77bfe | [
"MIT"
] permissive | muchway2019/PPIN | a1a52fdc673d427e848aa3b240d43dec8fcbe743 | 5149b094ef1dc2e4628d4e1bb4594e5b7c40d440 | master | 2020-11-30T02:43:07.499843 | 2019-01-31T14:13:53 | 2019-01-31T14:13:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,039 | py | #this preliminary code calculates the number of boxes of each box length required to cover the whole network
#once you run the code, you can see how the nodes are removed, how the edges are reconnected, and how the number of boxes increases
import matplotlib.pyplot as plt
import networkx as nx
from math import log
from pylab import *
def build_lattice_graph(n):
"""
Build lattice graph with n*n nodes
"""
if n < 2:
raise ValueError
G = nx.Graph()
G.add_nodes_from([i for i in range(n * n)])
for i in range(n):
for j in range(n - 1):
idx = i * n + j
G.add_edge(idx, idx + 1)
for i in range(n - 1):
for j in range(n):
idx = i * n + j
G.add_edge(idx, idx + n)
return G
def InitialPoint(graph):
    # pick the node with the smallest degree as the seed of the next box
    d = {}
    for i in list(graph.nodes):
        d.update({i: len(list(graph.adj[i]))})
    startingNode = min(d, key=d.get)
    return startingNode
#instead of adj check, do nx.shortest_path_length
def adj_check(graph, startingpoint):
    a = [startingpoint] + list(graph.adj[startingpoint])
    adjTOadj = []
    for i in a:
        adjTOadj.append(list(graph.adj[i]))
    # nodes adjacent to the starting point and to every one of its neighbours
    return list(set(adjTOadj[0]).intersection(*adjTOadj[1:]))
def remove_duplicates(values):
output = []
seen = set()
for value in values:
# If value has not been encountered yet,
# ... add it to both list and set.
if value not in seen:
output.append(value)
seen.add(value)
return output
#checkList=[]
#for i in range(1,n+1):
# checkList.append([[u]+path for neighbor in G.neighbors(u) for path in findPaths(G,neighbor,n-1) if u not in path])
#checkList = [item for sublist in checkList for item in sublist]
def findPaths(G,u,n):
if n==0:
return [[u]]
paths = [[u]+path for neighbor in G.neighbors(u) for path in findPaths(G,neighbor,n-1) if u not in path]
return paths
def shortestPath(G,u,n,paths):
check = 0
s_path = []
for i in paths:
temp = list(nx.all_shortest_paths(G,source=i[0],target=i[-1]))
#print (i)
#print (temp)
#print("-----------")
if i in temp:
check += 1
s_path.append(i)
#print(i)
if check==0:
return ("CHANGE BOX SIZE")
else:
s_path = [item for sublist in s_path for item in sublist]
s_path = remove_duplicates(s_path)
return s_path
def isCompact(graph, diameter, box_length):
    # drop any node farther than the box length from an earlier member of the box
    nodes = list(diameter)
    for idx, i in enumerate(nodes[:-1]):
        if i not in diameter:
            continue
        for j in nodes[idx + 1:]:
            if j in diameter and nx.shortest_path_length(graph, i, j) >= box_length + 1:
                diameter.remove(j)
    return diameter
BOX_SIZE = 12
BOX_SIZE -= 1
#H=build_lattice_graph(4)
#H = nx.path_graph(20)
#nodes, edges = 14, 21
#H = nx.gnm_random_graph(nodes, edges)
#H=ErdosRenyiGraph.copy()
#nx.draw(H, with_labels=True)
#plt.show()
box_count = 0
xAxis=[]
yAxis=[]
while(BOX_SIZE > 0):
H=build_lattice_graph(80)
#H = nx.path_graph(6)
#nx.draw(H, with_labels=True)
#plt.show()
box_count = 0
while(len(H.nodes)>1):
Start = InitialPoint(H)
all_paths = findPaths(H,Start,BOX_SIZE)
unique_path = shortestPath(H,Start,BOX_SIZE,all_paths)
if unique_path == "CHANGE BOX SIZE":
box_count += 1
#print(box_count)
break
compact_box = isCompact(H,unique_path,BOX_SIZE)
box_count += 1
#remove all the nodes inside the box
for s in compact_box:
H.remove_node(s)
#print("START POINT:")
#print(Start)
#print("BOX COUNT:")
#print (box_count)
#print("DELETED NODES:")
#print(compact_box)
#print ("LINKING NODES")
#print (relevant_nodes)
#print("NUMBER OF REMAINING NODES:")
#print (len(H.nodes))
#nx.draw(H, with_labels=True)
#plt.show()
#print(box_count)
#print(BOX_SIZE)
print(BOX_SIZE+1)
print(box_count)
xAxis.append(BOX_SIZE+1)
BOX_SIZE -= 1
yAxis.append(box_count)
x=np.log(xAxis)
y=np.log(yAxis)
m,b = np.polyfit(x, y, 1)
plot(x, y, 'yo', x, m*x+b, '--k')
show()
print(m)
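# The log-log slope m estimates the negative box-counting dimension, N(l) ~ l**(-d);
# for this 80x80 lattice m should come out close to -2.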
| [
"[email protected]"
] | |
35637523406a5f1bc4a8626784efa3ef9b2464ec | 1259d1f9e81a68606fa25315ad3915a9ddd7f599 | /env/bin/pyrsa-decrypt | a809fe68bf0155c0400fa8cb67a6a36fc33560d0 | [] | no_license | Adridut/flaskr | 1c33dc2d35f2d5b559cf6f3f40775d465d401fe7 | 94e7c6b5fd634f0ee0ef48a8e07b9c6f6b1d405a | refs/heads/master | 2021-01-21T06:48:59.125054 | 2017-02-27T08:26:45 | 2017-02-27T08:26:45 | 83,286,488 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 238 | #!/home/adrien/Dev/flaskr/env/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from rsa.cli import decrypt
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(decrypt())
| [
"[email protected]"
] | ||
37b2e53d04f0dc324981033f3407a9c4aaec00c2 | 2780b2e24fd6c128c865e83477dad8ec6d1b6f38 | /Live_face_detection.py | 212d84eefbbf3d056970c3fe15f21d2869c09845 | [] | no_license | Prasoon950/OPEN_CV | 70c28ce4c6600f31d8cb031d24bda89abfc74f30 | d121f5159b82d93ecc99192610143016ca231ec8 | refs/heads/master | 2021-03-16T15:25:27.805550 | 2020-03-12T20:05:49 | 2020-03-12T20:05:49 | 246,920,170 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,137 | py | # -*- coding: utf-8 -*-
"""
Created on Fri Mar 13 01:09:55 2020
@author: DELL
"""
import numpy as np
import cv2
face_classifier = cv2.CascadeClassifier(r"F:\zipped\Computer-Vision-Tutorial-master\Haarcascades\haarcascade_frontalface_default.xml")
eye_classifier = cv2.CascadeClassifier(r"F:\zipped\Computer-Vision-Tutorial-master\Haarcascades\haarcascade_eye.xml")
def detect(gray, frame):
    face = face_classifier.detectMultiScale(gray, 1.3, 5)
    for (x, y, w, h) in face:
        cv2.rectangle(frame, (x, y), (x + w, y + h), (127, 0, 255), 2)
        roi_gray = gray[y:y + h, x:x + w]
        roi_color = frame[y:y + h, x:x + w]
        eye = eye_classifier.detectMultiScale(roi_gray, 1.1, 3)
        for (ex, ey, ew, eh) in eye:
            cv2.rectangle(roi_color, (ex, ey), (ex + ew, ey + eh), (255, 255, 0), 2)
    return frame
video_capture = cv2.VideoCapture(0)
while True:
_,frame = video_capture.read()
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
canvas = detect(gray, frame)
cv2.imshow("canvas", canvas)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
video_capture.release()
cv2.destroyAllWindows() | [
"[email protected]"
] | |
7b041ec9174315d209a07d3788e55af41992fa48 | 6656d7e426c85ff977a2619f6a52981456ada9fe | /2_Arrays/4.py | 2d98a1503d3a9f682454b109abf31f63ca746397 | [
"MIT"
] | permissive | abphilip-codes/Hackerrank_Interview | 4f37e12f1ab47042db88947bb3a31aed6486b54d | b40981ef55d04fb14d81a6e1c9ade1878f59394d | refs/heads/master | 2023-08-02T22:00:37.298224 | 2021-10-02T11:59:08 | 2021-10-02T11:59:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 613 | py | # https://www.hackerrank.com/challenges/minimum-swaps-2/problem
#!/bin/python3
import math
import os
import random
import re
import sys
# Complete the minimumSwaps function below.
def minimumSwaps(a):
    # z maps each value to its current 1-based position in the array
    k, z = 0, dict(zip(a, range(1, len(a) + 1)))
    for i in range(1, len(a) + 1):
        if z[i] != i:
            # place value i into slot i and record the displaced value's position
            z[a[i-1]] = z[i]
            a[z[i]-1] = a[i-1]
            k += 1
    return k
if __name__ == '__main__':
fptr = open(os.environ['OUTPUT_PATH'], 'w')
n = int(input())
arr = list(map(int, input().rstrip().split()))
res = minimumSwaps(arr)
fptr.write(str(res) + '\n')
fptr.close() | [
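# Quick sanity check: minimumSwaps([4, 3, 1, 2]) == 3.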
"[email protected]"
] | |
9407153003d4258fda6044c326d36c44a4358c76 | 6ff62bd52c889483dd2445d6c7c7239d8ac8f870 | /NBIOT_position_system/real_time_monitoring/migrations/0011_auto_20180717_1423.py | dfb6f39ed9bd9dada0ed242b07e0c7998b6964a0 | [] | no_license | zhangc-rw/position_service | 830c1e5fbf4d9bc4a307aa663d4193dc0214f5b7 | 104b81dc40ff777a94ee18ff63422c89c5a20cf3 | refs/heads/master | 2020-03-12T13:37:52.967126 | 2018-12-06T08:18:30 | 2018-12-06T08:18:30 | 130,646,760 | 0 | 1 | null | 2018-04-24T01:35:52 | 2018-04-23T05:55:45 | Python | UTF-8 | Python | false | false | 1,792 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-07-17 06:23
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('device_management', '0009_auto_20180717_1423'),
('real_time_monitoring', '0010_auto_20180425_1050'),
]
operations = [
migrations.CreateModel(
name='Past_Target',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('update_time', models.DateTimeField(auto_now=True, null=True)),
('message_time', models.DateTimeField()),
('working_status', models.CharField(max_length=20)),
('base_num', models.CharField(max_length=80)),
('cell_num', models.CharField(max_length=80)),
('location_time', models.DateTimeField()),
('coordinates', models.CharField(max_length=60)),
('velocity', models.FloatField(default=0)),
('moving_direction', models.FloatField(default=0)),
('height', models.FloatField(default=0)),
('device', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='device_management.Device')),
],
),
migrations.AlterField(
model_name='target',
name='coordinates',
field=models.CharField(max_length=60),
),
migrations.AlterField(
model_name='target',
name='device',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='device_management.Device'),
),
]
| [
"[email protected]"
] | |
4eff3ee86176474b0f5ada0af11864b69625c3c0 | ec551303265c269bf1855fe1a30fdffe9bc894b6 | /topic10_queue/T933_RecentCounter/interview.py | 8bd0e8689824bc5d8875b7a6fa5e0244cd77e011 | [] | no_license | GongFuXiong/leetcode | 27dbda7a5ced630ae2ae65e19d418ebbc65ae167 | f831fd9603592ae5bee3679924f962a3ebce381c | refs/heads/master | 2023-06-25T01:05:45.683510 | 2021-07-26T10:05:25 | 2021-07-26T10:05:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,331 | py | '''
933. Number of Recent Calls
Write a RecentCounter class to count recent requests.
It has a single method: ping(int t), where t is a time in milliseconds.
Return the number of pings made from 3000 milliseconds ago up to now.
Every ping in the range [t - 3000, t] is counted, including the current ping at time t.
It is guaranteed that every call to ping uses a larger value of t than before.
Example:
Input: inputs = ["RecentCounter","ping","ping","ping","ping"], inputs = [[],[1],[100],[3001],[3002]]
Output: [null,1,2,3,3]
Constraints:
Each test case calls ping at most 10000 times.
Each test case calls ping with strictly increasing values of t.
Every call to ping satisfies 1 <= t <= 10^9.
'''
import collections
class RecentCounter:
def __init__(self):
self.deque = collections.deque()
def ping(self, t):
self.deque.append(t)
while self.deque[0] < t-3000:
self.deque.popleft()
return len(self.deque)
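# Sliding window: timestamps older than t - 3000 are dropped from the left, so
# the deque holds exactly the pings in [t - 3000, t]; pings at 1, 100, 3001,
# 3002 therefore return 1, 2, 3, 3.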
if __name__ == "__main__":
solution = RecentCounter()
while 1:
str1 = input()
if str1 != "":
num1 = int(str1)
res = solution.ping(num1)
print(res)
else:
break
| [
"[email protected]"
] | |
4be4c858a411ba9e30fcb9d748020692c677826b | 60e4b90a82cb5e64ba61a68fd5cac13b0b4e3297 | /{{cookiecutter.github_repository_name}}/{{cookiecutter.app_name}}/users/serializers.py | 582f5353baf7f90f3e9ac396aa6667c1a1816bbf | [
"MIT"
] | permissive | mashrikt/cookiecutter-django-dokku | 768a29974b99a67a8c2e2b73d7bf86c155899cae | e78a27d78c2b1fea47c1dc40449ad542ca0b7ee6 | refs/heads/master | 2020-03-22T08:28:50.214705 | 2019-07-25T19:04:15 | 2019-07-25T19:04:15 | 139,769,068 | 19 | 6 | MIT | 2020-02-12T01:05:45 | 2018-07-04T21:58:26 | Python | UTF-8 | Python | false | false | 2,906 | py | from allauth.account.adapter import get_adapter
from django.contrib.auth import get_user_model
from django.shortcuts import get_object_or_404
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
from rest_framework.serializers import ModelSerializer, Serializer, ValidationError
User = get_user_model()
class RegisterSerializer(serializers.Serializer):
email = serializers.EmailField(required=True)
password1 = serializers.CharField(write_only=True)
password2 = serializers.CharField(write_only=True)
def validate_email(self, email):
return get_adapter().validate_unique_email(email)
def validate_password1(self, password):
return get_adapter().clean_password(password)
def validate(self, data):
if data['password1'] != data['password2']:
raise serializers.ValidationError(_("The two password fields didn't match."))
return data
def get_cleaned_data(self):
return {
'email': self.validated_data.get('email', '')
}
def save(self, request):
password = self.validated_data.pop('password1', None)
cleaned_data = self.get_cleaned_data()
user = User.objects.create(**cleaned_data)
user.set_password(password)
user.save()
return user
class LoginSerializer(Serializer):
email = serializers.EmailField(required=False)
password = serializers.CharField(write_only=True, style={'input_type': 'password'})
def _validate_email(self, email, password):
# Should return 404 if no user found with this email
# This is intentional as per requirements and specification
user = get_object_or_404(User, email__iexact=email)
if user and user.check_password(password):
return user
def validate(self, attrs):
email = attrs.get('email')
password = attrs.get('password')
if email and password:
user = self._validate_email(email, password)
else:
msg = _('Must include "email" and "password".')
raise ValidationError(msg)
if not user:
msg = _('Unable to log in with provided credentials.')
raise ValidationError(msg)
if not user.is_active:
msg = _('User account is disabled.')
raise ValidationError(msg)
# Everything passed. That means password is accepted. So return the user
attrs['user'] = user
return attrs
class UserDetailsSerializer(ModelSerializer):
class Meta:
model = User
fields = ('id', 'first_name', 'last_name', 'email', 'is_superuser')
read_only_fields = ('is_superuser',)
class UserPublicSerializer(ModelSerializer):
full_name = serializers.CharField(source='get_full_name')
class Meta:
model = User
fields = ('id', 'full_name', 'email')
| [
"[email protected]"
] | |
0a58a94a0291c9eee74ec90033a491790733ec6e | 55e28e35db5bf6a844df3fb47080500b115a893e | /day10/select/select_server.py | 009bb5773e7561fb2d56689d463ea451aefcc9ee | [] | no_license | pylarva/Python | 5743ffa4a69db42b642d51b62f9e9b69ddbc1a72 | 71b484950e6dbdcf708726a68a3386d0d6ddc07f | refs/heads/master | 2020-04-19T09:11:11.195393 | 2017-11-16T07:32:59 | 2017-11-16T07:32:59 | 67,507,687 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,258 | py | # !/usr/bin/env python
# -*- coding:utf-8 -*-
# Author:pylarva
# blog:www.lichengbing.com
__author__ = 'Alex Li'
import select
import socket
import sys
import queue
server = socket.socket()
server.setblocking(0)
server_addr = ('localhost', 10000)
print('starting up on %s port %s' % server_addr)
server.bind(server_addr)
server.listen(5)
inputs = [server, ]  # monitor the server socket itself too -- it is also an fd
outputs = []
message_queues = {}
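# A minimal client sketch for manually testing the echo loop below
# (an assumption -- run it as a separate script, not part of this server):
#     import socket
#     c = socket.socket()
#     c.connect(('localhost', 10000))
#     c.send(b'hello')
#     print(c.recv(1024))   # -> b'HELLO'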
while True:
print("waiting for next event...")
    readable, writeable, exceptional = select.select(inputs, outputs, inputs)  # blocks until at least one fd is ready
    for s in readable:  # each s is a socket
        if s is server:  # the server socket itself sits in inputs; if it is ready, a new connection has arrived
            # accept the new connection
            conn, client_addr = s.accept()
            print("new connection from", client_addr)
            conn.setblocking(0)
            # don't recv here (that could block the whole program); hand the new
            # connection to select via inputs -- once the client sends data, this
            # fd becomes readable and we receive it in a later loop iteration
            inputs.append(conn)
            message_queues[conn] = queue.Queue()  # received data is queued here and sent back later
        else:  # s is not the server, so it must be a connection established with a client
            # the client's data has arrived; receive it here
            data = s.recv(1024)
            if data:
                print("received data from [%s]:" % s.getpeername()[0], data)
                message_queues[s].put(data)  # queue the data first; it is returned to the client later
                if s not in outputs:
                    outputs.append(s)  # don't reply immediately, so handling of other clients isn't affected
            else:  # receiving no data means the client disconnected
                print("client disconnected", s)
                if s in outputs:
                    outputs.remove(s)  # clean up the broken connection
                inputs.remove(s)  # clean up the broken connection
                del message_queues[s]  # clean up the broken connection
for s in writeable:
        try:
next_msg = message_queues[s].get_nowait()
except queue.Empty:
print("client [%s]" %s.getpeername()[0], "queue is empty..")
outputs.remove(s)
else:
print("sending msg to [%s]"%s.getpeername()[0], next_msg)
s.send(next_msg.upper())
    for s in exceptional:
print("handling exception for ", s.getpeername())
inputs.remove(s)
if s in outputs:
outputs.remove(s)
s.close()
del message_queues[s] | [
"[email protected]"
] | |
315e0a44b6c237ed7a6e9ed6807d3222de0857a3 | 7837cd1bee1a9abd623600cf30c2f462da48d558 | /aaa.py | 1123a79ae464804cd597a4e45e9d9c4e5f526712 | [] | no_license | hal1932/astor_test | 8285b3b8c1fa187b7cd3c8d147c8a75d8e4ba207 | e14c7de55bb6e947e41387d33fff5286bbea4570 | refs/heads/master | 2021-08-30T09:37:33.083995 | 2017-12-17T08:36:12 | 2017-12-17T08:36:12 | 114,521,531 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 734 | py | # encoding: utf-8
import functools
def deco1(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
print 'deco1 start'
func(*args, **kwargs)
print 'deco1 end'
return wrapper
def deco2(*arg, **kwarg):
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
print 'deco2 start'
func(*args, **kwargs)
print 'deco2 end'
return wrapper
return decorator
def func1(arg1):
print arg1
x = 1
print x
@deco1
def func2(arg):
print arg
@deco2('hoge', 1, a=2.0)
def func3(arg):
print arg
def main():
func1('aaa')
func2('bbb')
func3('ccc')
if __name__ == '__main__':
main()
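# Expected output when run (Python 2):
#     aaa
#     1
#     deco1 start
#     bbb
#     deco1 end
#     deco2 start
#     ccc
#     deco2 end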
| [
"[email protected]"
] | |
1f8b127b062ecf02e219e709909e74fc07adc91a | 58e408afaeb04b92c89980266c2ca906386e75e8 | /n_primo/solutions/correct.py | 3a508988ca33c7ea0dd3307506c1ad673e41d1ba | [] | no_license | Angelox547/TuringArena-PCTO | b45fee33c886bb6a5216bd78d6777617852c1222 | 5db78428caf475eea66f60c3edeaa8e1dfd71e67 | refs/heads/master | 2020-05-31T06:41:37.235099 | 2019-09-14T13:13:50 | 2019-09-14T13:13:50 | 190,147,597 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 124 | py | def primo(a):
i = 2
while i < a/2+1:
if a % i == 0:
return 0
i += 1
return 1
| [
"[email protected]"
] | |
760c46b1f182472c11a3cb0026781c521fddb142 | a943cb6da95ec1e06cb480887ba1062a5783527f | /2012-aqs/figures/plot-smh-norris.py | 262432f7ea056293f457ca60b89fe0a54119ed39 | [] | no_license | andycasey/papers | 1b2c882c20b0c65b5899d70dc95825ec53cc9fe2 | 3d585ad4b6b1c3b40227185fd7b22ea9bdeb8e02 | refs/heads/master | 2021-01-19T17:24:48.788580 | 2013-08-13T08:51:02 | 2013-08-13T08:51:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,155 | py | import numpy as np
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
wavelength, smh_ew, norris_ew = np.loadtxt('SMH-Norris-comparison.data', usecols=(0, 1, 2, ), unpack=True)
fig = plt.figure(figsize=(6,7))
fig.subplots_adjust(hspace=0.0, wspace=0.0)
gs = gridspec.GridSpec(2, 1, height_ratios=[1, 2])
ax1 = fig.add_subplot(gs[0])
#ax1 = plt.subplot2grid((3, 1), (0, 0))
ax1.scatter(smh_ew, smh_ew - norris_ew, facecolor='none', edgecolor='k', marker='+')
ax1.plot([0, 200], [0, 0], 'k-', zorder=-1)
A = np.vstack([smh_ew, np.ones(len(norris_ew))]).T
m, c = np.linalg.lstsq(A, smh_ew - norris_ew)[0]
x = np.array([np.min(smh_ew), np.max(smh_ew)])
ax1.plot(x, m * x + c, 'k:')
ylim = np.max(np.abs(np.array(ax1.get_ylim())))
ax1.set_ylim(-15, 15)
ax1.xaxis.set_visible(False)
ax1.set_ylabel('$\Delta{}W_\lambda$ [m$\AA{}$]')
ax2 = fig.add_subplot(gs[1], sharex=ax1)
#ax2 = plt.subplot2grid((3, 1), (1, 0), rowspan=2)
ax2.scatter(smh_ew, norris_ew, facecolor='none', edgecolor='k', marker='+')
A = np.vstack([norris_ew, np.ones(len(norris_ew))]).T
m, c = np.linalg.lstsq(A, smh_ew)[0]
x = np.array([0, 200])
ax2.plot(x, x, 'k-', zorder=-1)
x = np.array([np.min(smh_ew), np.max(smh_ew)])
ax2.plot(x, m * x + c, 'k:')
# Plot an error cone
error = 10 # percent
bounds = np.array([0, 160])
#ax2.plot(bounds, bounds * (1 + error/100.), '-', c='#aaaaaa', zorder=-5)
#ax2.plot(bounds, bounds * (1 - error/100.), '-', c='#aaaaaa', zorder=-5)
ax1.set_xlim(bounds)
ax2.set_xlim(bounds)
ax2.set_ylim(bounds)
ax2.set_xlabel('$W_\lambda$ (This work, automatic) [m$\AA{}$]')
ax2.set_ylabel('$W_\lambda$ (Norris et al. 1996) [m$\AA{}$]')
ax2.get_yticklabels()[-1].set_visible(False)
ax1.get_yticklabels()[0].set_visible(False)
ax1.get_yticklabels()[-1].set_visible(False)
ax1.text(5, 10, '$\langle{}\Delta{}W_\lambda\\rangle{}\,=\,-0.64\,\pm\,2.78\,$m${\AA}$', color='k', verticalalignment='center')
ax2.text(5, 150, "$a_0\,=\,%1.2f$\n$a_1\,=\,%1.2f$\n$N\,=\,%i$" % (c, m, len(smh_ew)), verticalalignment='top')
#ax1.set_title('%i lines in HD 140283' % (len(smh_ew), ))
plt.savefig('smh-norris.pdf')
plt.savefig('smh-norris.eps')
| [
"[email protected]"
] | |
6298875d8e11878aa23517f122c8a75e9d106d46 | 38fff7bdefd8d62a740d51329b50d0e1e49258bb | /projects/smart_open/fuzz_zip.py | 3a7f08c09ad89f312bad1a8251882d276259b866 | [
"Apache-2.0"
] | permissive | google/oss-fuzz | 026384c2ada61ef68b147548e830f60730c5e738 | f0275421f84b8f80ee767fb9230134ac97cb687b | refs/heads/master | 2023-08-31T23:30:28.157702 | 2023-08-31T21:49:30 | 2023-08-31T21:49:30 | 63,809,205 | 9,438 | 2,315 | Apache-2.0 | 2023-09-14T20:32:19 | 2016-07-20T19:39:50 | Shell | UTF-8 | Python | false | false | 1,605 | py | #!/usr/bin/python3
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import atheris
import sys
import os
with atheris.instrument_imports():
from smart_open import open
import zipfile
import tempfile
def TestInput(data):
if len(data) < 10:
return
fdp = atheris.FuzzedDataProvider(data)
tmp = tempfile.NamedTemporaryFile(prefix=fdp.ConsumeString(10), suffix=fdp.ConsumeString(4), delete=False)
filestr = fdp.ConsumeString(100)
with open(tmp.name, 'wb') as f:
with zipfile.ZipFile(f, 'w') as zip:
zip.writestr(fdp.ConsumeString(10), filestr)
zip.writestr(fdp.ConsumeString(10), filestr)
with open(tmp.name, 'rb') as f:
with zipfile.ZipFile(f) as zip:
for info in zip.infolist():
file_bytes = zip.read(info.filename)
assert filestr == file_bytes.decode('utf-8')
os.unlink(tmp.name)
def main():
atheris.Setup(sys.argv, TestInput, enable_python_coverage=True)
atheris.instrument_all()
atheris.Fuzz()
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
09b9cc6b9508e65c23efd41125168aad565a87ae | 67084751337f327092049a61b6d8c630cd3767de | /wrap_markdown_content/WrapMarkdown.py | 92e05964ce3197a455f52ff7c71894ba486065dd | [
"MIT"
] | permissive | habereet/awesomeScripts | 0f0a33d9a35de8b4449c1263db4407beaf0178fa | 2d77f3619314aa7840fed57e0cf451fe5c1a70a9 | refs/heads/master | 2023-01-05T01:29:30.063154 | 2020-10-31T00:36:16 | 2020-10-31T00:36:16 | 303,189,908 | 0 | 0 | MIT | 2020-10-11T18:48:07 | 2020-10-11T18:48:06 | null | UTF-8 | Python | false | false | 1,286 | py | import sys
class Buffer:
def __init__(self, filename: str):
self.input_file = open(filename, 'r')
self.all_lines = self.input_file.readlines()
    def trim(self, ncolumns: int):
        for line in self.all_lines:
            if line == '\n' or line == '\t':
                print(line, end='')
            elif len(line) < ncolumns:
                print(line)
            else:
                nCharsInline = 0
                for word in line.split(' '):
                    if nCharsInline + len(word) > ncolumns:
                        if len(word) > ncolumns:
                            # a word longer than the limit gets its own line
                            print(word)
                            nCharsInline = 0
                            continue
                        # wrap before printing the next word
                        print('\n', end='')
                        nCharsInline = 0
                    print(word, end=' ')
                    nCharsInline += len(word) + 1
def Main(args: list):
if len(args) < 3:
print('Missing parameters.')
print(f'Use: python {args[0]} <maxcolumns> <path/to/file>')
exit(0)
try:
ncol = int(args[1])
except TypeError:
print(f'Not possible to convert "{args[1]}" to integer')
exit(1)
buff = Buffer(args[2])
buff.trim(ncol)
if __name__ == '__main__':
Main(sys.argv)
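# Usage: python WrapMarkdown.py 72 path/to/file.md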
| [
"[email protected]"
] | |
bfcc4f82ae5fd44b4414bb887094046c13bb3e10 | c0fad90611a6e943277c3d79eeb48ccd5f0d0a88 | /29divide.py | 6cee6413834a3e4bbc05b6458fb1114fdad5b765 | [] | no_license | lmb633/leetcode | e2da31984af07b9e16787f4d57f82dab2dcb551a | d91568d245dd8fb66f46ff73737cbad974f490a6 | refs/heads/master | 2021-07-19T16:07:40.864854 | 2021-02-24T10:57:40 | 2021-02-24T10:57:40 | 243,146,182 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 572 | py | class Solution(object):
def divide(self, dividend, divisor):
if dividend == 0:
return 0
if dividend == -2 ** 31 and divisor == -1:
return 2 ** 31 - 1
flag = 1
        if (dividend ^ divisor) < 0:  # signs differ, so the quotient is negative
flag = -1
dividend = abs(dividend)
divisor = abs(divisor)
result = 0
for i in range(31, -1, -1):
if (dividend >> i) >= divisor:
result += (1 << i)
dividend -= divisor << i
return result if flag > 0 else -result
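# Bit-shift long division: repeatedly subtract the largest (divisor << i) that
# still fits; e.g. Solution().divide(10, 3) -> 3 and Solution().divide(7, -3) -> -2.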
| [
"[email protected]"
] | |
06cf24deb1ada46c90e371a9923156badefe36f7 | 1011b26dbae56326653a57acb243a8c678a5cbb4 | /CNN-UNet/Data_functions/plot_functions.py | 8a399671d50084fc0e47b1a7164cd6f0b52a559f | [] | no_license | yxu233/Myelin_cleaned_locally | 2986ab702036d30f815c0fda51cecef6e1ded9ef | 735f2680b3ca0b6f60fe3c7c21a443d8016183fa | refs/heads/master | 2022-11-23T17:11:50.619420 | 2020-07-29T05:01:55 | 2020-07-29T05:01:55 | 283,311,686 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,082 | py | # -*- coding: utf-8 -*-
"""
Created on Sun Dec 31 16:16:39 2017
@author: Tiger
"""
import tensorflow as tf
import math
import pylab as mpl
import numpy as np
import time
import matplotlib
matplotlib.use('TkAgg')
import matplotlib.pyplot as plt
from PIL import Image
import random
from skimage import measure
""" ADDS TEXT TO IMAGE and saves the image """
def add_text_to_image(all_fibers, overlay_im, filename='default.png', filename_overlay ='default.png', resolution=800):
#fiber_img = Image.fromarray((all_fibers *255).astype(np.uint16)) # ORIGINAL, for 8GB CPU
fiber_img = (all_fibers*255).astype(np.uint16)
plt.figure(80, figsize=(12,10)); plt.clf(); plt.imshow(fiber_img)
plt.axis('off')
plt.figure(81, figsize=(12,10)); plt.clf(); plt.imshow(overlay_im)
plt.axis('off')
# PRINT TEXT ONTO IMAGE
binary_all_fibers = all_fibers > 0
labelled = measure.label(binary_all_fibers)
cc_overlap = measure.regionprops(labelled, intensity_image=all_fibers)
# Make a list of random colors corresponding to all the cells
list_fibers = []
for Q in range(int(np.max(all_fibers) + 1)):
color = [random.randint(0,255)/256, random.randint(0,255)/256, random.randint(0,255)/256]
list_fibers.append(color)
for Q in range(len(cc_overlap)):
overlap_coords = cc_overlap[Q]['coords']
new_num = cc_overlap[Q]['MinIntensity']
#if cell_num != new_num:
#color = [random.randint(0,255)/256, random.randint(0,255)/256, random.randint(0,255)/256]
#cell_num = new_num
color = list_fibers[int(new_num)]
plt.figure(80)
plt.text(overlap_coords[0][1], overlap_coords[0][0], str(int(new_num)), fontsize= 2, color=color)
plt.figure(81)
plt.text(overlap_coords[0][1], overlap_coords[0][0], str(int(new_num)), fontsize= 2, color=color)
#plt.savefig(filename, dpi = resolution)
plt.savefig(filename_overlay, dpi = resolution)
""" Scales the normalized images to be within [0, 1], thus allowing it to be displayed
"""
def show_norm(im):
m,M = im.min(),im.max()
plt.imshow((im - m) / (M - m))
plt.show()
""" Originally from Intro_to_deep_learning workshop
"""
def plotOutput(layer,feed_dict,fieldShape=None,channel=None,figOffset=1,cmap=None):
# Output summary
W = layer
wp = W.eval(feed_dict=feed_dict);
if len(np.shape(wp)) < 4: # Fully connected layer, has no shape
temp = np.zeros(np.product(fieldShape)); temp[0:np.shape(wp.ravel())[0]] = wp.ravel()
fields = np.reshape(temp,[1]+fieldShape)
else: # Convolutional layer already has shape
wp = np.rollaxis(wp,3,0)
features, channels, iy,ix = np.shape(wp) # where "features" is the number of "filters"
if channel is not None:
fields = wp[:,channel,:,:]
else:
fields = np.reshape(wp,[features*channels,iy,ix]) # all to remove "channels" axis
perRow = int(math.floor(math.sqrt(fields.shape[0])))
perColumn = int(math.ceil(fields.shape[0]/float(perRow)))
fields2 = np.vstack([fields,np.zeros([perRow*perColumn-fields.shape[0]] + list(fields.shape[1:]))]) # adds more zero filters...
tiled = []
for i in range(0,perColumn*perRow,perColumn):
tiled.append(np.hstack(fields2[i:i+perColumn])) # stacks horizontally together ALL the filters
tiled = np.vstack(tiled) # then stacks itself on itself
if figOffset is not None:
mpl.figure(figOffset); mpl.clf();
mpl.imshow(tiled,cmap=cmap); mpl.title('%s Output' % layer.name); mpl.colorbar();
""" Plot layers
"""
def plotLayers(feed_dict, L1, L2, L3, L4, L5, L6, L8, L9, L10):
plt.figure('Down_Layers');
plt.clf()
plt.subplot(221); plotOutput(L1,feed_dict=feed_dict,cmap='inferno',figOffset=None);
plt.subplot(222); plotOutput(L2,feed_dict=feed_dict,cmap='inferno',figOffset=None);
plt.subplot(233); plotOutput(L3,feed_dict=feed_dict,cmap='inferno',figOffset=None);
plt.subplot(234); plotOutput(L5,feed_dict=feed_dict,cmap='inferno',figOffset=None);
plt.subplot(223); plotOutput(L4,feed_dict=feed_dict,cmap='inferno',figOffset=None);
plt.pause(0.05)
plt.figure('Up_Layers');
plt.clf()
plt.subplot(221); plotOutput(L6,feed_dict=feed_dict,cmap='inferno',figOffset=None);
plt.subplot(222); plotOutput(L8,feed_dict=feed_dict,cmap='inferno',figOffset=None);
plt.subplot(223); plotOutput(L9,feed_dict=feed_dict,cmap='inferno',figOffset=None);
plt.subplot(224); plotOutput(L10,feed_dict=feed_dict,cmap='inferno',figOffset=None);
plt.pause(0.05);
""" Plots global and detailed cost functions
"""
def plot_cost_fun(plot_cost, plot_cost_val, plot_cost_val_NO=None):
""" Graph global loss
"""
plt.figure(18); plt.clf();
plt.plot(plot_cost, label='Training'); plt.title('Global Loss')
plt.ylabel('Loss'); plt.xlabel('Epochs'); plt.pause(0.05)
# cross-validation
plt.figure(18); plt.plot(plot_cost_val, label='Cross_validation'); plt.pause(0.05)
plt.legend(loc='upper left');
""" Graph detailed plot
"""
last_loss = len(plot_cost)
start = 0
if last_loss < 50:
start = 0
elif last_loss < 200:
start = last_loss - 50
elif last_loss < 500:
start = last_loss - 200
elif last_loss < 1500:
start = last_loss - 500
else:
start = last_loss - 1500
plt.close(19);
x_idx = list(range(start, last_loss))
plt.figure(19); plt.plot(x_idx,plot_cost[start:last_loss], label='Training'); plt.title("Detailed Loss");
plt.figure(19); plt.plot(x_idx,plot_cost_val[start:last_loss],label='Cross_validation');
plt.legend(loc='upper left');
plt.ylabel('Loss'); plt.xlabel('Epochs'); plt.pause(0.05)
if plot_cost_val_NO is not None:
plt.figure(18); plt.plot(plot_cost_val_NO, label='Cross_validation_NO'); plt.pause(0.05)
plt.figure(19); plt.plot(x_idx, plot_cost_val_NO[start:last_loss], label='Cross_validation_NO'); plt.pause(0.05)
""" Plots global and detailed cost functions
"""
def plot_jaccard_fun(plot_jaccard, plot_jaccard_val=False):
""" Graph global jaccard
"""
plt.figure(21); plt.clf();
plt.plot(plot_jaccard, label='Jaccard'); plt.title('Jaccard')
if plot_jaccard_val:
plt.plot(plot_jaccard_val, label='Cross Validation Jaccard');
plt.ylabel('Jaccard'); plt.xlabel('Epochs');
plt.legend(loc='upper left'); plt.pause(0.05)
def plot_overlay(plot_cost, plot_cost_val, plot_jaccard, plot_cost_val_NO=None):
""" Graph global loss
"""
plt.figure(18);
#plt.clf();
plt.plot(plot_cost, label='Training_NO_W'); plt.title('Global Loss')
plt.ylabel('Loss'); plt.xlabel('Epochs'); plt.pause(0.05)
# cross-validation
plt.figure(18); plt.plot(plot_cost_val, label='Cross_validation_NO_W'); plt.pause(0.05)
plt.legend(loc='upper left');
""" Graph detailed plot
"""
last_loss = len(plot_cost)
start = 0
if last_loss < 50:
start = 0
elif last_loss < 200:
start = last_loss - 50
elif last_loss < 500:
start = last_loss - 200
elif last_loss < 1500:
start = last_loss - 500
else:
start = last_loss - 1500
#plt.close(19);
x_idx = list(range(start, last_loss))
plt.figure(19); plt.plot(x_idx,plot_cost[start:last_loss], label='Training_NO_W'); plt.title("Detailed Loss");
plt.figure(19); plt.plot(x_idx,plot_cost_val[start:last_loss],label='Cross_validation_NO_W');
plt.legend(loc='upper left');
plt.ylabel('Loss'); plt.xlabel('Epochs'); plt.pause(0.05)
if plot_cost_val_NO is not None:
plt.figure(18); plt.plot(plot_cost_val_NO, label='Cross_validation_NO'); plt.pause(0.05)
plt.figure(19); plt.plot(x_idx, plot_cost_val_NO[start:last_loss], label='Cross_validation_NO'); plt.pause(0.05)
plt.figure(21);
#plt.clf();
plt.plot(plot_jaccard, label='Jaccard_NO_W'); plt.title('Jaccard')
plt.ylabel('Jaccard'); plt.xlabel('Epochs');
plt.legend(loc='upper left'); plt.pause(0.05)
""" Plots the moving average that is much smoother than the overall curve"""
def calc_moving_avg(plot_data, num_pts = 20, dist_points=100):
new_plot = []
for T in range(0, len(plot_data)):
avg_points = []
for i in range(-dist_points, dist_points):
if T + i < 0:
continue;
elif T + i >= len(plot_data):
break;
else:
avg_points.append(plot_data[T+i])
mean_val = sum(avg_points)/len(avg_points)
new_plot.append(mean_val)
return new_plot
def change_scale_plot():
multiply = 1000
font_size = 11
legend_size = 11
plt.rcParams.update({'font.size': 9})
"""Getting back the objects"""
plot_cost = load_pkl(s_path, 'loss_global.pkl')
plot_cost_val = load_pkl(s_path, 'loss_global_val.pkl')
plot_jaccard = load_pkl(s_path, 'jaccard.pkl')
x_idx = list(range(0, len(plot_cost) * multiply, multiply));
plt.figure(19); plt.plot(x_idx,plot_cost, label='Training_weighted');
#plt.title("Detailed Loss");
plt.figure(19); plt.plot(x_idx,plot_cost_val,label='Validation_weighted');
plt.legend(loc='upper right');
plt.ylabel('Loss', fontsize = font_size); plt.xlabel('Epochs', fontsize = font_size); plt.pause(0.05)
x_idx = list(range(0, len(plot_jaccard) * multiply, multiply));
plt.figure(20); plt.plot(x_idx,plot_jaccard, label='Validation_weighted');
#plt.title("Detailed Loss");
plt.ylabel('Jaccard', fontsize = font_size); plt.xlabel('Epochs', fontsize = font_size); plt.pause(0.05)
plt.legend(loc='upper left');
"""Getting back the objects"""
plot_cost_noW = load_pkl(s_path, 'loss_global_no_W.pkl')
plot_cost_val_noW = load_pkl(s_path, 'loss_global_val_no_W.pkl')
plot_jaccard_noW = load_pkl(s_path, 'jaccard_no_W.pkl')
x_idx = list(range(0, len(plot_cost_noW) * multiply, multiply));
plt.figure(19); plt.plot(x_idx,plot_cost_noW, label='Training_no_weight');
#plt.title("Loss");
plt.figure(19); plt.plot(x_idx,plot_cost_val_noW,label='Validation_no_weight');
plt.legend(loc='upper right', prop={'size': legend_size});
x_idx = list(range(0, len(plot_jaccard_noW) * multiply, multiply));
plt.figure(20); plt.plot(x_idx,plot_jaccard_noW, label='Validation_no_weight');
#plt.title("Jaccard");
plt.legend(loc='upper left', prop={'size': legend_size});
""" Calculate early stopping beyond 180,000 """
plot_short = plot_cost_val[30000:-1]
hist_loss = plot_short
patience_cnt = 0
for epoch in range(len(plot_short)):
# ...
# early stopping
patience = 100
min_delta = 0.02
if epoch > 0 and hist_loss[epoch-1] - hist_loss[epoch] > min_delta:
patience_cnt = 0
else:
patience_cnt += 1
if patience_cnt > patience:
print("early stopping...")
print(epoch * 5 + 30000 * 5)
break
""" 204680 """
""" MOVING AVERAGE """
num_pts = 10
dist_points = 20
mov_cost = calc_moving_avg(plot_cost, num_pts=num_pts, dist_points=dist_points)
mov_cost_val = calc_moving_avg(plot_cost_val, num_pts=num_pts, dist_points=dist_points)
mov_jaccard = calc_moving_avg(plot_jaccard, num_pts=num_pts, dist_points=dist_points)
font_size = 11
plt.rcParams.update({'font.size': 10})
x_idx = list(range(0, len(mov_cost) * multiply, multiply));
plt.figure(21); plt.plot(x_idx,mov_cost, label='Training_weighted'); plt.title("Detailed Loss");
plt.figure(21); plt.plot(x_idx,mov_cost_val,label='Validation_weighted');
plt.legend(loc='upper left');
plt.ylabel('Loss', fontsize = font_size); plt.xlabel('Epochs', fontsize = font_size); plt.pause(0.05)
x_idx = list(range(0, len(mov_jaccard) * multiply, multiply));
plt.figure(22); plt.plot(x_idx,mov_jaccard, label='Validation_weighted'); plt.title("Detailed Jaccard");
plt.ylabel('Jaccard', fontsize = font_size); plt.xlabel('Epochs', fontsize = font_size); plt.pause(0.05)
plt.legend(loc='upper left');
"""Getting back the objects"""
num_pts = 10
dist_points = 400
mov_cost_noW = calc_moving_avg(plot_cost_noW, num_pts=num_pts, dist_points=dist_points)
mov_cost_val_noW = calc_moving_avg(plot_cost_val_noW, num_pts=num_pts, dist_points=dist_points)
mov_jaccard_noW = calc_moving_avg(plot_jaccard_noW, num_pts=num_pts, dist_points=dist_points)
x_idx = list(range(0, len(mov_cost_noW) * multiply, multiply));
plt.figure(21); plt.plot(x_idx,mov_cost_noW, label='Training_no_weight'); plt.title("Loss");
plt.figure(21); plt.plot(x_idx,mov_cost_val_noW,label='Validation_no_weight');
plt.legend(loc='upper left');
x_idx = list(range(0, len(mov_jaccard_noW) * multiply, multiply));
plt.figure(22); plt.plot(x_idx,mov_jaccard_noW, label='Validation_no_weight'); plt.title("Jaccard");
plt.legend(loc='upper left');
""" Plot the average for the NEWEST MyQz11 + ClassW + No_W"""
def change_scale_plot2():
#s_path = 'C:/Users/Tiger/Anaconda3/AI stuff/MyelinUNet_new/Checkpoints/ALL_FOR_PLOT/'
s_path = 'D:/Tiger/AI stuff/MyelinUNet/Checkpoints/ALL_FOR_PLOT/'
num_pts = 10
multiply = 10
font_size = 11
legend_size = 11
plt.rcParams.update({'font.size': 9})
"""Getting back the objects"""
#plot_cost = load_pkl(s_path, 'loss_global.pkl')
plot_cost_val_noW = load_pkl(s_path, 'loss_global_sW_1_rotated.pkl')
plot_jaccard_noW = load_pkl(s_path, 'jaccard_sW_1_rotated.pkl')
"""Getting back the objects"""
#plot_cost_noW = load_pkl(s_path, 'loss_global_no_W.pkl')
plot_cost_val = load_pkl(s_path, 'loss_global_MyQ_2_not_rotated.pkl')
plot_jaccard = load_pkl(s_path, 'jaccard_MyQ_2_not_rotated.pkl')
"""Getting back the objects"""
##plot_cost_noW = load_pkl(s_path, 'loss_global_no_W.pkl')
#plot_cost_val_sW = load_pkl(s_path, 'loss_global_MyQz11_sW_batch2.pkl')
#plot_jaccard_sW = load_pkl(s_path, 'jaccard_MyQz11_sW_batch2.pkl')
font_size = 11
plt.rcParams.update({'font.size': 10})
""" no-weight """
dist_points_loss = 3
dist_points_jacc = 25
multiply = 1000
#mov_cost_noW = calc_moving_avg(plot_cost_noW, num_pts=num_pts, dist_points=dist_points)
mov_cost_val_noW = calc_moving_avg(plot_cost_val_noW, num_pts=num_pts, dist_points=dist_points_loss)
mov_jaccard_noW = calc_moving_avg(plot_jaccard_noW, num_pts=num_pts, dist_points=dist_points_jacc)
plot_single_cost(mov_cost_val_noW, multiply, 'Validation rotated', 'Loss')
plot_single_jacc(mov_jaccard_noW, multiply, 'Validation rotated', 'Jaccard')
""" class weight """
multiply = 1000
#mov_cost = calc_moving_avg(plot_cost, num_pts=num_pts, dist_points=dist_points)
mov_cost_val = calc_moving_avg(plot_cost_val, num_pts=num_pts, dist_points=dist_points_loss)
mov_jaccard = calc_moving_avg(plot_jaccard, num_pts=num_pts, dist_points=dist_points_jacc)
plot_single_cost(mov_cost_val[0:400], multiply, 'Validation no rotate', 'Loss')
plot_single_jacc(mov_jaccard[0:400], multiply, 'Validation no rotate', 'Jaccard')
""" spatial W """
multiply = 1000
#mov_cost_noW = calc_moving_avg(plot_cost_noW, num_pts=num_pts, dist_points=dist_points)
mov_cost_val_noW = calc_moving_avg(plot_cost_val_sW, num_pts=num_pts, dist_points=dist_points_loss)
mov_jaccard_noW = calc_moving_avg(plot_jaccard_sW, num_pts=num_pts, dist_points=dist_points_jacc)
plot_single_cost(mov_cost_val_noW, multiply, 'Validation spatial weight', 'Loss')
plot_single_jacc(mov_jaccard_noW, multiply, 'Validation spatial weight', 'Jaccard')
def plot_single_cost(data, multiply, label, title):
x_idx = list(range(0, len(data) * multiply, multiply));
plt.figure(21); plt.plot(x_idx,data, label=label); plt.title(title);
plt.legend(loc='upper left');
def plot_single_jacc(data, multiply, label, title):
x_idx = list(range(0, len(data) * multiply, multiply));
plt.figure(22); plt.plot(x_idx,data, label=label); plt.title(title);
plt.legend(loc='upper right');
| [
"[email protected]"
] | |
5eff543f2b5ee3686888c2c694d39b8bade76fd1 | 9a817d9063fd0e4c92b8b4e59122b1f3eb1ce31a | /manage.py | 0e57fb9345c3da83e72083974d69dcc80850c984 | [] | no_license | froglio/pazdpaula_website | fcaac08197b21443e1b6509c948ed8485e0be739 | 4ffcefc86f216210c9182ace5550a9dabeada550 | refs/heads/master | 2023-04-06T01:07:42.270286 | 2021-05-03T17:29:21 | 2021-05-03T17:29:21 | 330,445,061 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 665 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
"""Run administrative tasks."""
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'pazdpaula.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
5a1b0561f000bb1f98b804d4b36c611a61e18f75 | 03c6b643fcd652d58c9e6a02358de36081892779 | /main.py | 5f19c6e238741ddfc00dcf46901ee02ecee5390a | [
"MIT"
] | permissive | moeKiwiSAMA/tensor-snake | bae49b8a203da288a24aeafa5de2cd5bdd609a53 | c91f5b63d1d3bb9929dfdf34f09ccf99ee947486 | refs/heads/master | 2020-07-08T13:01:14.703573 | 2019-08-21T23:49:53 | 2019-08-21T23:49:53 | 203,680,726 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,413 | py | import numpy as np
import time
import random
class snake:
def __init__(self, height, width):
self.height = height
self.width = width
self.error = 0
self.center = (int(height / 2), int(width / 2))
self.location = []
self.foodlocation = (0, 0)
self.dirct = "l"
self.matrix = np.zeros([height,width], int)
self.start()
pass
def spawn(self, place):
try:
self.matrix.itemset(place, 1)
self.location.insert(0, place)
print(self.location)
except:
self.error = 1
pass
def suicide(self):
lo = len(self.location) - 1
print("remove", self.location[lo])
self.matrix.itemset(self.location[lo], 0)
self.location.pop()
def start(self):
self.spawn((int(self.height / 2), int(self.width / 2) - 1))
self.spawn((int(self.height / 2), int(self.width / 2)))
self.spawn((int(self.height / 2), int(self.width / 2) + 1))
self.genFood()
def genFood(self):
while True:
self.foodlocation = (random.randint(0, self.height), random.randint(0, self.width))
if self.foodlocation not in self.location:
break
self.matrix.itemset(self.foodlocation, 2)
def checkspawn(self, x, y):
if x > self.height - 1 or y > self.width - 1:
self.error = 1
else:
self.spawn((x, y))
if self.foodlocation != (x, y):
self.suicide()
else:
self.genFood()
def run(self):
if self.dirct == "h":
self.checkspawn(self.location[0][0], self.location[0][1] - 1)
elif self.dirct == "j":
self.checkspawn(self.location[0][0] + 1, self.location[0][1])
elif self.dirct == "k":
self.checkspawn(self.location[0][0] - 1, self.location[0][1])
else: # self.dirct == "l"
self.checkspawn(self.location[0][0], self.location[0][1] + 1)
if len(self.location)!=len(set(self.location)):
self.error = 1
def refresh(self, dirct):
self.dirct = dirct
self.run()
print(self.matrix)
if __name__ == '__main__':
s = snake(20, 20)
while True:
if s.error == 1:
print("error")
break
s.refresh("j")
time.sleep(0.1) | [
"[email protected]"
] | |
0c9265f590f32c9ddf9f7a2e559b410b0c5e1a19 | 013df0289a4effea55545c1a41ddb235a68bd6ce | /max_heap.py | 3c1aaad3bf0e7c7d6d157bac0470f19ceebd55bd | [] | no_license | WaspVae/Study | 9dc41d0c73b9b5fc033a7a03a88b933881973e34 | 2004185e67e984f4106215e97fc5a9ce49d14cab | refs/heads/master | 2021-02-09T18:05:06.867297 | 2020-08-25T15:10:55 | 2020-08-25T15:10:55 | 244,310,935 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 941 | py | class MaxHeap:
def __init__(self):
self.count = 0
self.data = []
def insert(self, item):
self.data.append(item)
self.count += 1
self.shift_up(self.count)
def extract_max(self):
self.data[0], self.data[-1] = self.data[-1], self.data[0]
self.count -= 1
self.shift_down(1)
self.data.pop(-1)
def shift_up(self, k):
while k > 1 and self.data[k - 1] > self.data[k // 2 - 1]:
self.data[k - 1], self.data[k // 2 - 1] = self.data[k // 2 - 1], self.data[k - 1]
k //= 2
def shift_down(self, k):
while 2 * k <= self.count:
j = 2 * k
if j + 1 <= self.count and self.data[j - 1] < self.data[j]:
j += 1
if self.data[j - 1] < self.data[k - 1]:
break
self.data[k - 1], self.data[j - 1] = self.data[j - 1], self.data[k - 1]
k = j
| [
"[email protected]"
] | |
ffcff4429028b57bfc8fc992bd72a29135b5bbfc | 417da4d4929162c9738f2eb44bce7a93e6123fab | /scripts/script.py | d9abfb3dae544e25e51cad7ad3ba07eedd81b341 | [] | no_license | apatronl/ProgrammingLanguages | c27ffc09d81036a227c005cf73acd83d598bf45a | 42204997532d28b186c005940d872327363b177d | refs/heads/master | 2021-05-14T10:14:33.189912 | 2018-01-05T16:45:13 | 2018-01-05T16:45:13 | 116,349,196 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,225 | py | import csv
import wikipedia
import urllib.request
from bs4 import BeautifulSoup as BS
import re
pageTitle = "List of programming languages"
nodes = list(wikipedia.page(pageTitle).links)
removeList = ["List of", "Lists of", "Timeline", "Comparison of",
"History of", "Esoteric programming language"]
nodes = [i for i in nodes if not any(r in i for r in removeList)]
base = "https://en.wikipedia.org/wiki/"
def getSoup(n):
try:
with urllib.request.urlopen(base + n) as response:
soup = BS(response.read(), "html.parser")
table = soup.find_all("table", class_="infobox vevent")[0]
return table
except Exception as e:
pass
def getYear(t):
try:
t = t.get_text()
year = t[t.find("appear") : t.find("appear") + 30]
print(re.findall('(\d{4})',year))
# year = re.match(r'.*([1-3][0-9]{3})',year).group(1)
year = re.findall('(\d{4})',year)[0]
return int(year)
except Exception as e:
return "Could not determine year"
def getLinks(t):
try:
table_rows = t.find_all("tr")
for i in range(len(table_rows)):
try:
if table_rows[i].get_text() == "\nInfluenced\n":
out = []
for j in table_rows[i + 1].find_all("a"):
try:
out.append(j["title"])
except:
continue
return out
except:
continue
return
except:
return
edgeList = [["Source","Target"]]
meta = [["Id", "Year", "Label"]]
for n in nodes:
try:
temp = getSoup(n)
except:
continue
try:
influenced = getLinks(temp)
for link in influenced:
if link in nodes:
edgeList.append([n, link])
# print([n + "," + link])
except:
continue
year = getYear(temp)
meta.append([n, year, n])
with open("edge_list.csv", "w") as f:
wr = csv.writer(f)
for e in edgeList:
wr.writerow(e)
with open("metadata.csv", "w") as f2:
wr = csv.writer(f2)
for m in meta:
wr.writerow(m)
| [
"[email protected]"
] | |
6d71558f72f56b692f826f2c54b03347759f5030 | 66b220a4c8c0bfde435f29e3a18cf79f6e7a4c67 | /src/exemplos/01_Dados/02_Operadores/01-subtracao.py | 77f2c292956d5c74e2524a563e94f8fc4d5a83cb | [] | no_license | gnramos/CIC-APC | 089b6d0110394b4db97c23e032394eaefce0aeef | b94fe2dc4840064f1613d24e5d1447d49b9bb8bd | refs/heads/master | 2023-04-15T18:11:27.919896 | 2023-04-05T21:31:03 | 2023-04-05T21:31:03 | 31,514,265 | 42 | 30 | null | 2018-11-20T18:09:10 | 2015-03-01T22:57:39 | C | UTF-8 | Python | false | false | 964 | py | # -*- coding: utf-8 -*-
# @package: 01-subtracao.py
# @author: Guilherme N. Ramos ([email protected])
# @disciplina: Algoritmos e Programação de Computadores
#
# Exemplos de utilização do operador de subtração. Em Python, só é possível
# subtrair valores numéricos.
print('Subtração (numéricos):')
# Escreva o resultado da operação 2 - 1. A subtração de valores inteiros também
# é um valor inteiro.
print(' 2 - 1 =', 2 - 1)
# Escreva o resultado da operação 1 - 2.
print(' 1 - 2 =', 1 - 2)
# Escreva o resultado da operação 2 - 1.0. A subtração de valores reais de
# inteiros é um valor real.
print(' 2 - 1.0 =', 2 - 1.0)
# Escreva o resultado da operação 2.0 - 1. A subtração de valores inteiros de
# reais é um valor real.
print(' 2.0 - 1 =', 2.0 - 1)
# Escreva o resultado da operação 2.0 - 1.0. A subtração de valores reais
# também é um valor real.
print(' 2.0 - 1.0 =', 2.0 - 1.0) | [
"[email protected]"
] | |
66b1007d1dabe0428cbe0ba4c2f82d9ad8aa4dec | cb20ef5b4048457a2e6dca4a4cb45c53c9843744 | /tests/RESTful/testcases/system/test01_usermanager.py | c8459e97ec6e5c06d18b505403753911f74efb0c | [] | no_license | rudecs/openvcloud | 5001b77e8d943427c1bed563f3dcc6b9467936e2 | 12ccce2a54034f5bf5842e000c2cc3d7e22836d8 | refs/heads/master | 2020-03-24T00:00:10.422677 | 2018-11-22T13:41:17 | 2018-11-22T13:41:17 | 142,267,808 | 2 | 1 | null | 2018-07-25T08:02:37 | 2018-07-25T08:02:36 | null | UTF-8 | Python | false | false | 4,341 | py | import time, random, unittest
from testcases import *
from nose_parameterized import parameterized
class UsersTests(TestcasesBase):
def setUp(self):
super().setUp()
self.data, self.response = self.api.system.usermanager.create(provider=None)
self.assertEqual(self.response.status_code, 200, self.response.content)
self.username = self.data['username']
self.CLEANUP['users'].append(self.username)
@parameterized.expand([('exists', 200, 'true'), ('non-exist', 404, 'false')])
def test01_userget_userexists(self, case, response_code, userexists):
""" OVC-001
#. Create user (U1), should succeed.
#. Get user (U1), should succeed.
#. Check if user (U1) exists, should return true.
#. Get not existing user, should fail.
#. Check if non-existing user exists, should return false.
"""
if case == 'exists':
username = self.username
else:
username = self.utils.random_string()
response = self.api.system.usermanager.userget(name=username)
self.assertEqual(response.status_code, response_code, response.content)
response = self.api.system.usermanager.userexists(name=username)
self.assertEqual(response.status_code, 200, response.content)
self.assertEqual(response.text, userexists)
@parameterized.expand([('exists', 200), ('non-exist', 404)])
def test02_edit_user(self, case, response_code):
""" OVC-002
#. Create user (U1), should succeed.
#. Edit user (U1), should succeed.
#. Edit non-existing user, should fail.
"""
if case == 'exists':
username = self.username
else:
username = self.utils.random_string()
data, response = self.api.system.usermanager.editUser(username=username)
self.assertEqual(response.status_code, response_code, response.content)
@parameterized.expand([('exists', 200), ('non-exist', 404)])
def test03_delete_user(self, case, response_code):
""" OVC-003
#. Create user (U1), should succeed.
#. Delete user (U1), should succeed.
#. Delete none existing user, should fail.
"""
if case == 'exists':
username = self.username
else:
username = self.utils.random_string()
response = self.api.system.usermanager.delete(username=username)
self.assertEqual(response.status_code, response_code, response.content)
response = self.api.system.usermanager.userexists(name=username)
self.assertEqual(response.status_code, 200, response.content)
self.assertEqual(response.text, 'false')
class GroupsTests(TestcasesBase):
def setUp(self):
super().setUp()
self.data, self.response = self.api.system.usermanager.createGroup()
self.assertEqual(self.response.status_code, 200, self.response.content)
self.name = self.data['name']
def tearDown(self):
self.api.system.usermanager.deleteGroup(id=self.name)
super().tearDown()
@parameterized.expand([('exists', 200), ('non-exist', 404)])
def test01_edit_group(self, case, response_code):
""" OVC-001
#. Create group (G1), should succeed.
#. Edit group (G1), should succeed.
#. Edit non-existing group, should fail.
"""
if case == 'exists':
name = self.name
else:
name = self.utils.random_string()
data, response = self.api.system.usermanager.editGroup(name=name)
self.assertEqual(response.status_code, response_code, response.content)
@parameterized.expand([('exists', 200), ('non-exist', 404)])
@unittest.skip('https://github.com/0-complexity/openvcloud/issues/1367')
def test02_delete_group(self, case, response_code):
""" OVC-002
#. Create group (G1), should succeed.
#. Delete group (G1), should succeed.
#. Delete non-existing group, should fail.
"""
if case == 'exists':
name = self.name
else:
name = self.utils.random_string()
response = self.api.system.usermanager.deleteGroup(id=name)
self.assertEqual(response.status_code, response_code, response.content) | [
"[email protected]"
] | |
2810d657e2aa3272c2d799f7b7ea8f265d83dd92 | 321afe9ca4a30ff655483901bdb6368cce1bd58b | /catalog/migrations/0019_biditems_time.py | f8acacc12ce34e72ef8a1a024598b0d27ff127b5 | [] | no_license | moses-mugoya/Auction-System | 75456a475a0a76a9c7143f2f039e059f841d204f | 42de3e68fd7a99bdb0598f820b5f8ae6359e972d | refs/heads/main | 2023-02-04T22:58:22.793934 | 2020-12-24T18:05:51 | 2020-12-24T18:05:51 | 324,211,000 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 387 | py | # Generated by Django 2.1.4 on 2019-04-07 17:28
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('catalog', '0018_auto_20190407_1343'),
]
operations = [
migrations.AddField(
model_name='biditems',
name='time',
field=models.BooleanField(default=False),
),
]
| [
"[email protected]"
] | |
f14a001ed3820955e8948479524e1a9256877fb9 | 5e1e258e79eb8f69fa05a3b611c1d1902a69dee0 | /03/part1.py | e419027b6a366f27007dd98a4939c7df0693a22a | [] | no_license | gabrielvicenteYT/aoc-2018-python | 7983a66e05ec99152361e13fb41e90a077f7145c | d278278f68b0e10e0633ab3cf297c3497b5ede8f | refs/heads/master | 2021-10-11T04:22:57.187395 | 2019-01-22T00:20:53 | 2019-01-22T00:20:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 333 | py | import re
from collections import Counter
from itertools import product
usages = Counter()
with open('in.txt') as f:
for line in f:
id_, x, y, width, height = map(int, re.findall('\d+', line))
usages.update(product(range(x, x + width), range(y, y + height)))
print(sum(value > 1 for value in usages.values()))
| [
"[email protected]"
] | |
49b574c1e04a988e0805d696687ea04a0fce251c | 0ae112aff68de5f94c09d004c21834349b9afa8d | /common/match_thread.py | e5bda9c33bbd785a741e82fe5720333cb042af19 | [] | no_license | Maoyun/Azure_Line | 2f319f16663a2061997ff558bc8e955154294bfb | 7dd94e1391f82603bfc76b118b598bf05f8106c9 | refs/heads/master | 2020-03-14T05:18:06.021340 | 2018-04-29T05:08:09 | 2018-04-29T05:08:09 | 131,461,693 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,567 | py | # !/usr/bin/env python
# -*-coding:utf-8 -*-
# author:Dra Date:2018/2/22
import os
import sys
import random
import cv2
import time
import numpy as np
import threading
class MyThread(threading.Thread):
def __init__(self, func, args=()):
super(MyThread, self).__init__()
self.func = func
self.args = args
self.result = None
def run(self):
self.result = self.func(*self.args)
def get_result(self):
try:
return self.result # 如果子线程不使用join方法,此处可能会报没有self.result的错误
except KeyboardInterrupt:
return None
def match(img, model, value): # 模板和查找目标
try:
if model in ["diren1.png", 'diren2.png', 'diren3.png']:
value = 0.63
elif model == 'boss.png':
value = 0.58
print('value:', value)
# 确定模型类型与ID
# 也可以使用in来做判断
targets = ['chuji.png', 'guibi.png', 'qianwang.png', 'ditu8-4.png', 'boss.png', 'chuji2.png', 'queren.png',
'diren2.png', 'diren3.png', 'diren1.png']
modelid = targets.index(model)
# 加载原始图像(RGB)
img_rgb = cv2.imread(img)
# 创建原始图像的灰度版本,所有操作在灰度版本中处理,然后再RGB图像中使用相同坐标还原
img_gray = cv2.cvtColor(img_rgb, cv2.COLOR_BGR2GRAY)
# 加载将要搜索的图像模板
tmp = cv2.imread(model, 0)
# 记录图像模板的尺寸
# w,h = tmp.shape[::-1]
# 查找图像
res = cv2.matchTemplate(img_gray, tmp, cv2.TM_CCOEFF_NORMED)
# 设定阈值
thread = value
# res大于thread
loc = np.where(res >= thread)
px = loc[1]
py = loc[0]
# for pt in zip(*loc[::-1]):
# cv2.rectangle(img_rgb,pt,(pt[0]+w,pt[1]+h),(7,249,151),2)
# cv2.namedWindow('show',0)
# cv2.resizeWindow('show',960,540)
# cv2.moveWindow('show',960,540)
# cv2.imshow("show",img_rgb)
# cv2.waitKey(0)
# cv2.destroyAllWindows()
if len(px) != 0:
print(px[0], py[0], model)
return px[0], py[0], modelid
else:
# print([0, 0, 0])
return [0, 0]
except KeyboardInterrupt:
print('no match')
def pull_screenshot(): # 获取截图
os.system('adb shell screencap -p /sdcard/autoAzure_line.png') # png效果最好
os.system('adb pull /sdcard/autoAzure_line.png .')
def touch(touch_x1, touch_y1): # adb点击目标,添加了随机数避免被ban
cmd = 'adb shell input tap {x1} {y1}'.format(
x1=touch_x1 + random.randint(10, 100),
y1=touch_y1 + random.randint(20, 50),
# x2=touch_x1 + random.randint(0,10),
# y2=touch_y1 + random.randint(0,10),
# duration=random.randint(10,300),
)
os.system(cmd)
return touch_x1, touch_y1
def touch_boss(touch_x1, touch_y1):
cmd = 'adb shell input tap {x1} {y1}'.format(
x1=touch_x1 + random.randint(30, 80),
y1=touch_y1 + random.randint(30, 80),
# x2=touch_x1 + random.randint(0,10),
# y2=touch_y1 + random.randint(0,10),
# duration=random.randint(10,300),
)
os.system(cmd)
return touch_x1, touch_y1
def touch_diren(touch_x1, touch_y1):
cmd = 'adb shell input tap {x1} {y1}'.format(
x1=touch_x1 + random.randint(-10, 50),
y1=touch_y1 + random.randint(-10, 50),
# x2=touch_x1 + random.randint(0,10),
# y2=touch_y1 + random.randint(0,10),
# duration=random.randint(10,300),
)
os.system(cmd)
return touch_x1, touch_y1
def swipe_screen(x1, y1, x2, y2):
cmd = 'adb shell input swipe {x1} {y1} {x2} {y2}'.format(
x1=x1 + random.randint(-10, 20),
y1=y1 + random.randint(-10, 20),
x2=x2 + random.randint(0, 20),
y2=y2 + random.randint(0, 20),
# duration=random.randint(10,300),
)
os.system(cmd)
return x1, y1
def main():
count = 0
while True:
# b = []
modelid = 0
flag = 0
result_a = [0, 0]
begin_time1 = time.time()
pull_screenshot()
print("截图时间", time.time()-begin_time1)
image = "autoAzure_line.png"
targets = ['chuji.png', 'guibi.png', 'qianwang.png', 'ditu8-4.png', 'boss.png', 'chuji2.png', 'queren.png',
'diren2.png', 'diren3.png', 'diren1.png']
# enemy = ['diren2.png', 'diren3.png', 'diren1.png']
value = 0.75
begin_time = time.time()
ts = []
for target in targets:
# print(target)
th = MyThread(match, args=(image, target, value))
th.start()
ts.append(th)
# print(ts)
print('多线程使用时间1:', time.time() - begin_time)
for th in ts: # 获取线程处理结果
th.join()
a = th.get_result()
# print(a, target)
if a[0] != 0 and flag ==0:
if a[2] in range(7):
result_a[0:2] = a[0:2]
modelid = a[2]
print(result_a, modelid)
flag = 1
else:
if result_a[1] >= a[1]: # 预留的处理找到敌人后先打哪个的位置 目前是先打最下面的
result_a[0:2] = result_a[0:2] # 取前两位作为坐标
modelid = a[2]
# result_a = a
# print('####################')
else:
result_a[0:2] = a[0:2]
modelid = a[2]
# print("@@@@@@@@@@@@@@@@@@@@")
print('多线程使用时间2:', time.time() - begin_time)
match_time = time.time() - begin_time
print("匹配时间", match_time)
# if result_a[0] == 0:
# for i in range(len(enemy)):
# result_a = (match(image, enemy[i], 0.6))
# b.append(result_a)
# c = sum(b[0]), sum(b[1]), sum(b[2])
# if sum(c) != 0:
# result_a = b[c.index(min(filter(None, c)))]
# print(filter(None, c))
# else:
# print(2)
# result_a = [250, 250]
if result_a[0] == 0:
result_a = [380, 0]
x = result_a[0]
y = result_a[1]
# touch(xbn ,y)
# print('识别模型:', target)
print('识别结果:', result_a)
# print(b)
if modelid == 4:
print(touch_boss(x, y), 'boss'+targets[modelid])
count += 1
# time.sleep(2)
elif modelid in [7, 8, 9]:
print(touch_diren(x, y), 'diren'+targets[modelid])
elif modelid == 2:
print(touch(x, y))
# flag = 1
elif modelid == 2: # 进入8-4地图向下滑动
# print(touch(x, y))
time.sleep(1)
print(swipe_screen(100, 250, 100, 100))
# flag = 0
else:
print(touch(x, y), '其他'+targets[modelid])
wait = random.random() + 0 # 停0~9秒 指数越高平均间隔越短
print("等待时间", wait)
time.sleep(wait)
print('boss:', count)
print('运行时间:', time.time()-begin_time1)
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
os.system('adb kill -server')
print('bye')
exit(0)
# TODO: 过程中要添加随机点击-OK
# TODO:寻找LV小怪的时候随机指数要反向写-CANCEL
# TODO:改善找到boss后要经过多次点击进入boss,战斗过程中程序失败-to do
# TODO: 改为三通道匹配-CANCEL
# TODO: 决策时间太慢了,boss与小怪的匹配程度太低 了-OK
# TODO: 增加滑动屏幕更改地图位置功能-OK
# TODO: 增加统计战利品功能
# TODO: 改善现有的boss数量统计功能
# TODO: 根据实际情况滑动地图,因为地图并不是在右下角总有敌人
# 0329
# TODO: 根据新出活动更改寻找敌人的方式——同时找三种敌人,并且先打左上角的-OK
# TODO: 打不到就选下一个
# 0427
# TODO: 要求所有搜索做到并发多线程
# 0429
# 解决了关于碰到多个模型同时出现,但是要求优先度的问题
# TODO: 需要解决match到相同模型时需要选择适合目标的情况,还需要屏幕向下滑动一下(8-4)
| [
"[email protected]"
] | |
9016ca060e58ff5454c362fcf89a1cb3825240a8 | 1d3af66da1c599590c431317b02d315af9f68679 | /publishing_system/views.py | 356db6d70bca2c827351a3a10ca8e95f3b63f75b | [] | no_license | huidou74/CMDB-01 | 9ca03755da9a652cfdfcbaaf8acbc720f6c8cfdd | e8f78b7eeac854df11d2528a7efa2cd15137fbe3 | refs/heads/master | 2020-04-29T15:38:42.082743 | 2019-03-18T08:42:28 | 2019-03-18T08:42:28 | 176,235,247 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,135 | py |
# Create your views here.
from django.shortcuts import render, HttpResponse, redirect
from hc import models
from hc_auth import auth_data
import os,sys
from publishing_system.task import add, mian_salt
from celery.result import AsyncResult
from lyh_project.celery import app
from publishing_system.main_salt import MainSalt,UtcTime, sub_run
from publishing_system.salt_run import Salt_Run
def publish(request):
if request.method == 'GET':
user = request.session['login']
obj_user = models.Login.objects.filter(username=user).first()
tem = 'user_auth_info'
if request.session['auto_user']:
menu_dict = request.session.get('menu_dict')
auth_user = request.session['auto_user']
obj_auto = models.UserInfo.objects.filter(name=auth_user)
if obj_auto:
auth_data.menu_auth(obj_auto, request)
userinfo = request.session.get('auto_user')
pos = request.session.get('auto_user_pos')
img = request.session.get('auto_user_img')
username = request.session.get('auto_user')
obj_auth_user = models.UserInfo.objects.filter(name=username).first()
# 渲染所有数据
envs = models.Use_Env.objects.all()
return render(request, './bootstarp/publishing/right_away.html', locals())
else:
user = request.session['login']
obj_user = models.Login.objects.filter(username=user).first()
tem = 'user_auth_info'
if request.session['auto_user']:
menu_dict = request.session.get('menu_dict')
auth_user = request.session['auto_user']
obj_auto = models.UserInfo.objects.filter(name=auth_user)
if obj_auto:
auth_data.menu_auth(obj_auto, request)
userinfo = request.session.get('auto_user')
pos = request.session.get('auto_user_pos')
img = request.session.get('auto_user_img')
username = request.session.get('auto_user')
obj_auth_user = models.UserInfo.objects.filter(name=username).first()
# 上面是权限页面需要的数据
# 下面才是我的逻辑
envs = models.Use_Env.objects.all()
env = request.POST.get('env')
app = request.POST.get('app')
obj_list = models.App.objects.filter(name=app, environment__name=env).first() # 跨表查询
hosts_list = []
if obj_list:
for i in obj_list.hosts.all():
id = i.hostname #id #hc - 01
# print ('id -> ',id)
hosts_list.append({'id':str(i.hostname),'path':str(obj_list.path),'app_name': str(obj_list),'package':str(obj_list.package.pack_path)})
aaa = Salt_Run(hosts_list)
jgs=aaa.run_list()
return render(request, './bootstarp/publishing/right_away.html', locals())
def celery_status(request):
time_list = ['year', 'month', 'day', 'hour', 'minute']
time_dict = {}
envss = models.Use_Env.objects.all()
if request.method == 'GET':
# 页面权限需要的数据
user = request.session['login']
obj_user = models.Login.objects.filter(username=user).first()
tem = 'user_auth_info'
if request.session['auto_user']:
menu_dict = request.session.get('menu_dict')
auth_user = request.session['auto_user']
obj_auto = models.UserInfo.objects.filter(name=auth_user)
if obj_auto:
auth_data.menu_auth(obj_auto, request)
userinfo = request.session.get('auto_user')
pos = request.session.get('auto_user_pos')
img = request.session.get('auto_user_img')
username = request.session.get('auto_user')
obj_auth_user = models.UserInfo.objects.filter(name=username).first()
# 当前应用 的逻辑
x = request.GET.get('x')
y = request.GET.get('y')
envs = request.GET.get('envs')
apps = request.GET.get('apps')
obj_list = models.App.objects.filter(name=apps, environment__name=envs).first() # 跨表查询
hosts_list = []
if obj_list:
for i in obj_list.hosts.all():
id = i.hostname # id #hc - 01
hosts_list.append({'id': str(i.hostname), 'path': str(obj_list.path), 'app_name': str(obj_list),
'package': str(obj_list.package.pack_path)})
try :
if x and y or envs and apps:
try :
after = request.GET.get('after')
if after:
utc = UtcTime(after=int(after))
ctime_x = utc.after_time()
if ctime_x:
# 最核心的代码
if x and y:
ret = add.apply_async(args=[int(x), int(y)], eta=ctime_x)
num = ret.id
elif envs and apps:
ret = mian_salt.apply_async(args=[hosts_list], eta=ctime_x)
num = ret.id
except ValueError:
after_error = '请正确输入数值'
year = request.GET.get('year')
mouth = request.GET.get('month')
day = request.GET.get('day')
hour = request.GET.get('hour')
minute = request.GET.get('minute')
if year and mouth and day and hour and minute:
try:
for i in time_list:
a = request.GET.get(i)
time_dict.update({i: int(a)})
utc = UtcTime(**time_dict)
ctime_x = utc.ctime()
if ctime_x:
if x and y:
ret = add.apply_async(args=[int(x), int(y)], eta=ctime_x)
num = ret.id
elif envs and apps:
ret = mian_salt.apply_async(args=[hosts_list], eta=ctime_x)
num = ret.id
except ValueError:
error = '请正确输入日期数值'
else:
error = '请将表格数据输入完整'
except ValueError:
error = '请正确输入日期数值'
cancel = request.GET.get('cancel')
if cancel:
async = AsyncResult(id=cancel, app=app)
async.revoke(terminate=True)
cancel_tag='取消成功' # 定时任务的取消 是 还没执行之前就取消,执行了放在消息队列里面了就不行被取消
async.forget()
stop = request.GET.get('stop')
if stop:
async = AsyncResult(id=stop, app=app)
async.revoke()
stop_tag='中止成功' # 定时任务的中止, 是 在执行的过程中,中止任务,必须是在执行的时候
async.forget()
return render(request, './bootstarp/publishing/timing.html', locals())
elif request.method == 'POST':
user = request.session['login']
obj_user = models.Login.objects.filter(username=user).first()
tem = 'user_auth_info'
if request.session['auto_user']:
menu_dict = request.session.get('menu_dict')
auth_user = request.session['auto_user']
obj_auto = models.UserInfo.objects.filter(name=auth_user)
if obj_auto:
auth_data.menu_auth(obj_auto, request)
userinfo = request.session.get('auto_user')
pos = request.session.get('auto_user_pos')
img = request.session.get('auto_user_img')
username = request.session.get('auto_user')
obj_auth_user = models.UserInfo.objects.filter(name=username).first()
ret = request.POST.get('id', '').strip(' ')
data = ""
forget = request.POST.get('forget')
if ret:
async = AsyncResult(id=ret, app=app)
if async.successful():
data = "执行成功,数据如下"
jg = async.get()
if not forget:
jg='清除完成'
async.forget()
elif async.failed():
data = '执行失败'
elif async.status == 'PENDING':
data = "等待被执行"
elif async.status == 'RETPY':
data = '任务异常正常重试'
elif async.status == 'STARTED':
data = "任务正在执行"
else:
data = "未知"
else:
data = '请正确填写对应 ID '
return render(request,'./bootstarp/publishing/timing.html', locals()) | [
"[email protected]"
] | |
c6d625d9f68df0ae43347adb40152469782dbb6c | 8caa427e6c2a932d65de15651d41adcfe8c99398 | /1-MIT_Python/ProblemSet6/ps6_encryption.py | 0e276200dbc78ad3fe9d0eda36260e4e2cceeeae | [] | no_license | HillBamboo/MOOCs | d300f341dfe2edffbc27172332bc44896cc1c699 | abe3ca04f6dbfd42e4f78f5a6e0e92f996a1895a | refs/heads/master | 2021-01-10T17:49:02.870852 | 2017-02-18T12:29:20 | 2017-02-18T12:29:20 | 46,789,365 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,376 | py | # 6.00x Problem Set 6
#
# Part 1 - HAIL CAESAR!
import string
import random
WORDLIST_FILENAME = "words.txt"
# -----------------------------------
# Helper code
# (you don't need to understand this helper code)
def loadWords():
"""
Returns a list of valid words. Words are strings of lowercase letters.
Depending on the size of the word list, this function may
take a while to finish.
"""
print "Loading word list from file..."
inFile = open(WORDLIST_FILENAME, 'r')
wordList = inFile.read().split()
print " ", len(wordList), "words loaded."
return wordList
def isWord(wordList, word):
"""
Determines if word is a valid word.
wordList: list of words in the dictionary.
word: a possible word.
returns True if word is in wordList.
Example:
>>> WORDLIST_FILENAME = "words.txt"
>>> isWord(WORDLIST_FILENAME, 'bat')
True
>>> isWord(WORDLIST_FILENAME, 'asdf')
False
"""
word = word.lower()
word = word.strip(" !@#$%^&*()-_+={}[]|\\:;'<>?,./\"")
return word in wordList
def randomWord(wordList):
"""
Returns a random word.
wordList: list of words
returns: a word from wordList at random
"""
return random.choice(wordList)
def randomString(wordList, n):
"""
Returns a string containing n random words from wordList
wordList: list of words
returns: a string of random words separated by spaces.
"""
return " ".join([randomWord(wordList) for _ in range(n)])
def randomScrambled(wordList, n):
"""
Generates a test string by generating an n-word random string
and encrypting it with a sequence of random shifts.
wordList: list of words
n: number of random words to generate and scamble
returns: a scrambled string of n random words
NOTE:
This function will ONLY work once you have completed your
implementation of applyShifts!
"""
s = randomString(wordList, n) + " "
shifts = [(i, random.randint(0, 25)) for i in range(len(s)) if s[i-1] == ' ']
return applyShifts(s, shifts)[:-1]
def getStoryString():
"""
Returns a story in encrypted text.
"""
return open("story.txt", "r").read()
# (end of helper code)
# -----------------------------------
#
# Problem 1: Encryption
#
def buildCoder(shift):
"""
Returns a dict that can apply a Caesar cipher to a letter.
The cipher is defined by the shift value. Ignores non-letter characters
like punctuation, numbers and spaces.
shift: 0 <= int < 26
returns: dict
"""
ls = list(string.ascii_lowercase)
us = list(string.ascii_uppercase)
return dict(zip(us[:] + ls[:], us[shift:] + us[:shift] + ls[shift:] + ls[:shift]))
def applyCoder(text, coder):
"""
Applies the coder to the text. Returns the encoded text.
text: string
coder: dict with mappings of characters to shifted characters
returns: text after mapping coder chars to original text
"""
ans = str()
for i in text[:]:
if i in string.letters:
ans += coder[i]
else:
ans += i
return ans
# return sum([coder[i] for i in text if i in string.letters else i])
def applyShift(text, shift):
"""
Given a text, returns a new text Caesar shifted by the given shift
offset. Lower case letters should remain lower case, upper case
letters should remain upper case, and all other punctuation should
stay as it is.
text: string to apply the shift to
shift: amount to shift the text (0 <= int < 26)
returns: text after being shifted by specified amount.
"""
coder = buildCoder(shift)
ans = str()
for i in text[:]:
if i in string.letters:
ans += coder[i]
else:
ans += i
return ans
#
# Problem 2: Decryption
#
def findBestShift(wordList, text):
"""
Finds a shift key that can decrypt the encoded text.
text: string
returns: 0 <= int < 26
Example:
>>> WORDLIST_FILENAME = "words.txt"
>>> s = applyShift('Hello, world!', 8)
>>> s
'Pmttw, ewztl!'
>>> findBestShift(WORDLIST_FILENAME, s)
18
>>> applyShift(s, 18)
'Hello, world!'
"""
max_num, best_shift, cnt = 0, 0, 0
for shift in range(26):
tmp = applyShift(text, shift)
tmp = tmp.split(' ')
cnt = sum([1 for word in tmp if isWord(wordList, word)])
if cnt > max_num:
max_num = cnt
best_shift = shift
return best_shift
def decryptStory():
"""
Using the methods you created in this problem set,
decrypt the story given by the function getStoryString().
Use the functions getStoryString and loadWords to get the
raw data you need.
returns: string - story in plain text
"""
story = getStoryString()
wordList = loadWords()
best_shift = findBestShift(wordList, story)
return applyShift(story, best_shift)
#
# Build data structures used for entire session and run encryption
#
if __name__ == '__main__':
# To test findBestShift:
wordList = loadWords()
s = applyShift('Hello, world!', 8)
bestShift = findBestShift(wordList, s)
assert applyShift(s, bestShift) == 'Hello, world!'
# To test decryptStory, comment the above four lines and uncomment this line:
decryptStory()
| [
"[email protected]"
] | |
9a36b98bb923fd7dead1786e076de9a8d5e3a170 | bf1461cbd737917d359998c1235c21386529b1b3 | /bin/motor_intermediate.py | 7649bea3ffbaeb41e705a413caecf33d525a6faf | [] | no_license | glynfinck/trashbot_2dnav | d29bf0e35c3463b401d7ee54c94a1541be676497 | 90ac956e9a2889680238a30324d054c0c030a5da | refs/heads/master | 2020-12-11T21:28:11.184688 | 2020-03-09T00:04:14 | 2020-03-09T00:04:14 | 233,964,050 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,313 | py | #! /usr/bin/env python
import rospy
import yaml
import os
from std_msgs.msg import String
from geometry_msgs.msg import Twist
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
BASE_LOCAL_PLANNER_PATH = BASE_DIR+"/param"+"/base_local_planner_params.yaml"
DESIRED_BASE_LOCAL_PLANNER_PATH = BASE_DIR+"/param"+"/desired_base_local_planner_params.yaml"
with open(BASE_LOCAL_PLANNER_PATH, 'r') as stream:
try:
base_local_planner_yaml = yaml.safe_load(stream)
except yaml.YAMLError as exc:
print(exc)
with open(DESIRED_BASE_LOCAL_PLANNER_PATH, 'r') as stream:
try:
desired_base_local_planner_yaml = yaml.safe_load(stream)
except yaml.YAMLError as exc:
print(exc)
print(type(desired_base_local_planner_yaml))
INPUT_PLANNER = base_local_planner_yaml.get("TrajectoryPlannerROS")
DESIRED_PLANNER = desired_base_local_planner_yaml.get("TrajectoryPlannerROS")
LINEAR_VEL_SCALE=1.0
ANGULAR_VEL_SCALE=1.0
cmd_vel_pub = rospy.Publisher('/cmd_vel', String, queue_size=10)
rospy.init_node('motor_intermediate', anonymous=True)
def convert_range(x,InputLow,InputHigh,OutputLow,OutputHigh):
return ((x - InputLow) / (InputHigh - InputLow)) * (OutputHigh - OutputLow) + OutputLow
def callback(data):
new_vel = Twist()
new_vel.linear.x = convert_range(
data.linear.x,
INPUT_PLANNER.get("min_vel_x"),
INPUT_PLANNER.get("max_vel_x"),
DESIRED_PLANNER.get("min_vel_x"),
DESIRED_PLANNER.get("max_vel_x")
)
new_vel.linear.y = convert_range(
data.linear.y,
INPUT_PLANNER.get("min_vel_x"),
INPUT_PLANNER.get("max_vel_x"),
DESIRED_PLANNER.get("min_vel_x"),
DESIRED_PLANNER.get("max_vel_x")
)
new_vel.linear.z = convert_range(
data.linear.z,
INPUT_PLANNER.get("min_vel_x"),
INPUT_PLANNER.get("max_vel_x"),
DESIRED_PLANNER.get("min_vel_x"),
DESIRED_PLANNER.get("max_vel_x")
)
if float(new_vel.linear.x) == 0.0 and float(new_vel.linear.y) and float(new_vel.linear.z):
new_vel.angular.x = convert_range(
data.angular.x,
INPUT_PLANNER.get("min_vel_theta"),
INPUT_PLANNER.get("max_vel_theta"),
DESIRED_PLANNER.get("min_vel_theta"),
DESIRED_PLANNER.get("max_vel_theta")
)
new_vel.angular.y = convert_range(
data.angular.y,
INPUT_PLANNER.get("min_vel_theta"),
INPUT_PLANNER.get("max_vel_theta"),
DESIRED_PLANNER.get("min_vel_theta"),
DESIRED_PLANNER.get("max_vel_theta")
)
new_vel.angular.z = convert_range(
data.angular.z,
INPUT_PLANNER.get("min_vel_theta"),
INPUT_PLANNER.get("max_vel_theta"),
DESIRED_PLANNER.get("min_vel_theta"),
DESIRED_PLANNER.get("max_vel_theta")
)
else:
new_vel.angular.x = convert_range(
data.angular.x,
(-1)*INPUT_PLANNER.get("min_in_place_vel_theta"),
INPUT_PLANNER.get("min_in_place_vel_theta"),
(-1)*DESIRED_PLANNER.get("min_in_place_vel_theta"),
DESIRED_PLANNER.get("min_in_place_vel_theta")
)
new_vel.angular.y = convert_range(
data.angular.y,
INPUT_PLANNER.get("min_in_place_vel_theta"),
INPUT_PLANNER.get("min_in_place_vel_theta"),
DESIRED_PLANNER.get("min_in_place_vel_theta"),
DESIRED_PLANNER.get("min_in_place_vel_theta")
)
new_vel.angular.z = convert_range(
data.angular.z,
INPUT_PLANNER.get("min_in_place_vel_theta"),
INPUT_PLANNER.get("min_in_place_vel_theta"),
DESIRED_PLANNER.get("min_in_place_vel_theta"),
DESIRED_PLANNER.get("min_in_place_vel_theta")
)
cmd_vel_pub.publish(new_vel)
def listener():
# In ROS, nodes are uniquely named. If two nodes with the same
# name are launched, the previous one is kicked off. The
# anonymous=True flag means that rospy will choose a unique
# name for our 'listener' node so that multiple listeners can
# run simultaneously.
rospy.Subscriber("/inter_cmd_vel", Twist, callback)
rospy.spin()
if __name__ == '__main__':
listener() | [
"[email protected]"
] | |
7bc58f3621d6e04206543d4b556929e56b1c3b0f | 5d11146d1d81fc7c75f64f902d84673401f7a4b9 | /oopython/example_code/vt23/kmom03/get_post_ok/src/guess_game.py | 92315a082205be08b15bd1c81c5ada27db284b32 | [
"MIT"
] | permissive | dbwebb-se/python-slides | ca82688e1ea0d527fc650eb2872f3c71425690f1 | 4c5f0bb9b5873fcc2fd89740b36ede78b2f9d6c2 | refs/heads/master | 2023-08-31T07:53:11.958487 | 2023-08-22T12:21:16 | 2023-08-22T12:21:16 | 144,724,378 | 0 | 0 | MIT | 2023-03-08T20:50:56 | 2018-08-14T13:30:50 | HTML | UTF-8 | Python | false | false | 1,766 | py | #!/usr/bin/env python3
"""
Main class for the guessing game
"""
import random
from src.guess import Guess
class GuessGame:
"""
Holds info for playing a guessing game
"""
def __init__(self, correct_value=None, guesses=None):
if correct_value is not None:
self._correct_value = correct_value
else:
self._correct_value = random.randint(1, 15)
self.guesses = []
if guesses:
for value, attempt, is_correct in guesses:
self.guesses.append(Guess(value, attempt, is_correct))
# self.guesses = [Guess(v, a, c) for v, a, c in guesses] if guesses is not None else [] # denna raden gör samma sak som de fyra raderna ovanför
self.guess_attempts = len(self.guesses)
def make_guess(self, guess_value):
"""
Makes a new guess and adds to list
"""
self.guess_attempts += 1
if guess_value == self._correct_value:
self.guesses.append(Guess(guess_value, self.guess_attempts, True))
return True
self.guesses.append(Guess(guess_value, self.guess_attempts))
return False
def get_correct_value(self):
""" Return private attribute """
return self._correct_value
def get_if_guessed_correct(self):
""" return if last guess was correct or not """
return self.guesses[-1].correct if self.guesses else False
def to_list(self):
""" Turn old guesses to a list """
# new_list = []
# for g in self.guesses:
# new_list.append((g.value, g.attempt, g.correct))
# return new_list
return [(g.value, g.attempt, g.correct) for g in self.guesses] # denna raden gör samma sak som de fyra raderna ovanför. | [
"[email protected]"
] | |
97af32bd4a5c4035d0706ba4db1578224129c7e6 | f48c6625dbd6d64a203a7f4e2c1cceec1afe01e8 | /bit_manipulation.py | 493d33ceb087f382aee7e4eccf094369baf9ec0c | [] | no_license | SergiosKar/Algorithms_DataStructures | 3087dfd34ab303732e0bedd6e0182dde4915ae87 | 0c5a7fe702f3cb54d4774e4fb0b4694bef2a7f07 | refs/heads/master | 2020-05-07T20:42:49.822989 | 2019-06-24T17:59:22 | 2019-06-24T17:59:22 | 180,874,174 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 97 | py | num=72
print(bin(num))
for pos in range(8):
print(bin(1<<pos))
print(' ') | [
"[email protected]"
] | |
ea25370b4ca6e70943758b90f71ffcc5e2e588ed | ea1cd395be1c201e7f8cd49ff6d556fa4c853b74 | /PythonDataStructures/Sports.py | 0e1a0f12b75ce9f04a96ef2e68336295e6b4d085 | [] | no_license | itsmrajesh/python-programming-questions | 1c7571abc419da2617d46d5c1c824095ca7ec592 | 5820c28131aeb3caa17efe2a83de07576755c300 | refs/heads/master | 2020-06-11T12:29:40.433704 | 2019-09-05T12:37:03 | 2019-09-05T12:37:03 | 193,964,339 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 896 | py | Cricket=[ "PKM", "ALN", "GLN", "NVR", "PVR", "KM", "VP", "CS", "MCS"]
Football=[ "PKM", "ALN","RMZ","CS", "MCS"]
Badminton=[ "PKM", "ALN", "NV", "KM","RMV"]
all_Players_list=list()
all_Players_list.extend(Cricket)
all_Players_list.extend(Football)
all_Players_list.extend(Badminton)
def displayNames(data):
for name in data:
print(name ,end=" ")
print()
print("All Players List")
displayNames(all_Players_list)
unique_Players_Set=set()
unique_Players_Set={name for name in all_Players_list}
#for name in all_Players_list:
# unique_Players_Set.add(name)
print("Unique Players")
displayNames(unique_Players_Set)
all_games_players_list=list()
for name in unique_Players_Set:
if name in (Cricket and Football and Badminton):
all_games_players_list.append(name)
print("Player who play all 3 games")
displayNames(all_games_players_list)
| [
"[email protected]"
] | |
0b6b1aff0397d9ac19c5d18aa1bfecfa51153e80 | c714f2e2524b1797636699602d746fb7de064083 | /SLAM2D.py | 1bc95a6c180f6238748ede52f059898a6461f4ac | [] | no_license | arnolde/SLAM | 7c8b6ae80977fd1eabf2250d5ac7e3a393ded977 | 028d1795a3bf1acb8ebad4c91440c5646ff96b3f | refs/heads/master | 2020-09-08T15:50:25.373110 | 2019-11-12T10:21:19 | 2019-11-12T10:21:19 | 221,176,240 | 0 | 0 | null | 2019-11-12T09:13:59 | 2019-11-12T09:13:58 | null | UTF-8 | Python | false | false | 24,003 | py | import numpy as np
from numpy.linalg import inv, multi_dot
from numpy import dot
from matplotlib import pyplot as plt
from matplotlib.patches import Circle
import math
import seaborn
from scipy import sparse
from scipy.sparse import linalg
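# SLAM in 2D with information-form filters: a dense Extended Information
# Filter (EIFModel) and a Sparse Extended Information Filter (SEIFModel,
# after Thrun et al.'s SEIF). The state stacks the robot pose (x, y, theta)
# with 2D landmark positions; the belief is kept in information form as the
# information matrix H and information vector b = mu^T H.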
def dots(*arg):
    """Chained matrix product: dots(A, B, C) == multi_dot([A, B, C])."""
    return multi_dot(arg)
class BasicMovement:
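    """Noisy unicycle-style motion model used to simulate the real robot.

    A command is a [speed, rotation] pair; `move` applies it with additive
    Gaussian noise drawn from `covariance` and clips the resulting state.
    """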
def __init__(self, maxSpeed, maxRotation, covariance, measureFunction):
self.maxSpeed = maxSpeed
self.maxRotation = maxRotation
self.measureFunction = measureFunction
self.covariance = np.atleast_2d(covariance)
# Input the real state
def move(self, state, covariance=None, command=None):
command = self.__choose_command(state) if command is None else command
noise = self.__get_noise(covariance)
idealMove = self.exact_move(state, command)
realMove = self.__noisy_move(state, idealMove, noise)
newState = state + realMove
return clipState(newState), command
def __choose_command(self, state):
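        # Random exploration policy: pick a random speed; if the robot has
        # strayed more than 100 units from the origin, steer back toward
        # (0, 0) using the bearing returned by measureFunction, otherwise
        # pick a random rotation within the allowed range.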
speed = self.maxSpeed * np.random.rand()
if (np.linalg.norm(state[:2]) > 100):
_, rotation = self.measureFunction(state[:3], [[0], [0]])
rotation = np.clip(rotation, -self.maxRotation, self.maxRotation)
else:
rotation = (np.random.rand() * 2 - 1) * self.maxRotation
return [speed, rotation]
def exact_move(self, state, command):
speed, rotation = command
angle = state[2]
deltaX = speed * math.cos(angle)
deltaY = speed * math.sin(angle)
move = np.zeros_like(state)
move[:3, 0] = [deltaX, deltaY, rotation]
return move
def __noisy_move(self, state, idealMove, noise):
noisyMove = idealMove[:3] + noise
noisySpeed, _ = self.measureFunction(noisyMove[:3], np.zeros_like(noise)[:2])
noisyRotation = noisyMove[2]
maxs = [self.maxSpeed, self.maxRotation]
mins = [0, -self.maxRotation]
correctedCommand = np.clip([noisySpeed, noisyRotation], mins, maxs)
return self.exact_move(state, correctedCommand)
def __noisy_move2(self, state, idealMove, noise):
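        # Unused simpler variant: adds the raw noise directly, without
        # re-deriving and re-clipping the command as __noisy_move does.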
noisyMove = np.zeros_like(state)
noisyMove[:3] = idealMove[:3] + noise
return noisyMove
def __get_noise(self, covariance):
covariance = self.covariance if covariance is None else covariance
noise = np.random.multivariate_normal(np.zeros(covariance.shape[0]), covariance, 1).T
return noise
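
# Minimal usage sketch (hypothetical values; assumes a range/bearing
# measureFunction like the one defined elsewhere in this file):
#   motion = BasicMovement(maxSpeed=5, maxRotation=np.pi / 4,
#                          covariance=np.eye(3) * 0.01,
#                          measureFunction=measureFunction)
#   newState, command = motion.move(np.zeros((3, 1)))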
class BasicMeasurement:
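    """Range/bearing sensor model with optional field-of-view gating.

    `measure` returns noisy (distance, angle) readings for every landmark
    closer than `detectionSize` and within `detectionCone` radians of the
    robot heading (a value of 0 disables either gate).
    """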
def __init__(self, covariance, robotFeaturesDim, envFeaturesDim, measureFunction, gradMeasureFunction, detectionSize=0, detectionCone=0):
self.covariance = np.atleast_2d(covariance)
self.robotFeaturesDim = robotFeaturesDim
self.envFeaturesDim = envFeaturesDim
self.measureFunction = measureFunction
self.gradMeasureFunction = gradMeasureFunction
self.detectionSize = detectionSize
self.detectionCone = detectionCone
# Input the real state
def measure(self, state):
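        """Return (measurements, landmarkIds) for the currently visible landmarks."""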
dim = state.shape[0]
dimR = self.robotFeaturesDim
dimE = self.envFeaturesDim
rState = state[:dimR]
envState = state[dimR:]
        nbLandmark = (dim - dimR) // dimE  # integer landmark count (Py2/Py3 safe)
mes = np.zeros(nbLandmark * dimE).reshape(nbLandmark, dimE)
landmarkIds = np.zeros(nbLandmark)
j = 0
for i, landmark in enumerate(envState.reshape((nbLandmark, dimE, 1))):
diffNorm, diffAngle = self.measureFunction(rState, landmark)
            angleOk = (abs(clipAngle(diffAngle, True)) < self.detectionCone / 2.) or (self.detectionCone == 0)
            distanceOk = (diffNorm < self.detectionSize) or (self.detectionSize == 0)
if distanceOk and angleOk:
mes[j] = [diffNorm, diffAngle]
landmarkIds[j] = i
j += 1
mes = mes[:j]
landmarkIds = landmarkIds[:j]
mes = np.array(mes) + self.__get_noise(mes)
return mes, landmarkIds
def __get_noise(self, mes):
noise = np.random.multivariate_normal(np.zeros(self.covariance.shape[0]), self.covariance, mes.shape[0])
return noise
class SEIFModel:
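    """Sparse Extended Information Filter (SEIF) SLAM.

    The belief is stored in information form (H, b); `mu` caches the mean,
    which is recovered by coordinate ascent. At most `maxLinks` landmarks
    stay "active" (directly linked to the robot in H); sparsification cuts
    the remaining robot-landmark links so updates stay close to constant time.
    """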
def __init__(self, dimension, robotFeaturesDim, envFeaturesDim, motionModel, mesModel, covMes, muInitial, maxLinks):
self.robotFeaturesDim = robotFeaturesDim
self.envFeaturesDim = envFeaturesDim
self.dimension = dimension
self.H = np.eye(dimension)
self.b = dot(muInitial.T, self.H)
self.mu = muInitial.copy()
self.Sx = np.zeros(dimension * robotFeaturesDim).reshape((dimension, robotFeaturesDim))
self.Sx[:robotFeaturesDim] = np.eye(robotFeaturesDim)
self.invZ = inv(covMes)
self.motionModel = motionModel
self.mesModel = mesModel
self.maxLinks = maxLinks
def update(self, measures, landmarkIds, command, U):
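        # One SEIF iteration: (1) sparse motion update, (2) recover the
        # mean, (3) fold in each landmark measurement, (4) recover the mean
        # again, and (5) sparsify the information matrix.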
self.__motion_update_sparse(command, U)
self.__mean_update()
for ldmIndex, ldmMes in zip(landmarkIds, measures):
self.__measurement_update(ldmMes, int(ldmIndex))
self.__mean_update()
self.__sparsification()
return self.H, self.b, self.mu
def __motion_update(self, command, U):
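        # Dense reference implementation of the motion update; update()
        # uses __motion_update_sparse below instead.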
r = self.robotFeaturesDim
previousMeanState = self.estimate()
meanStateChange = self.motionModel.exact_move(previousMeanState, command)
newMeanState = clipState(previousMeanState + meanStateChange)
# TO IMPROVE
angle = previousMeanState[2, 0] # TO IMPROVE
gradMeanMotion = np.zeros_like(self.H) # TO IMPROVE
gradMeanMotion[2, 0:2] = command[0] * np.array([-math.sin(angle), math.cos(angle)]) # TO IMPROVE
delta = dots(self.Sx.T, gradMeanMotion, self.Sx)
G = dots(self.Sx, (inv(np.eye(r) + delta) - np.eye(r)), self.Sx.T)
phi = np.eye(self.dimension) + G
Hp = dots(phi.T, self.H, phi)
deltaH = dots(Hp, self.Sx, inv(inv(U) + dots(self.Sx.T, Hp, self.Sx)), self.Sx.T, Hp)
H = Hp - deltaH
self.H = H
self.b = dot(newMeanState.T, self.H)
self.mu = newMeanState
def __motion_update_sparse(self, command, U):
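        # SEIF motion update in sparse form (after Thrun et al.'s SEIF):
        #   delta = Sx^T A Sx                     (motion Jacobian, robot block)
        #   G     = Sx ((I + delta)^-1 - I) Sx^T
        #   phi   = I + G
        #   H'    = phi^T H phi
        #   H_new = H' - H' Sx (U^-1 + Sx^T H' Sx)^-1 Sx^T H'
        #   b_new = (H_new mu_new)^T
        # so the matrix inversions only involve the robot-sized block.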
r = self.robotFeaturesDim
previousMeanState = self.estimate()
meanStateChange = self.motionModel.exact_move(previousMeanState, command)
newMeanState = clipState(previousMeanState + meanStateChange)
# TO IMPROVE
angle = previousMeanState[2, 0] # TO IMPROVE
gradMeanMotion = np.zeros_like(self.H) # TO IMPROVE
gradMeanMotion[2, 0:2] = command[0] * np.array([-math.sin(angle), math.cos(angle)]) # TO IMPROVE
Sx = sparse.bsr_matrix(self.Sx)
sH = sparse.bsr_matrix(self.H)
invU = sparse.coo_matrix(inv(U))
sparseGradMeanMotion = sparse.bsr_matrix(gradMeanMotion)
delta = Sx.T.dot(sparseGradMeanMotion).dot(Sx)
G = Sx.dot(linalg.inv(sparse.eye(r) + delta) - sparse.eye(r)).dot(Sx.T)
phi = sparse.eye(self.dimension) + G
Hp = phi.T.dot(sH).dot(phi)
deltaH = Hp.dot(Sx).dot(linalg.inv(invU + Sx.T.dot(Hp).dot(Sx))).dot(Sx.T).dot(Hp)
# H = inv(Hp + dots(self.Sx, U, self.Sx.T))
H = Hp - deltaH
# self.b = self.b - dot(previousMeanState.T, self.H - H) + dot(meanStateChange.T, H)
self.H = H.todense()
self.b = H.dot(newMeanState).T
self.mu = newMeanState
def __mean_update(self):
''' Coordinate ascent '''
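        # Recover the mean from the information form by solving H mu = b^T
        # with Gauss-Seidel sweeps, restricted to the robot entries and the
        # active/deactivating landmarks returned by __partition_links.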
mu = self.mu
iterations = 30
y0, yp = self.__partition_links()
y = np.concatenate([np.arange(self.robotFeaturesDim), y0, yp])
# vMu = dot(self.b, inv(self.H)).T
# muSave = []
# muSave2 = []
        for _ in range(iterations):
for i in y:
y2 = np.setdiff1d(y, i)
mu[i] = (self.b[0, i] - dot(self.H[i, y2], mu[y2])) / self.H[i, i]
# muSave.extend([np.linalg.norm(mu - vMu)])
self.mu = mu
# plt.plot(muSave)
def __measurement_update(self, ldmMes, ldmIndex):
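        # Information-form measurement update with Jacobian C and
        # measurement information Z^-1 (self.invZ):
        #   H <- H + C Z^-1 C^T
        #   b <- b + (z - h(mu) + C^T mu)^T Z^-1 C^T
        # The bearing residual is wrapped to (-pi, pi] via clipAngle.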
mu = self.mu
meanMes, gradMeanMes = self.__get_mean_measurement_params(mu, ldmIndex)
z = np.array(ldmMes).reshape(len(ldmMes), 1)
zM = np.array(meanMes).reshape(len(ldmMes), 1)
C = gradMeanMes
mesError = (z - zM)
mesError[1, 0] = clipAngle(mesError[1, 0], force=True)
correction = mesError + dot(C.T, mu)
correction[1, 0] = clipAngle(correction[1, 0])
self.H += dot(dot(C, self.invZ), C.T)
self.b += dot(dot(correction.T, self.invZ), C.T)
def __partition_links(self):
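        # Split landmarks by the strength of their link to the robot in H:
        # yp keeps the maxLinks strongest links (active landmarks), y0
        # collects the remaining nonzero links, to be cut by sparsification.
        # Both are returned as flat index arrays into the full state vector.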
r = self.robotFeaturesDim
e = self.envFeaturesDim
d = self.dimension
l = (d - r) / e
arrRF = np.arange(r)
norms = np.array([np.linalg.norm(self.H[arrRF][:, np.arange(i * e + r, (i + 1) * e + r)]) for i in xrange(l)])
ids = np.argsort(norms)
yp = ids[-self.maxLinks:]
y0 = np.setdiff1d(np.where(norms > 0), yp)
yp = np.concatenate([np.arange(y * e, (y + 1) * e) for y in yp]) + r
if len(y0) > 0:
y0 = np.concatenate([np.arange(y * e, (y + 1) * e) for y in y0]) + r
return y0, yp
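# Partition summary: yp holds the state indices of the (at most maxLinks)
# active landmarks, ranked by the norm of their robot-landmark block in H;
# y0 holds landmarks that are still linked (non-zero norm) but fall outside
# the top maxLinks and will be deactivated by the next sparsification. Both
# are returned as flat state-vector index arrays offset by the robot block.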
def __build_projection_matrix(self, indices):
d1 = self.H.shape[0]
d2 = len(indices)
S = np.zeros((d1, d2))
S[indices] = np.eye(d2)
return S
def __sparsification(self):
x = np.arange(self.robotFeaturesDim)
y0, yp = self.__partition_links()
Sx = sparse.coo_matrix(self.__build_projection_matrix(x))
Sy0 = sparse.coo_matrix(self.__build_projection_matrix(y0))
Sxy0 = sparse.coo_matrix(self.__build_projection_matrix(np.concatenate((x, y0))))
Sxyp = sparse.coo_matrix(self.__build_projection_matrix(np.concatenate((x, yp))))
Sxy0yp = sparse.coo_matrix(self.__build_projection_matrix(np.concatenate((x, y0, yp))))
H = sparse.bsr_matrix(self.H)
Hp = Sxy0yp.dot(Sxy0yp.T).dot(H).dot(Sxy0yp).dot(Sxy0yp.T)
Ht = H - (0 if not y0.size else Hp.dot(Sy0).dot(linalg.inv(Sy0.T.dot(Hp).dot(Sy0))).dot(Sy0.T).dot(Hp)) \
+ Hp.dot(Sxy0).dot(linalg.inv(Sxy0.T.dot(Hp).dot(Sxy0))).dot(Sxy0.T).dot(Hp) \
- H.dot(Sx).dot(linalg.inv(Sx.T.dot(H).dot(Sx))).dot(Sx.T).dot(H)
eps = 1e-5
Htt = Ht.todense()
Htt[np.abs(Htt) < eps] = 0
bt = self.b + (Ht - H).dot(self.mu)
self.H = Htt
self.b = bt
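# Sparsification drops the direct robot-landmark links for the passive set
# y0. In information form (following the SEIF formulation) the approximation
# computed above is
#
#     Ht = H - Hp*Sy0*inv(Sy0'*Hp*Sy0)*Sy0'*Hp
#            + Hp*Sxy0*inv(Sxy0'*Hp*Sxy0)*Sxy0'*Hp
#            - H*Sx*inv(Sx'*H*Sx)*Sx'*H
#     bt = b + (Ht - H)*mu
#
# with the S matrices the projections built above; entries smaller than eps
# are then truncated so that H stays numerically sparse.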
def __get_mean_measurement_params(self, mu, ldmIndex):
realIndex = self.robotFeaturesDim + ldmIndex * self.envFeaturesDim
ldmMeanState = mu[realIndex: realIndex + self.envFeaturesDim]
rMeanState = mu[:self.robotFeaturesDim]
meanMes = self.mesModel.measureFunction(rMeanState, ldmMeanState)
gradMeanMes = self.mesModel.gradMeasureFunction(rMeanState, ldmMeanState, realIndex)
return meanMes, gradMeanMes
def estimate(self):
return self.mu
class EIFModel:
def __init__(self, dimension, robotFeaturesDim, envFeaturesDim, motionModel, mesModel, covMes, muInitial):
self.robotFeaturesDim = robotFeaturesDim
self.envFeaturesDim = envFeaturesDim
self.dimension = dimension
self.HH = np.eye(dimension)
self.H = np.eye(dimension)
self.b = dot(muInitial.T, self.H)
self.bb = dot(muInitial.T, self.H)
self.S = np.zeros(dimension * robotFeaturesDim).reshape((dimension, robotFeaturesDim))
self.S[:robotFeaturesDim] = np.eye(robotFeaturesDim)
self.invZ = inv(covMes)
self.motionModel = motionModel
self.mesModel = mesModel
def update(self, measures, landmarkIds, command, U):
self.__motion_update(command, U)
for ldmIndex, ldmMes in zip(landmarkIds, measures):
self.__measurement_update(ldmMes, int(ldmIndex))
return self.H, self.b
def __motion_update(self, command, U):
previousMeanState = self.estimate()
meanStateChange = self.motionModel.exact_move(previousMeanState, command)
newMeanState = clipState(previousMeanState + meanStateChange)
# TO IMPROVE
angle = previousMeanState[2, 0] # TO IMPROVE
gradMeanMotion = np.zeros_like(self.H) # TO IMPROVE
gradMeanMotion[2, 0:2] = command[0] * np.array([-math.sin(angle), math.cos(angle)]) # TO IMPROVE
IA = np.eye(self.H.shape[0]) + gradMeanMotion # TO IMPROVE
sigma = dot(dot(IA, inv(self.H)), IA.T) + dot(dot(self.S, U), self.S.T)
self.H = inv(sigma)
self.b = dot((newMeanState).T, self.H)
self.HH = self.H.copy()
self.bb = self.b.copy()
def __measurement_update(self, ldmMes, ldmIndex):
mu = self.estimate()
meanMes, gradMeanMes = self.__get_mean_measurement_params(mu, ldmIndex)
z = np.array(ldmMes).reshape(len(ldmMes), 1)
zM = np.array(meanMes).reshape(len(ldmMes), 1)
C = gradMeanMes
mesError = (z - zM)
mesError[1, 0] = clipAngle(mesError[1, 0], force=True)
mesError += dot(C.T, mu)
mesError[1, 0] = clipAngle(mesError[1, 0])
self.H += dot(dot(C, self.invZ), C.T)
self.b += dot(dot(mesError.T, self.invZ), C.T)
def __get_mean_measurement_params(self, mu, ldmIndex):
realIndex = self.robotFeaturesDim + ldmIndex * self.envFeaturesDim
ldmMeanState = mu[realIndex: realIndex + self.envFeaturesDim]
rMeanState = mu[:self.robotFeaturesDim]
meanMes = self.mesModel.measureFunction(rMeanState, ldmMeanState)
gradMeanMes = self.mesModel.gradMeasureFunction(rMeanState, ldmMeanState, realIndex)
return meanMes, gradMeanMes
def estimate(self, H=None, b=None):
H = self.H if H is None else H
b = self.b if b is None else b
return clipState(dot(b, inv(H)).T)
class EKFModel:
def __init__(self, dimension, robotFeaturesDim, envFeaturesDim, motionModel, mesModel, covMes, muInitial):
self.robotFeaturesDim = robotFeaturesDim
self.envFeaturesDim = envFeaturesDim
self.dimension = dimension
self.Sigma = np.eye(dimension)
self.mu = muInitial.copy()
self.S = np.zeros(dimension * robotFeaturesDim).reshape((dimension, robotFeaturesDim))
self.S[:robotFeaturesDim] = np.eye(robotFeaturesDim)
self.Z = covMes
self.motionModel = motionModel
self.mesModel = mesModel
def update(self, measures, landmarkIds, command, U):
self.__motion_update(command, U)
for ldmIndex, ldmMes in zip(landmarkIds, measures):
self.__measurement_update(ldmMes, int(ldmIndex))
return self.Sigma, self.mu
def __motion_update(self, command, U):
previousMeanState = self.mu
meanStateChange = self.motionModel.exact_move(previousMeanState, command)
newMeanState = clipState(previousMeanState + meanStateChange)
# TO IMPROVE
angle = previousMeanState[2, 0] # TO IMPROVE
gradMeanMotion = np.zeros_like(self.Sigma) # TO IMPROVE
gradMeanMotion[2, 0:2] = command[0] * np.array([-math.sin(angle), math.cos(angle)]) # TO IMPROVE
IA = np.eye(self.Sigma.shape[0]) + gradMeanMotion # TO IMPROVE
self.mu = newMeanState
self.Sigma = dot(dot(IA, self.Sigma), IA.T) + dot(dot(self.S, U), self.S.T)
def __measurement_update(self, ldmMes, ldmIndex):
mu = self.mu
Sigma = self.Sigma
meanMes, gradMeanMes = self.__get_mean_measurement_params(mu, ldmIndex)
z = np.array(ldmMes).reshape(len(ldmMes), 1)
zM = np.array(meanMes).reshape(len(ldmMes), 1)
C = gradMeanMes
toInvert = inv(dot(dot(C.T, Sigma), C) + self.Z)
K = dot(dot(Sigma, C), toInvert)
mesError = (z - zM)
mesError[1, 0] = clipAngle(mesError[1, 0], force=True)
mesError = dot(K, mesError)
mesError[1, 0] = clipAngle(mesError[1, 0])
self.mu += mesError
self.Sigma = dot(np.eye(self.dimension) - dot(K, C.T), Sigma)
def __get_mean_measurement_params(self, mu, ldmIndex):
realIndex = self.robotFeaturesDim + ldmIndex * self.envFeaturesDim
ldmMeanState = mu[realIndex: realIndex + self.envFeaturesDim]
rMeanState = mu[:self.robotFeaturesDim]
meanMes = self.mesModel.measureFunction(rMeanState, ldmMeanState)
gradMeanMes = self.mesModel.gradMeasureFunction(rMeanState, ldmMeanState, realIndex)
return meanMes, gradMeanMes
def estimate(self):
return self.mu
def measureFunction(rState, landmark):
rDim = 3
diff = landmark - rState[:rDim-1]
diffNorm = np.linalg.norm(diff)
angle = rState[rDim-1, 0]
diffAngle = math.atan2(diff[1], diff[0]) - angle
diffAngle = clipAngle(diffAngle)
return diffNorm, diffAngle
def gradMeasureFunction(rState, landmark, ldmIndex):
rDim = 3
eDim = 2
diff = (rState[:rDim-1] - landmark).flatten()
diffNorm = np.linalg.norm(diff)
grad = np.zeros(dimension * 2).reshape(dimension, 2)
grad[:rDim-1, 0] = diff / diffNorm
grad[ldmIndex:ldmIndex + eDim, 0] = -grad[:rDim-1, 0]
grad[:rDim-1, 1] = np.array([-diff[1], diff[0]]) / (diffNorm**2)
grad[ldmIndex:ldmIndex + eDim, 1] = -grad[:rDim-1, 1]
grad[rDim-1, 1] = -1
return grad
def clipAngle(angle, force=False):
if clip or force:
angle = (angle + math.pi) % (2 * math.pi) - math.pi
return angle
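# Worked example: clipAngle(4.0, force=True) wraps 4.0 rad into (-pi, pi]:
# (4.0 + pi) % (2*pi) - pi ~ 0.8584 - pi ~ -2.2832, i.e. 4.0 - 2*pi.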
def clipState(state):
if clip:
state[2, 0] = clipAngle(state[2, 0])
return state
clip = False
if __name__ == '__main__':
dimension = None
def simu():
global dimension
T = 100 # Number of timesteps
nbLandmark = 900
maxSpeed = 5
maxRotation = 45 * math.pi / 180 # 45 degrees, in radians
sizeMap = 50
# Robot Detection Parameters
detectionSize = 2 # 40
detectionCone = 180 * math.pi / 180 # in radians
# Dimension Constants
robotFeaturesDim = 3
envFeaturesDim = 2
commandsDim = 2
mesDim = 2
dimension = robotFeaturesDim + nbLandmark * envFeaturesDim
# Covariances for motions and measurements
covarianceMotion = np.eye(robotFeaturesDim)
covarianceMotion[0, 0] = 1 ** 2 # motion noise variance X
covarianceMotion[1, 1] = 1 ** 2 # motion noise variance Y
covarianceMotion[2, 2] = (5 * math.pi / 180) ** 2 # motion noise variance Angle
covarianceMeasurements = np.eye(mesDim)
covarianceMeasurements[0, 0] = 1 ** 2 # measurement noise variance distance
covarianceMeasurements[1, 1] = (5 * math.pi / 180) ** 2 # motion noise variance Angle
## ----------------------------------------------------------------------
## Simulation initialization
## -------------------
## State Definition
# Real robot state
state = np.zeros((dimension, 1))
x = np.linspace(-sizeMap, sizeMap, int(np.sqrt(nbLandmark)))
y = np.linspace(-sizeMap, sizeMap, int(np.sqrt(nbLandmark)))
xv, yv = np.meshgrid(x, y)
state[robotFeaturesDim:, 0] = np.vstack([xv.ravel(), yv.ravel()]).ravel(order="F")
# state[robotFeaturesDim:] = np.random.rand(nbLandmark * envFeaturesDim).reshape(nbLandmark * envFeaturesDim, 1) * 300 - 150
# Basic and EIF estimator for robot state
mu = state.copy()
mu[robotFeaturesDim:] += np.random.normal(0, covarianceMeasurements[0, 0], nbLandmark * envFeaturesDim).reshape(nbLandmark * envFeaturesDim, 1)
muEKF = mu.copy()
muEIF = mu.copy()
muSEIF = mu.copy()
## --------------------
## Models Definition
motionModel = BasicMovement(maxSpeed, maxRotation, covarianceMotion, measureFunction)
measurementModel = BasicMeasurement(covarianceMeasurements, robotFeaturesDim, envFeaturesDim, measureFunction, gradMeasureFunction, detectionSize, detectionCone)
ekf = EKFModel(dimension, robotFeaturesDim, envFeaturesDim, motionModel, measurementModel, covarianceMeasurements, mu)
eif = EIFModel(dimension, robotFeaturesDim, envFeaturesDim, motionModel, measurementModel, covarianceMeasurements, mu)
seif = SEIFModel(dimension, robotFeaturesDim, envFeaturesDim, motionModel, measurementModel, covarianceMeasurements, mu, 4)
mus_simple = np.zeros((T, dimension))
mus_ekf = np.zeros((T, dimension))
mus_eif = np.zeros((T, dimension))
mus_seif = np.zeros((T, dimension))
states = np.zeros((T, dimension))
mus_simple[0] = np.squeeze(mu)
mus_ekf[0] = np.squeeze(muEKF)
mus_eif[0] = np.squeeze(muEIF)
mus_seif[0] = np.squeeze(muSEIF)
states[0] = np.squeeze(state)
# LOG Initial state
# print("BEFORE")
# print("EIF estimate :")
# print(muEIF)
# print("EKF estimate :")
# print(muEKF)
# print("Real state :")
# print(state)
# print('\n')
for t in range(1, T):
print("\nIteration %d" % t)
state, motionCommand = motionModel.move(state)
measures, landmarkIds = measurementModel.measure(state)
mu += motionModel.exact_move(mu, motionCommand)
# H, _ = ekf.update(measures, landmarkIds, motionCommand, covarianceMotion)
# print (H != 0).sum(), ' / ', H.size
# H, _, _ = eif.update(measures, landmarkIds, motionCommand, covarianceMotion)
# print (H != 0).sum(), ' / ', H.size
H, _, _ = seif.update(measures, landmarkIds, motionCommand, covarianceMotion)
print (H != 0).sum(), ' / ', H.size
# muEKF = ekf.estimate()
# muEIF = eif.estimate()
muSEIF = seif.estimate()
# print "np.linalg.norm(muEIF-muSEIF)"
# print np.linalg.norm(muEIF-muSEIF)
# print np.linalg.norm(eif.b - seif.b)
# print np.linalg.norm(eif.H - seif.H)
# print muEIF[:3]
# print muSEIF[:3]
mus_simple[t] = np.squeeze(mu)
# mus_ekf[t] = np.squeeze(muEKF)
# mus_eif[t] = np.squeeze(muEIF)
mus_seif[t] = np.squeeze(muSEIF)
states[t] = np.squeeze(state)
# # LOG Final state
# print('\n')
# print('AFTER')
# print("EIF estimate :")
# print(muEIF)
# # print("EKF estimate :")
# # print(muEKF)
# print("Real state :")
# print(state)
# print("Final Error EIF:")
# print(state - muEIF)
# # print("Final Error EKF:")
# # print(state - muEKF)
# print("Final Max Error EIF: %f" % max(state-muEIF))
# print("Final Norm Error EIF: %f" % np.linalg.norm(state-muEIF))
# # print("Final Max Error EKF: %f" % max(state-muEKF))
# # print("Final Norm Error EKF: %f" % np.linalg.norm(state-muEKF))
# print("Final Max Error SEIF: %f" % max(state-muSEIF))
# print("Final Norm Error SEIF: %f" % np.linalg.norm(state-muSEIF))
landmarks = state[robotFeaturesDim:].reshape(nbLandmark, 2)
plt.figure()
ax = plt.gca()
for x, y in landmarks:
ax.add_artist(Circle(xy=(x, y),
radius=detectionSize,
alpha=0.3))
plt.scatter(landmarks[:, 0], landmarks[:, 1])
plt.plot(states[:, 0], states[:, 1])
plt.plot(mus_simple[:, 0], mus_simple[:, 1])
# plt.plot(mus_ekf[:, 0], mus_ekf[:, 1])
# plt.plot(mus_eif[:, 0], mus_eif[:, 1])
plt.plot(mus_seif[:, 0], mus_seif[:, 1])
plt.legend(['Real position', 'Simple estimate', 'EKF estimate', 'EIF estimate', 'SEIF estimate'])
plt.title("{0} landmarks".format(nbLandmark))
plt.show()
import cProfile
cProfile.run('simu()')
| [
"[email protected]"
] | |
4a53042804f4489e20c7b38c8fe885c299d6caf6 | 28887a9bfd18f64a045c419037e9aba2cfd1bf50 | /typed/for/iw.py | 84da2feb585f08a82b8831cd68d318c5ef8d0e3d | [] | no_license | Coobeliues/pp2_py | 18ba46bc82edc3f0c189f5e51ec6950af5269751 | 89cffd04499691e72e9dbbf9626a84ad27fddf79 | refs/heads/main | 2023-06-28T01:13:12.602998 | 2021-07-29T05:43:17 | 2021-07-29T05:43:17 | 380,136,849 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 107 | py | k=545
maxi=-1112
temple=0
while k!=0:
x=int(input())
if maxi<x:
maxi=x
k=x
print(maxi) | [
"[email protected]"
] | |
efbe5cae3f768724158b26af2d52232b3009deaf | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02889/s800108978.py | f16d83d80d9585a9e51d77414e46a2135a05fdac | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,377 | py | import sys
import math
import heapq
sys.setrecursionlimit(10**7)
INTMAX = 9223372036854775807
INTMIN = -9223372036854775808
DVSR = 1000000007
def POW(x, y): return pow(x, y, DVSR)
def INV(x, m=DVSR): return pow(x, m - 2, m)
def DIV(x, y, m=DVSR): return (x * INV(y, m)) % m
def LI(): return [int(x) for x in sys.stdin.readline().split()]
def LF(): return [float(x) for x in sys.stdin.readline().split()]
def LS(): return sys.stdin.readline().split()
def II(): return int(sys.stdin.readline())
def FLIST(n):
res = [1]
for i in range(1, n+1): res.append(res[i-1]*i%DVSR)
return res
N,M,L=LI()
LG=10**15
DIST=[[LG for _ in range(N+1)] for _ in range(N+1)]
for i in range(M):
a,b,c = LI()
if c <= L:
DIST[a][b] = c
DIST[b][a] = c
for k in range(1, N+1):
for i in range(1, N+1):
for j in range(1, N+1):
if DIST[i][j] > DIST[i][k] + DIST[k][j]:
DIST[i][j] = DIST[i][k] + DIST[k][j]
for i in range(1, N+1):
for j in range(1, N+1):
DIST[i][j] = 1 if DIST[i][j] <= L else LG
for k in range(1, N+1):
for i in range(1, N+1):
for j in range(1, N+1):
if DIST[i][j] > DIST[i][k] + DIST[k][j]:
DIST[i][j] = DIST[i][k] + DIST[k][j]
for i in range(II()):
st, en = LI()
if DIST[st][en] >= LG:
print(-1)
else:
print(DIST[st][en] - 1)
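# Approach notes: this looks like the classic "travel by car" refueling
# problem (e.g. AtCoder ABC 143 E). Roads longer than L can never be driven
# on one tank, so they are dropped; the first Floyd-Warshall then gives
# pairwise road distances. Reweighting sets DIST[i][j] = 1 exactly when j is
# reachable from i on a single tank (distance <= L), and the second
# Floyd-Warshall counts the minimum number of single-tank hops; the answer
# printed is hops - 1, the number of refuels.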
| [
"[email protected]"
] | |
2479809badda69fa0efea62428b734087221bdbb | e648becc46b5a87c5ad2760b69933eb8b34b1f2a | /stock_tracking_project/registration/migrations/0005_userstock_c_type.py | 91be33a94f72cc989c92ad03255dbace55d273e1 | [] | no_license | webclinic017/stock_analyzer-1 | 0da7f5f06415fa93c3f5ebbdf82ce1aaad700fd1 | 1664da1671cec78aed68f393cdbb4911e9c87503 | refs/heads/main | 2023-08-29T23:16:23.869305 | 2021-11-11T18:51:40 | 2021-11-11T18:51:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 440 | py | # Generated by Django 3.1.3 on 2021-01-03 08:59
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('registration', '0004_auto_20210103_0956'),
]
operations = [
migrations.AddField(
model_name='userstock',
name='c_type',
field=models.CharField(default='W', max_length=2),
preserve_default=False,
),
]
| [
"[email protected]"
] | |
655276d1723590878fde8fff3765da9fc89a2a83 | 7bcaf7eab49a37c4e38c0854849ef34f2ab9ebbb | /Author-Paper/PythonBenchmark/TNNPA-C.py | e20fba438300e667f38f9393e877af059414b41f | [
"BSD-2-Clause"
] | permissive | pratapbhanu/misc | 08a0e42d214754bb1ce0d41dfdba0b12f278c008 | 41c5dca20dbcfcb2390259f6268a2aab62059aaa | refs/heads/master | 2020-04-15T00:47:07.287933 | 2013-06-11T12:13:46 | 2013-06-11T12:13:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,144 | py | '''
Created on Nov 7, 2012
@author: bhanu
'''
import sys
from array import array
import numpy as np
import os
import math
class Layer(object):
'''
Represents one single layer in a Mlp
'''
def __init__(self, nNeurons, nInpsPerNeuron, transferF, ilayer):
'''
Each layer has:
nNeurons: Number of neurons
nInpsPerNeuron: Number of inputs per Neuron, not needed for input layer so use '-1' for input layer
transferF: Transfer Function, which could be 'tanh', 'logistic' or 'identity'
ilayer: Index of the layer
'''
self.nNeurons = nNeurons
self.transferF = transferF
self.ilayer = ilayer
self.nInpsPerNeuron = nInpsPerNeuron
if(ilayer != 0): #if this is not an input layer
self.W = (4)*np.random.random_sample(size=(nInpsPerNeuron+1,nNeurons)) - 2 #W[0,i] beingh the BIAS weights
self.W[0,:] = -0.5 #Bias Weight
self.net = np.zeros(nNeurons) #place holder vector for Net i.e. weighted sum for each neuron of this layer
self.out = np.zeros(nNeurons) #place holder vector for Output of each neuron of this layer
self.delta = np.zeros(nNeurons) #place holder vector for delta of this layer
class Mlp(object):
'''
Represents a Multi Layer Perceptron Network
'''
def __init__(self, layers):
'''
Constructor
Parameters:
Layers: List of 'Layer' objects
'''
self.layers = layers
self.learningRate = learningRate
def trainMlp(self, dataFile, learningRate=0.1, epochs=1000):
'''
Trains this Mlp with the training data
'''
trainSet = getTrainingData(dataFile)
return trainBPE(self,trainSet, learningRate, epochs)
def test(self):
'''
Test the trained Mlp network
'''
while(True):
print"\n\nTesting trained perzeptron network, press Ctrl+c to quit"
Xraw = raw_input("Enter inputs separated by space to test this trained Mlp: ")
Xlist = Xraw.strip().strip('\n').split(' ')
X = [float(x) for x in Xlist]
#Propagate the inputs forward to compute the outputs
outp = list(X) #output of input layer i.e. output of previous layer to be used as input for next layer
for layer in mlp.layers[1:] : #for all layers starting from the second layer
for i in range(layer.nNeurons):
layer.net[i] = weightedSum(outp, layer.W[1:,i]) + layer.W[0,i]
layer.out[i] = g(layer.net[i], layer.transferF) #pass this weighted sum through the transfer function of this layer
outp = layer.out
print "output = ", mlp.layers[-1].out
def showMlp(self):
'''
Print all the layers of this perzeptron
'''
for layer in self.layers:
print 'Layer ', layer.ilayer
print 'Number of Neurons: ', layer.nNeurons
print 'Transfer Function: ', layer.transferF
if(layer.ilayer != 0):
print 'Weights(',layer.W.shape,'): ', layer.W
print '\n'
def getTrainingData(dataFile):
#----------prepare training data from the dataFile---------
head, tail = os.path.split(dataFile)
if(head == ''):
cwd = os.path.curdir
trainingFile = os.path.join(cwd,tail)
f = open(trainingFile)
trainSet = [] #training samples
lines = f.readlines()
if(len(lines) > 1000):
terminate("File Contains more than 1000 samples")
for line in lines:
if(line[0] == '#'):
continue
X = []
Y = [] # X and Y hold the inputs and outputs for this sample
x_y = line.split(' ') # split the line into X (inputs) and Y (outputs), separated by a space
x = x_y[0].strip()
y = x_y[1].strip()
xstr = x.split() #split inputs with space
ystr = y.split() #split outputs with space
for inp in xstr:
X.append(float(inp))
for outp in ystr:
Y.append(float(outp))
trainSet.append((X,Y))
#print trainSet
return trainSet
def terminate(msg):
print """
Please run the program with valid arguments.
USAGE: $ python TNNPA-C.py N M dataFile
where,
N : Dimension of Input Layer (x), less than 101
M : Dimension of Output Layer (y), less than 30
dataFile : Name of the file containing the training data; if it is
not in the program's current working directory,
provide a fully qualified path. Maximum 1000 samples.
Example: $ python TNNPA-C.py 4 2 training.dat
"""
sys.exit(msg)
def trainBPE(mlp, trainSet, learningRate, maxEpoch):
'''
Training of Multi-layer perceptron using Backpropagation of Error
Parameters:-
mlp: Object of Mlp class
trainSet: List of training tuples,
use method 'getTrainingData()' to get a valid training
set from a training data file
'''
iteration = 1
f = open('learning.curve', 'w')
f.write('#Epoch-Number #Mean Maximum Single Error \n')
while(True):
meanMaxError = maxerror = 0
for x, y in trainSet :
#Propagate the inputs forward to compute the outputs
outp = list(x) #output of input layer i.e. output of previous layer to be used as input for next layer
for layer in mlp.layers[1:] : #for all layers starting from the second layer
for i in range(layer.nNeurons):
layer.net[i] = weightedSum(outp, layer.W[1:,i]) + layer.W[0,i]
layer.out[i] = g(layer.net[i], layer.transferF) #pass this weighted sum through the transfer function of this layer
outp = layer.out
#Propagate deltas backward from output layer to input layer
layer = mlp.layers[-1]
for m in range(layer.nNeurons): #for neurons in output layer
layer.delta[m] = derivativeOfG(layer.net[m], layer.transferF) * (y[m] - layer.out[m])
deltaP = layer.delta # delta of a layer to be used by a layer above it, starting from output layer
for l in range(len(mlp.layers)-2,0,-1) : # for all hidden layers until input layer
thislayer = mlp.layers[l]
layerbelow = mlp.layers[l+1]
for h in range(layer.nNeurons):
thislayer.delta[h] = derivativeOfG(thislayer.net[h], thislayer.transferF) * weightedSum(deltaP, layerbelow.W[h+1,:])
deltaP = thislayer.delta # for the next layer
#Update every weight in network using deltas
out_i = list(x)
for layer in mlp.layers[1:] :
#update current weights
for i, inp in enumerate(out_i):
for j in range(layer.nNeurons):
layer.W[i+1,j] += learningRate * (inp * layer.delta[j])
out_i = layer.out
error = [math.fabs(value) for value in y - mlp.layers[-1].out ]
maxerror += max(error)
meanMaxError = maxerror/len(trainSet)
f.write(str(iteration)+' '+str(meanMaxError)+'\n')
if(iteration > maxEpoch):
break
iteration += 1
f.close()
return mlp, iteration
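# Update rule used above, for reference: for a connection from neuron i
# (output out_i) to neuron j with local error delta_j, backpropagation of
# error applies
#
#     w_ij <- w_ij + learningRate * out_i * delta_j
#
# where delta is g'(net) * (target - output) at the output layer and
# g'(net) * sum_k(w_jk * delta_k) at hidden layers, matching the three
# nested loops in trainBPE.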
def g(inp, transferF):
if transferF == 'tanh':
value = math.tanh(inp)
elif transferF == 'identity':
value = inp
elif transferF == 'logistic':
value = 1 / (1 + math.exp(-inp))
else :
raise ValueError('Invalid transfer function type: ', transferF)
return value
def isStoppingCriterion():
return False
def derivativeOfG(inp, transferF):
if transferF == 'tanh':
temp = math.tanh(inp)
value = 1 - temp*temp # 1 - tanh^2
elif transferF == 'identity':
value = 1 # the derivative of the identity function is 1
elif transferF == 'logistic':
temp = 1 / (1 + math.exp(-inp))
value = temp*(1-temp) # derivative of logistic function is f*(1-f)
else :
raise ValueError('Invalid transfer function type: ', transferF)
return value
def weightedSum(inputVector, weights):
# print inputVector
# print weights
sum = (np.sum(inputVector*weights))
# print sum
return sum
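# weightedSum is just an inner product; np.dot(inputVector, weights) would be
# an equivalent (and typically faster) form. The explicit elementwise
# product + sum is kept here to mirror the textbook formula.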
if __name__ == '__main__':
N = 4 #N : number of inputs/neurons for input layer
H1 = 10 #H : number of neurons in hidden layer-1
#H2 = 5
M = 2 #number of outputs/neurons of the output layer
dataFile = 'training.dat'
learningRate = 0.1
epochs = 5000
#define layers of MLP keeping in mind that output of one layer is the number of inputs for the next layer
layer0 = Layer(nNeurons=N, nInpsPerNeuron=-1, transferF='identity', ilayer=0) #input layer
layer1 = Layer(nNeurons=H1, nInpsPerNeuron=N, transferF='tanh', ilayer=1) #hidden layer 1
layer2 = Layer(nNeurons=M, nInpsPerNeuron=H1, transferF='tanh', ilayer=2) #output layer
#layer3 = Layer(nNeurons=M, nInpsPerNeuron=H2, transferF='logistic', ilayer=3) #output layer
layers = [layer0, layer1, layer2 ]
mlp = Mlp(layers)
mlp.showMlp()
print "\n\nTraining Mlp for", epochs," Epochs.... please wait... "
trainedMlp, iterations = mlp.trainMlp(dataFile, learningRate, epochs)
print "\n\nFinished training of Mlp "
trainedMlp.showMlp()
mlp.test() | [
"[email protected]"
] | |
7dbabdfd9aeebe7cb5bacb8c2927bb7147f582f9 | 96b9c52f3ac5dea667803a0b9ecdb838f2b579ab | /models/feedback.py | e4dd1979fe2eeccdb49085144f9d41260fdb16d8 | [
"MIT"
] | permissive | livoras/feifanote-server | b581afb3b90dc7c8296f2573077079fa269ae80d | 319907f8e107757470160cb3dc0adba14357bed1 | refs/heads/master | 2020-12-24T13:52:15.497940 | 2014-06-26T05:14:11 | 2014-06-26T05:14:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 634 | py | # -*- coding: utf-8 -*-
import json
import sqlalchemy as sc
from sqlalchemy.orm import relationship, backref
from common.db import Base
class Feedback(Base):
__tablename__ = "feedbacks"
id = sc.Column(sc.Integer, primary_key=True)
content = sc.Column(sc.String)
user_id = sc.Column(sc.Integer, sc.ForeignKey("users.id"))
user = relationship("User", backref="feedbacks")
def __init__(self, **data):
self.__dict__.update(data)
def dict(self):
attrs = ("id", "content")
return {attr: getattr(self, attr) for attr in attrs}
def __repr__(self):
return json.dumps(self.dict())
| [
"[email protected]"
] | |
9e42099374ca1dba84d9401cee09e7264ea2caa5 | ace419028e75740f75b6802809282056c9cd7e2b | /training_script.py | 498e39512400f16b3d6a529ca9dc9a2cf96b69ec | [] | no_license | gtambi143/machine_learning_pipeline | 44a2e8f037b7bd899828f2b404aee697dc450aae | f01e43c580eb695a7f3caf609ef9c1987a8b664e | refs/heads/master | 2022-08-31T19:34:16.411484 | 2020-05-24T20:54:44 | 2020-05-24T20:54:44 | 266,617,711 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,228 | py | #importing the required libraries
import torch
import torchvision
from PIL import Image
import os
from torch.utils.data import Dataset, DataLoader, RandomSampler
import torch.nn as nn
import torch.optim as optim
from torch.optim import lr_scheduler
import time
import os
import copy
import torchvision.transforms as transforms
import datetime
import shutil
#replace the below paths with respect to the location of this training script
CURRENT_MODEL_PATH = "model/current_model/alexnet_model_current_prod.pth"
OLD_MODEL_DIR = "model/old_models/"
TRAINING_SAMPLE_DIR = "model/training_samples/"
TRAINED_SAMPLE_DIR = "model/trained_samples/"
IMAGENET_LABELS_FILE = "model/imagenet-classes.txt"
MIN_IMAGES = 4 # set to a low value for testing purposes only
# run this script only when there are more than MIN_IMAGES files in the training samples folder,
# because training the model on a very small number of samples is not useful
if len(os.listdir(TRAINING_SAMPLE_DIR)) > MIN_IMAGES:
#Inheriting Dataset class of Pytorch so that we can create a dataset from our training samples
class ImageNet_Dataset(Dataset):
#the constructor of the class will take 3 parameters
# img_dir - directory where the training images are placed
# label_file - directory where the lable file is placed which contains all the labels
# transform - transformation which will be applied on the images
def __init__(self, img_dir, label_file, transform = None):
self.img_dir = img_dir
self.label_file = label_file
self.transform = transform
def __len__(self):
return len(os.listdir(self.img_dir))
def __getitem__(self, idx):
if torch.is_tensor(idx):
idx = idx.to_list()
img_name = os.listdir(self.img_dir)[idx]
img_path = os.path.join(self.img_dir, img_name)
img = Image.open(img_path)
img = self.transform(img)
img_label = img_name.split("_label$$")[1].split(".")[0]
#preparing the label list from the file
label_list = [f.split("\n")[0] for f in open(self.label_file)]
#storing label index
label_index = label_list.index(img_label)
#returning the image and its label's index
return img, label_index
#define transformations to apply on the training samples
transform = transforms.Compose([ #[1]
transforms.Resize(256), #[2]
transforms.CenterCrop(224), #[3]
transforms.ToTensor(), #[4]
transforms.Normalize( #[5]
mean=[0.485, 0.456, 0.406], #[6]
std=[0.229, 0.224, 0.225] #[7]
)])
#preparing the dataset of the images present in the training samples folder
img_dataset = ImageNet_Dataset(img_dir = TRAINING_SAMPLE_DIR, label_file = IMAGENET_LABELS_FILE, transform = transform)
#dataloader from dataset
dataloaders = DataLoader(img_dataset, batch_size = 16, shuffle = True)
#function to train the model
def train_model(model, criterion, optimizer, scheduler, num_epochs=25):
since = time.time()
# best_model_wts = copy.deepcopy(model.state_dict())
best_acc = 0.0
for epoch in range(num_epochs):
print('Epoch {}/{}'.format(epoch, num_epochs - 1))
print('-' * 10)
running_loss = 0.0
running_corrects = 0
# Iterate over data.
for inputs, labels in dataloaders:
inputs = inputs.to(device)
labels = labels.to(device)
# zero the parameter gradients
optimizer.zero_grad()
# forward
# track history
with torch.set_grad_enabled(True):
outputs = model(inputs)
_, preds = torch.max(outputs, 1)
loss = criterion(outputs, labels)
# backward + optimize
loss.backward()
optimizer.step()
# statistics
running_loss += loss.item() * inputs.size(0)
running_corrects += torch.sum(preds == labels.data)
scheduler.step()
epoch_loss = running_loss / len(img_dataset)
epoch_acc = running_corrects.double() / len(img_dataset)
print('{} Loss: {:.4f} Acc: {:.4f}'.format("train", epoch_loss, epoch_acc))
time_elapsed = time.time() - since
print('Training complete in {:.0f}m {:.0f}s'.format(time_elapsed // 60, time_elapsed % 60))
return model
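# Training-loop notes: this is a single-phase fine-tuning loop. All model
# parameters receive gradients (full fine-tuning rather than frozen feature
# extraction), the StepLR scheduler is stepped once per epoch, and loss and
# accuracy are accumulated per sample, so the printed epoch metrics are
# averages over the whole training set. There is no validation phase, so
# best_acc above is currently unused.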
# getting the available device
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
#declaring the model variable
model_ft = torchvision.models.alexnet()
#loading the model from current model
model_ft.load_state_dict(torch.load(CURRENT_MODEL_PATH))
#transfer model to the available device
model_ft = model_ft.to(device)
criterion = nn.CrossEntropyLoss()
# Observe that all parameters are being optimized
optimizer_ft = optim.SGD(model_ft.parameters(), lr=0.001, momentum=0.9)
# Decay LR by a factor of 0.1 every 7 epochs
exp_lr_scheduler = lr_scheduler.StepLR(optimizer_ft, step_size=7, gamma=0.1)
#newly trained model
model_ft = train_model(model_ft, criterion, optimizer_ft, exp_lr_scheduler, num_epochs=10)
#move the training samples to trained samples folder
for f in os.listdir(TRAINING_SAMPLE_DIR):
shutil.move(TRAINING_SAMPLE_DIR+f, TRAINED_SAMPLE_DIR)
#old_model_name
now = datetime.datetime.now()
old_model_name = 'alexnet_model_'+ str(now.strftime("%Y-%m-%d_%H_%M_%S"))+".pth"
#move the current production model to the old folder
shutil.move(CURRENT_MODEL_PATH, OLD_MODEL_DIR + old_model_name)
#save the new model in current_model folder which is our production model
torch.save(model_ft.state_dict(), CURRENT_MODEL_PATH) | [
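# To serve the refreshed model elsewhere, it can be reloaded from the
# promoted checkpoint. A minimal sketch, assuming the same AlexNet
# architecture and CURRENT_MODEL_PATH as above:
#
#     model = torchvision.models.alexnet()
#     model.load_state_dict(torch.load(CURRENT_MODEL_PATH))
#     model.eval()  # switch to inference mode before serving predictions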
"[email protected]"
] | |
743ecc68d8c7d1e1666f1c43d4b752fb6c808bf0 | e615ec0eaec8466f6dbd8bc1cbc62839d760467e | /DASH-Plotly/Plotly/1-02-ScatterPlots/scatter1.py | 9c598f72b200791eef6894630887ffc1ba38f1ba | [] | no_license | SayantanMitra87/Web_Application-Plotly-Dash-Flask | cd5743defed1707a475e2273e68894876673c46e | 9bd462c380eef2919d86b0f2daa870e3a8a24097 | refs/heads/master | 2020-04-15T01:58:24.751145 | 2019-01-15T07:43:25 | 2019-01-15T07:43:25 | 164,298,513 | 1 | 3 | null | null | null | null | UTF-8 | Python | false | false | 484 | py | #######
# This plots 100 random data points (set the seed to 42 to
# obtain the same points we do!) between 1 and 100 in both
# vertical and horizontal directions.
######
import plotly.offline as pyo
import plotly.graph_objs as go
import numpy as np
np.random.seed(42)
random_x = np.random.randint(1,101,100)
random_y = np.random.randint(1,101,100)
data = [go.Scatter(
x = random_x,
y = random_y,
mode = 'markers',
)]
pyo.plot(data, filename='scatter1_secondrun.html')
| [
"[email protected]"
] | |
2ae5013c86e17afbbe7484348e9fda0a3165d0ef | 6cfbe44acfa9d3fa87d9ad8332d9022fc8db58b6 | /urls_shorten/asgi.py | 11018bcdd8ec1c9bc262c095535604c568649561 | [] | no_license | isha27255/Url-Shortner- | 23379935d8a20842f91d1d13a86bd8e9cbd53bb9 | 36243e8e9c09dd1abd4c158ef6e18152da4be527 | refs/heads/master | 2022-12-28T22:47:43.723908 | 2020-10-07T06:08:00 | 2020-10-07T06:08:00 | 296,254,275 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 401 | py | """
ASGI config for urls_shorten project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'urls_shorten.settings')
application = get_asgi_application()
| [
"[email protected]"
] | |
57cea6e03a2b880024e86045f1beb2b8b79424c2 | a39c100e99a72099e07d86dd9565fe71c47d4b1d | /lib/shared_parameters.py | c2547dbbfdef97bedecd568069f7fa1d475a07d0 | [
"MIT"
] | permissive | Thomas84/SSG.extension | 51b0c2ff989f1aec82c7fdd40fb4f13e767e4822 | ea0e0472e1c3940a63cab8c013228cba6425595a | refs/heads/master | 2023-07-26T04:56:52.002009 | 2021-09-08T16:27:56 | 2021-09-08T16:27:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,200 | py | from pyrevit import revit, DB
from Autodesk.Revit.Exceptions import ArgumentException, InvalidOperationException
STANDARD_PARAMETERS = {
"STD_Widths": DB.BuiltInParameterGroup.PG_CONSTRAINTS,
"MIN_Width": DB.BuiltInParameterGroup.PG_CONSTRAINTS,
"MAX_Width": DB.BuiltInParameterGroup.PG_CONSTRAINTS,
"STD_Depths": DB.BuiltInParameterGroup.PG_CONSTRAINTS,
"MIN_Depth": DB.BuiltInParameterGroup.PG_CONSTRAINTS,
"MAX_Depth": DB.BuiltInParameterGroup.PG_CONSTRAINTS,
"STD_Heights": DB.BuiltInParameterGroup.PG_CONSTRAINTS,
"MIN_Height": DB.BuiltInParameterGroup.PG_CONSTRAINTS,
"MAX_Height": DB.BuiltInParameterGroup.PG_CONSTRAINTS,
"INFO_Lead Time": DB.BuiltInParameterGroup.PG_CONSTRUCTION,
"URL_Warranty": DB.BuiltInParameterGroup.PG_CONSTRUCTION,
"SSG_Short Description": DB.BuiltInParameterGroup.PG_TEXT,
"SSG_Long Description": DB.BuiltInParameterGroup.PG_TEXT,
"URL_Finish Options": DB.BuiltInParameterGroup.PG_MATERIALS,
"ACTUAL_Weight": DB.BuiltInParameterGroup.PG_STRUCTURAL,
"ACTUAL_Width": DB.BuiltInParameterGroup.PG_GEOMETRY,
"ACTUAL_Depth": DB.BuiltInParameterGroup.PG_GEOMETRY,
"ACTUAL_Height": DB.BuiltInParameterGroup.PG_GEOMETRY,
"URL_Sustainability": DB.BuiltInParameterGroup.PG_GREEN_BUILDING,
"TOTAL_List Price": DB.BuiltInParameterGroup.PG_DATA,
"zC": DB.BuiltInParameterGroup.INVALID,
"zM": DB.BuiltInParameterGroup.INVALID,
"zO": DB.BuiltInParameterGroup.INVALID,
"zP": DB.BuiltInParameterGroup.INVALID,
"SSGFID": DB.BuiltInParameterGroup.PG_IDENTITY_DATA,
"SSGTID": DB.BuiltInParameterGroup.PG_IDENTITY_DATA,
"SSG_Author": DB.BuiltInParameterGroup.PG_IDENTITY_DATA,
"SSG_Product Code": DB.BuiltInParameterGroup.PG_IDENTITY_DATA,
"SSG_Toll Free Number": DB.BuiltInParameterGroup.PG_IDENTITY_DATA,
"URL_Contact Southwest Solutions Group": DB.BuiltInParameterGroup.PG_IDENTITY_DATA,
"URL_Installation Manual": DB.BuiltInParameterGroup.PG_IDENTITY_DATA,
"URL_Product Page": DB.BuiltInParameterGroup.PG_IDENTITY_DATA,
"URL_Specification Manual": DB.BuiltInParameterGroup.PG_IDENTITY_DATA,
}
def get_shared_param_by_name(name):
app = revit.doc.Application
shared_parameters_file = app.OpenSharedParameterFile()
shared_groups = shared_parameters_file.Groups
params = []
for group in shared_groups:
for p in group.Definitions:
if p.Name == name:
params.append(p)
if len(params) > 0:
return params[0]
def get_all_shared_names():
app = revit.doc.Application
shared_parameters_file = app.OpenSharedParameterFile()
shared_groups = shared_parameters_file.Groups
params = []
for group in shared_groups:
for p in group.Definitions:
params.append(p.Name)
return params
# Must be in the context of a Revit Transaction
def replace_with_shared(fam_param, shared_param):
replaced_param = None
if fam_param.Definition.Name == shared_param.Name:
revit.doc.FamilyManager.RenameParameter(
fam_param, fam_param.Definition.Name + "_Temp"
)
try:
replaced_param = revit.doc.FamilyManager.ReplaceParameter(
fam_param,
shared_param,
fam_param.Definition.ParameterGroup,
fam_param.IsInstance,
)
except InvalidOperationException as ie:
print("InvalidOperationExcpetion: {}".format(ie))
except ArgumentException as ae:
print("ArgumentExcpetion: {}".format(ae))
return replaced_param
def add_standards():
params = []
for fam_param in revit.doc.FamilyManager.Parameters:
fam_name = fam_param.Definition.Name
if fam_name in STANDARD_PARAMETERS.keys():
STANDARD_PARAMETERS.pop(fam_name, None)
replaced_param = replace_with_shared(
fam_param, get_shared_param_by_name(fam_name)
)
params.append(replaced_param)
for k, v in STANDARD_PARAMETERS.items():
shared_param = get_shared_param_by_name(k)
new_param = revit.doc.FamilyManager.AddParameter(shared_param, v, False)
params.append(new_param)
return params
| [
"[email protected]"
] | |
232fda255df912f5695d81f10fe1a9d460d331e7 | 6f7abcf8f1649db723a6b51f9332d7373a5ce4c7 | /BoardGameCommunity/Shop_ActivityDetail.py | 8e72afeda42da763dd88379bb0a0774e6d4efed7 | [] | no_license | NaemaPDz/BoardGameCommunity | 7a6a0b087ba3cc61718bc4bcca7d6577095a281f | 3abaae6d934c267e4134725dfe07cb2f7a1cf405 | refs/heads/main | 2023-03-31T15:14:41.316934 | 2021-04-13T06:12:29 | 2021-04-13T06:12:29 | 357,442,527 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,499 | py | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'UI/Shop_ActivityDetail.ui'
#
# Created by: PyQt5 UI code generator 5.15.2
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtWidgets import QMessageBox
import ApplicationManagement as am
import Shop_ActivityList, Shop_ActivityMember
class Ui_Dialog(object):
def setupUi(self, Dialog):
Dialog.setObjectName("Dialog")
Dialog.resize(470, 710)
Dialog.setFixedSize(470, 710)
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap("img/Icon.png"), QtGui.QIcon.Selected, QtGui.QIcon.On)
Dialog.setWindowIcon(icon)
font = QtGui.QFont()
font.setFamily("ZCOOL QingKe HuangYou")
font.setPointSize(12)
Dialog.setFont(font)
Dialog.setStyleSheet("background-color: rgb(235, 235, 255);")
self.lbl_title = QtWidgets.QLabel(Dialog)
self.lbl_title.setGeometry(QtCore.QRect(20, 20, 431, 61))
font = QtGui.QFont()
font.setFamily("ZCOOL QingKe HuangYou")
font.setPointSize(20)
font.setBold(False)
font.setItalic(False)
font.setWeight(50)
self.lbl_title.setFont(font)
self.lbl_title.setStyleSheet("background-color: rgba(255, 255, 255, 0);\n"
"color: rgb(36, 36, 111);\n"
"font: 16pt \"Bai Jamjuree\";")
self.lbl_title.setTextFormat(QtCore.Qt.AutoText)
self.lbl_title.setAlignment(QtCore.Qt.AlignCenter)
self.lbl_title.setObjectName("lbl_title")
self.btn_cancelActivity = QtWidgets.QPushButton(Dialog)
self.btn_cancelActivity.setGeometry(QtCore.QRect(40, 600, 391, 41))
self.btn_cancelActivity.setStyleSheet("background-color: rgb(200, 9, 19);\n"
"color: rgb(255, 255, 255);\n"
"font: 12pt \"ZCOOL QingKe HuangYou\";\n"
"border-radius: 10px;\n"
"border: 2px solid rgb(200, 9, 19);")
self.btn_cancelActivity.setObjectName("btn_cancelActivity")
self.btn_back = QtWidgets.QPushButton(Dialog)
self.btn_back.setGeometry(QtCore.QRect(360, 660, 91, 31))
self.btn_back.setStyleSheet("background-color: rgba(0, 0, 0, 0);\n"
"color: rgb(200, 9, 19);\n"
"font: 12pt \"ZCOOL QingKe HuangYou\";\n"
"border-radius: 10px;\n"
"border: 2px solid rgb(200, 9, 19);")
self.btn_back.setObjectName("btn_back")
self.label_10 = QtWidgets.QLabel(Dialog)
self.label_10.setGeometry(QtCore.QRect(40, 300, 211, 31))
font = QtGui.QFont()
font.setFamily("ZCOOL QingKe HuangYou")
font.setPointSize(12)
font.setBold(False)
font.setItalic(False)
font.setWeight(50)
self.label_10.setFont(font)
self.label_10.setStyleSheet("background-color: rgba(255, 255, 255, 0);\n"
"font: 12pt \"ZCOOL QingKe HuangYou\";\n"
"color: rgb(36, 36, 111);")
self.label_10.setObjectName("label_10")
self.label_11 = QtWidgets.QLabel(Dialog)
self.label_11.setGeometry(QtCore.QRect(40, 90, 211, 31))
font = QtGui.QFont()
font.setFamily("ZCOOL QingKe HuangYou")
font.setPointSize(12)
font.setBold(False)
font.setItalic(False)
font.setWeight(50)
self.label_11.setFont(font)
self.label_11.setStyleSheet("background-color: rgba(255, 255, 255, 0);\n"
"font: 12pt \"ZCOOL QingKe HuangYou\";\n"
"color: rgb(36, 36, 111);")
self.label_11.setObjectName("label_11")
self.label_12 = QtWidgets.QLabel(Dialog)
self.label_12.setGeometry(QtCore.QRect(40, 160, 211, 31))
font = QtGui.QFont()
font.setFamily("ZCOOL QingKe HuangYou")
font.setPointSize(12)
font.setBold(False)
font.setItalic(False)
font.setWeight(50)
self.label_12.setFont(font)
self.label_12.setStyleSheet("background-color: rgba(255, 255, 255, 0);\n"
"font: 12pt \"ZCOOL QingKe HuangYou\";\n"
"color: rgb(36, 36, 111);")
self.label_12.setObjectName("label_12")
self.label_14 = QtWidgets.QLabel(Dialog)
self.label_14.setGeometry(QtCore.QRect(40, 230, 211, 31))
font = QtGui.QFont()
font.setFamily("ZCOOL QingKe HuangYou")
font.setPointSize(12)
font.setBold(False)
font.setItalic(False)
font.setWeight(50)
self.label_14.setFont(font)
self.label_14.setStyleSheet("background-color: rgba(255, 255, 255, 0);\n"
"font: 12pt \"ZCOOL QingKe HuangYou\";\n"
"color: rgb(36, 36, 111);")
self.label_14.setObjectName("label_14")
self.lbl_date = QtWidgets.QLabel(Dialog)
self.lbl_date.setGeometry(QtCore.QRect(40, 120, 391, 41))
font = QtGui.QFont()
font.setFamily("Bai Jamjuree")
font.setPointSize(12)
font.setBold(False)
font.setItalic(False)
font.setWeight(50)
self.lbl_date.setFont(font)
self.lbl_date.setStyleSheet("background-color: rgba(255, 255, 255, 0);\n"
"font: 12pt \"Bai Jamjuree\";")
self.lbl_date.setObjectName("lbl_date")
self.lbl_time = QtWidgets.QLabel(Dialog)
self.lbl_time.setGeometry(QtCore.QRect(40, 190, 381, 41))
font = QtGui.QFont()
font.setFamily("Bai Jamjuree")
font.setPointSize(12)
font.setBold(False)
font.setItalic(False)
font.setWeight(50)
self.lbl_time.setFont(font)
self.lbl_time.setStyleSheet("background-color: rgba(255, 255, 255, 0);\n"
"font: 12pt \"Bai Jamjuree\";")
self.lbl_time.setObjectName("lbl_time")
self.lbl_member = QtWidgets.QLabel(Dialog)
self.lbl_member.setGeometry(QtCore.QRect(40, 260, 381, 41))
font = QtGui.QFont()
font.setFamily("Bai Jamjuree")
font.setPointSize(12)
font.setBold(False)
font.setItalic(False)
font.setWeight(50)
self.lbl_member.setFont(font)
self.lbl_member.setStyleSheet("background-color: rgba(255, 255, 255, 0);\n"
"font: 12pt \"Bai Jamjuree\";")
self.lbl_member.setObjectName("lbl_member")
self.tbro_description = QtWidgets.QTextBrowser(Dialog)
self.tbro_description.setGeometry(QtCore.QRect(40, 330, 381, 101))
self.tbro_description.setStyleSheet("font: 12pt \"Bai Jamjuree\";\n"
"background-color: rgba(255, 255, 255, 0);\n"
"border: 0px;")
self.tbro_description.setObjectName("tbro_description")
self.btn_viewMember = QtWidgets.QPushButton(Dialog)
self.btn_viewMember.setGeometry(QtCore.QRect(320, 260, 111, 41))
self.btn_viewMember.setStyleSheet("background-color: rgba(255, 255, 255, 0);\n"
"font: 12pt \"ZCOOL QingKe HuangYou\";\n"
"color: rgb(85, 85, 255);\n"
"border-radius: 10px;\n"
"border: 2px solid rgb(85, 85, 255);")
self.btn_viewMember.setObjectName("btn_viewMember")
self.retranslateUi(Dialog)
QtCore.QMetaObject.connectSlotsByName(Dialog)
self.this_dialog = Dialog
self.ShowActivityDetail()
self.btn_cancelActivity.clicked.connect(self.CancelActivity)
self.btn_viewMember.clicked.connect(self.ViewActivityMember)
self.btn_back.clicked.connect(self.BackToShopActivity)
def __init__(self, passData):
self.userData = passData['userData']
activityList = am.FetchActivityList({'_id': passData['activityID']})
self.activityData: None
for i in activityList:
self.activityData = i
break
def ShowActivityDetail(self):
self.lbl_title.setText(self.activityData['activityName'])
self.lbl_date.setText(self.activityData['date'])
self.lbl_time.setText("{} - {}".format(self.activityData['startTime'], self.activityData['endTime']))
self.lbl_member.setText("{} / {}".format(self.activityData['currentPlayer'], self.activityData['maxPlayer']))
self.tbro_description.setText(self.activityData['description'])
def ViewActivityMember(self):
activityData = {'activityName': self.activityData['activityName'],
'maxPlayer': self.activityData['maxPlayer'],
'joinedPlayerID': self.activityData['joinedPlayerID']}
am.OpenNewWindowDialog(Shop_ActivityMember, passData=activityData)
def CancelActivity(self):
reply = am.ShowConfirmBox("Cancel Activity",
"Cancel Activity [{}].\nAre you sure ?".format(self.activityData['activityName']))
if reply:
try:
am.DeleteActivity(self.activityData['_id'])
am.ShowMessageBox("Cancel Activity",
"Cancel Activity [{}]\nSuccessful !!".format(self.activityData['activityName']),
QMessageBox.Information)
self.BackToShopActivity()
except:
am.ShowMessageBox("Cancel Activity",
"Cancel Activity [{}]\nFailed !!\nPlease try again !!".format(
self.activityData['activityName']),
QMessageBox.Critical)
def BackToShopActivity(self):
am.ChangeWindowDialog(self.this_dialog, Shop_ActivityList, passData=self.userData)
def retranslateUi(self, Dialog):
_translate = QtCore.QCoreApplication.translate
Dialog.setWindowTitle(_translate("Dialog", "Board Game Community"))
self.lbl_title.setText(_translate("Dialog", "**ACTIVITY NAME**"))
self.btn_cancelActivity.setText(_translate("Dialog", "Cancel Activity"))
self.btn_back.setText(_translate("Dialog", "Back"))
self.label_10.setText(_translate("Dialog", "Description"))
self.label_11.setText(_translate("Dialog", "Date and Time"))
self.label_12.setText(_translate("Dialog", "Time"))
self.label_14.setText(_translate("Dialog", "Member"))
self.lbl_date.setText(_translate("Dialog", "**DD-MMM-YY**"))
self.lbl_time.setText(_translate("Dialog", "**START TIME - END TIME**"))
self.lbl_member.setText(_translate("Dialog", "**00 / 00**"))
self.tbro_description.setHtml(_translate("Dialog",
"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Bai Jamjuree\'; font-size:12pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">**DESCRIPTION</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">DESCRIPTION**</p></body></html>"))
self.btn_viewMember.setText(_translate("Dialog", "View Member"))
if __name__ == "__main__":
import sys
app = QtWidgets.QApplication(sys.argv)
Dialog = QtWidgets.QDialog()
ui = Ui_Dialog()
ui.setupUi(Dialog)
Dialog.show()
sys.exit(app.exec_())
| [
"[email protected]"
] | |
403050852dd2e8392f1e8610f4911bf3608ab119 | 9ee751382146d280c0105981e2e54fa900cb04de | /djblets/util/tests/test_compressed_tags.py | 1d1d87775890230056755ea9767bb66c60caefae | [] | no_license | lmyfzx/djblets | 25c3d3fb2478047eede05238b60b6d16598f9131 | 33b4475cfabe24644335093a028d7d2aabc4ab84 | refs/heads/master | 2023-02-03T18:20:46.873799 | 2020-12-22T10:58:35 | 2020-12-22T10:58:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,966 | py | """Unit tests for djblets.util.templatetags.djblets_forms."""
from __future__ import unicode_literals
import os
from django.conf import settings
from django.template import Context, Template
from pipeline.conf import settings as pipeline_settings
from djblets.testing.testcases import TestCase
class CompressedTagsTests(TestCase):
"""Unit tests for the {% compressed_* %} template tags."""
def test_compressed_css_tag(self):
"""Testing {% compressed_css %}"""
self._touch_files(['test.css', 'test.d41d8cd98f00.css'])
pipeline_settings.STYLESHEETS = {
'test': {
'source_filenames': [],
'output_filename': 'test.css',
}
}
t = Template('{% load compressed %}'
'{% compressed_css "test" %}')
self.assertHTMLEqual(
t.render(Context({'test': 'test'})),
'<link href="/test.d41d8cd98f00.css" rel="stylesheet"'
' type="text/css" />')
def test_compressed_js_tag(self):
"""Testing {% compressed_js %}"""
self._touch_files(['test.js', 'test.d41d8cd98f00.js'])
pipeline_settings.JAVASCRIPT = {
'test': {
'source_filenames': [],
'output_filename': 'test.js',
}
}
t = Template('{% load compressed %}'
'{% compressed_js "test" %}')
self.assertHTMLEqual(
t.render(Context({'test': 'test'})),
'<script type="text/javascript" src="/test.d41d8cd98f00.js"'
' charset="utf-8"></script>')
def _touch_files(self, filenames):
"""Create one or more empty static media files.
Args:
filenames (list of unicode):
The list of static media files to create.
"""
for filename in filenames:
with open(os.path.join(settings.STATIC_ROOT, filename), 'w'):
pass
| [
"[email protected]"
] | |
edf6ec9094282214c247789c19af30388e1fb891 | cf5b2850dc9794eb0fc11826da4fd3ea6c22e9b1 | /xlsxwriter/test/styles/test_styles06.py | ecba383c9b2848fa80c25090f9d3c53d0e528278 | [
"BSD-2-Clause"
] | permissive | glasah/XlsxWriter | bcf74b43b9c114e45e1a3dd679b5ab49ee20a0ec | 1e8aaeb03000dc2f294ccb89b33806ac40dabc13 | refs/heads/main | 2023-09-05T03:03:53.857387 | 2021-11-01T07:35:46 | 2021-11-01T07:35:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,183 | py | ###############################################################################
#
# Tests for XlsxWriter.
#
# SPDX-License-Identifier: BSD-2-Clause
# Copyright (c), 2013-2021, John McNamara, [email protected]
#
import unittest
from io import StringIO
from ..helperfunctions import _xml_to_list
from ...styles import Styles
from ...workbook import Workbook
class TestAssembleStyles(unittest.TestCase):
"""
Test assembling a complete Styles file.
"""
def test_assemble_xml_file(self):
"""Test for border colour styles."""
self.maxDiff = None
fh = StringIO()
style = Styles()
style._set_filehandle(fh)
workbook = Workbook()
workbook.add_format({
'left': 1,
'right': 1,
'top': 1,
'bottom': 1,
'diag_border': 1,
'diag_type': 3,
'left_color': 'red',
'right_color': 'red',
'top_color': 'red',
'bottom_color': 'red',
'diag_color': 'red'})
workbook._set_default_xf_indices()
workbook._prepare_format_properties()
style._set_style_properties([
workbook.xf_formats,
workbook.palette,
workbook.font_count,
workbook.num_format_count,
workbook.border_count,
workbook.fill_count,
workbook.custom_colors,
workbook.dxf_formats,
workbook.has_comments,
])
style._assemble_xml_file()
workbook.fileclosed = 1
exp = _xml_to_list("""
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<styleSheet xmlns="http://schemas.openxmlformats.org/spreadsheetml/2006/main">
<fonts count="1">
<font>
<sz val="11"/>
<color theme="1"/>
<name val="Calibri"/>
<family val="2"/>
<scheme val="minor"/>
</font>
</fonts>
<fills count="2">
<fill>
<patternFill patternType="none"/>
</fill>
<fill>
<patternFill patternType="gray125"/>
</fill>
</fills>
<borders count="2">
<border>
<left/>
<right/>
<top/>
<bottom/>
<diagonal/>
</border>
<border diagonalUp="1" diagonalDown="1">
<left style="thin">
<color rgb="FFFF0000"/>
</left>
<right style="thin">
<color rgb="FFFF0000"/>
</right>
<top style="thin">
<color rgb="FFFF0000"/>
</top>
<bottom style="thin">
<color rgb="FFFF0000"/>
</bottom>
<diagonal style="thin">
<color rgb="FFFF0000"/>
</diagonal>
</border>
</borders>
<cellStyleXfs count="1">
<xf numFmtId="0" fontId="0" fillId="0" borderId="0"/>
</cellStyleXfs>
<cellXfs count="2">
<xf numFmtId="0" fontId="0" fillId="0" borderId="0" xfId="0"/>
<xf numFmtId="0" fontId="0" fillId="0" borderId="1" xfId="0" applyBorder="1"/>
</cellXfs>
<cellStyles count="1">
<cellStyle name="Normal" xfId="0" builtinId="0"/>
</cellStyles>
<dxfs count="0"/>
<tableStyles count="0" defaultTableStyle="TableStyleMedium9" defaultPivotStyle="PivotStyleLight16"/>
</styleSheet>
""")
got = _xml_to_list(fh.getvalue())
self.assertEqual(got, exp)
| [
"[email protected]"
] | |
19c0f24be411fc989b3a537f051e4021fe85c060 | 7f32883804578d960fa2dc2b3fded332f839afe4 | /except remove numbers/word-level/NB+SVM+LSTM+CNN.py | 15b67e06d1ee8580e744b4b71e04c6e2ca38557c | [] | no_license | liang23333/chinese-sentiment-analysis-preprocess-paper | 7fa3dc947f26205c7004433e3f041353e2dc350f | 37a5be75a8121967510fead3c7e077f61ba281fd | refs/heads/master | 2021-01-20T07:38:04.062624 | 2017-05-10T14:54:34 | 2017-05-10T14:54:34 | 90,021,740 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,560 | py |
# coding: utf-8
# In[1]:
# In[1]:
neg=[]
for i in range(1000):
fname="C:\\LAWHCA\\chinese-sentiment--analysis-preprocess\\neg\\neg."+str(i)+".txt"
print(fname)
with open(fname, "r",errors="ignore") as f:
neg.append(f.read())
# In[2]:
pos=[]
for i in range(1000):
fname="C:\\LAWHCA\\chinese-sentiment--analysis-preprocess\\pos\\pos."+str(i)+".txt"
print(fname)
with open(fname, "r",errors="ignore") as f:
pos.append(f.read())
# In[14]:
import random
data_all=pos+neg
data_dict={}
for i in range(1000):
data_dict[data_all[i]]=1
for i in range(1000):
data_dict[data_all[i+1000]]=0
print(len(data_dict))
print(data_all[0])
random.shuffle(data_all)
print(data_all[0])
label=[]
for i in range(2000):
label.append(data_dict[data_all[i]])
# put data in label and data_all list
# In[2]:
from Remove_link import remove_link
from Remove_number import remove_number
from Remove_punctuation import remove_punctuation
from Remove_stopwords import remove_stopwords
from Replace_netword import replace_netword
from Replace_repeatwords import replace_repeatwords
from Replace_ywz import replace_ywz
from Translate_eng import translate_eng
import time
for i in range(2000):
data_all[i]=translate_eng(data_all[i])
data_all[i]=replace_ywz(data_all[i])
data_all[i]=replace_repeatwords(data_all[i])
data_all[i]=replace_netword(data_all[i])
data_all[i]=remove_stopwords(data_all[i])
data_all[i]=remove_punctuation(data_all[i])
#data_all[i]=remove_number(data_all[i])
data_all[i]=remove_link(data_all[i])
print(i)
# In[3]:
import jieba
all_data=[]
for i in range(2000):
seg_list=jieba.cut(data_all[i])
seg_list=" ".join(seg_list)
all_data.append(seg_list)
print(all_data[0])
print(all_data[1999])
print(type(all_data[0]))
print(data_all[0])
print(data_all[1999])
print(type(data_all[0]))
# In[4]:
from sklearn.feature_extraction.text import TfidfVectorizer as TFIDF
tfidf = TFIDF(min_df=5, # minimum document frequency
max_features=None,
strip_accents='unicode',
analyzer='word',
token_pattern=r'\w{1,}',
ngram_range=(1,1), # unigram model
use_idf=1,
smooth_idf=1,
sublinear_tf=1)
# In[5]:
tfidf.fit(all_data)
all_data = tfidf.transform(all_data)
print(type(all_data))
# In[6]:
print(all_data.shape)
data=all_data
# In[7]:
print(tfidf.vocabulary_)
# In[8]:
from sklearn.naive_bayes import MultinomialNB as MNB
model_NB = MNB()
model_NB.fit(all_data[:1500], label[:1500])
MNB(alpha=1.0, class_prior=None, fit_prior=True) # no-op: leftover notebook repr echo
from sklearn.cross_validation import cross_val_score
import numpy as np
print("多项式贝叶斯分类器10折交叉验证得分: ", np.mean(cross_val_score(model_NB, all_data[:1500], label[:1500], cv=10, scoring='roc_auc')))
sum=0
test_predicted =model_NB.predict(all_data[1500:])
for i in range(500):
if(test_predicted[i]==label[1500+i]):
sum=sum+1
print(sum/500)
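# Evaluation note: the Multinomial Naive Bayes model is scored two ways:
# 10-fold cross-validated ROC AUC on the 1500 training documents, and plain
# holdout accuracy on the last 500 documents via the manual loop above
# (where the counter shadows the built-in `sum`). An equivalent holdout
# accuracy, as a sketch:
#
#     from sklearn.metrics import accuracy_score
#     print(accuracy_score(label[1500:], model_NB.predict(all_data[1500:])))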
# In[9]:
for i in range(20,21):
import numpy as np
from sklearn.decomposition import PCA
pca=PCA(n_components=i)
newData=pca.fit_transform(all_data.toarray())
from sklearn import svm
CLF=svm.SVC()
CLF.fit(newData[:1500],label[:1500])
from sklearn.cross_validation import cross_val_score
print("SVM分类器10折交叉验证得分: ", np.mean(cross_val_score(CLF, newData[:1500], label[:1500], cv=10, scoring='roc_auc')))
sum=0
test_predicted =CLF.predict(newData[1500:])
for i in range(500):
if(test_predicted[i]==label[1500+i]):
sum=sum+1
print(sum/500)
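# Pipeline note: the TF-IDF matrix is densified and projected onto 20
# principal components before fitting an SVC with the sklearn default RBF
# kernel. The `for i in range(20, 21)` loop runs exactly once and looks like
# a leftover from sweeping the number of PCA components; widening the range
# would reproduce that sweep.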
# In[10]:
print(all_data.shape)
data=all_data
t=all_data
print(type(t.toarray()))
print(type(t))
# In[11]:
from sklearn.decomposition import PCA
print(all_data.shape)
pca=PCA(n_components=20)
data=pca.fit_transform(all_data.toarray())
print(data.shape)
max_sequence_length=data.shape[1]
import os
import re
import io
import requests
import numpy as np
import matplotlib.pyplot as plt
import tensorflow as tf
from zipfile import ZipFile
from tensorflow.python.framework import ops
ops.reset_default_graph()
sess = tf.Session()
# Set RNN parameters
epochs = 500
batch_size = 100
rnn_size = 10
embedding_size = 1
min_word_frequency = 10
learning_rate = 0.0005
dropout_keep_prob = tf.placeholder(tf.float32)
data=data.reshape((2000,max_sequence_length,1))
label=np.array(label)
x_train, x_test = data[:1500], data[1500:]
y_train, y_test = label[:1500], label[1500:]
x_data = tf.placeholder(tf.float32, [None, max_sequence_length,embedding_size])
y_output = tf.placeholder(tf.int32, [None])
if tf.__version__[0]>='1':
cell=tf.contrib.rnn.BasicRNNCell(num_units = rnn_size)
else:
cell = tf.nn.rnn_cell.BasicRNNCell(num_units = rnn_size)
output, state = tf.nn.dynamic_rnn(cell, x_data, dtype=tf.float32)
output = tf.nn.dropout(output, dropout_keep_prob)
# Get output of RNN sequence
output = tf.transpose(output, [1, 0, 2])
last = tf.gather(output, int(output.get_shape()[0]) - 1)
weight = tf.Variable(tf.truncated_normal([rnn_size, 2], stddev=0.1))
bias = tf.Variable(tf.constant(0.1, shape=[2]))
logits_out = tf.matmul(last, weight) + bias # raw logits; sparse_softmax_cross_entropy_with_logits applies softmax itself
# Loss function
losses = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=logits_out, labels=y_output) # logits=float32, labels=int32
loss = tf.reduce_mean(losses)
accuracy = tf.reduce_mean(tf.cast(tf.equal(tf.argmax(logits_out, 1), tf.cast(y_output, tf.int64)), tf.float32))
optimizer = tf.train.RMSPropOptimizer(learning_rate)
train_step = optimizer.minimize(loss)
init = tf.global_variables_initializer()
sess.run(init)
train_loss = []
test_loss = []
train_accuracy = []
test_accuracy = []
# Start training
for epoch in range(epochs):
# Shuffle training data
shuffled_ix = np.random.permutation(np.arange(len(x_train)))
# print(shuffled_ix)
# print(shuffled_ix.shape[0])
x_train = x_train[shuffled_ix]
y_train = y_train[shuffled_ix]
num_batches = int(len(x_train)/batch_size)
# TODO: calculate the number of generations exactly
for i in range(num_batches):
# Select train data
min_ix = i * batch_size
max_ix = np.min([len(x_train), ((i+1) * batch_size)])
x_train_batch = x_train[min_ix:max_ix]
y_train_batch = y_train[min_ix:max_ix]
#print(x_train_batch)
# print(x_train_batch.shape[0])
# print(x_train_batch.shape[1])
# print(x_train_batch.shape[2])
# print(x_train_batch[0][0])
# Run train step
train_dict = {x_data: x_train_batch, y_output: y_train_batch, dropout_keep_prob:0.5}
sess.run(train_step, feed_dict=train_dict)
# Run loss and accuracy for training
temp_train_loss, temp_train_acc = sess.run([loss, accuracy], feed_dict=train_dict)
train_loss.append(temp_train_loss)
train_accuracy.append(temp_train_acc)
# Run Eval Step
test_dict = {x_data: x_test, y_output: y_test, dropout_keep_prob:1.0}
temp_test_loss, temp_test_acc = sess.run([loss, accuracy], feed_dict=test_dict)
test_loss.append(temp_test_loss)
test_accuracy.append(temp_test_acc)
print('Epoch: {}, Test Loss: {:.2}, Test Acc: {:.2}'.format(epoch+1, temp_test_loss, temp_test_acc))
# epoch_seq = np.arange(1, epochs+1)
# plt.plot(epoch_seq, train_loss, 'k--', label='Train Set')
# plt.plot(epoch_seq, test_loss, 'r-', label='Test Set')
# plt.title('Softmax Loss')
# plt.xlabel('Epochs')
# plt.ylabel('Softmax Loss')
# plt.legend(loc='upper left')
# plt.show()
# # Plot accuracy over time
# plt.plot(epoch_seq, train_accuracy, 'k--', label='Train Set')
# plt.plot(epoch_seq, test_accuracy, 'r-', label='Test Set')
# plt.title('Test Accuracy')
# plt.xlabel('Epochs')
# plt.ylabel('Accuracy')
# plt.legend(loc='upper left')
# plt.show()
# In[12]:
import jieba
all_data=[]
for i in range(2000):
seg_list=jieba.cut(data_all[i])
seg_list=" ".join(seg_list)
all_data.append(seg_list)
print(all_data[0])
print(all_data[1999])
print(type(all_data[0]))
print(data_all[0])
print(data_all[1999])
print(type(data_all[0]))
# In[13]:
for i in range(2000):
all_data[i]=all_data[i].split()
print(all_data[100])
# In[14]:
import gensim
model = gensim.models.Word2Vec.load("C:\\LAWHCA\\word2vec\\word2vec_wx")
print(model.most_similar(u'宾馆'))
# In[15]:
MAX=0
res=0
for i in range(2000):
if MAX< len(all_data[i]):
res=i
MAX=len(all_data[i])
print(MAX)
def sentence_to_array(sentence,MAX):
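# Map each token to its word2vec vector; OOV tokens and right-padding become 256-dim zero vectors.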
ret=[]
import numpy as np
zero=np.zeros((256))
for i in sentence:
try:
ret.append(model.wv[i])
except Exception as err:
ret.append(zero)
for i in range(MAX-len(sentence)):
ret.append(zero)
return ret
res=[]
for i in range(2000):
res.append(sentence_to_array(all_data[i],MAX))
print(i)
print(res[0])
# In[16]:
res=np.array(res)
import numpy as np
np.random.seed(1337) # for reproducibility
from keras.datasets import mnist
from keras.utils import np_utils
from keras.models import Sequential
from keras.layers import Dense, Activation, Convolution2D, MaxPooling2D, Flatten
from keras.optimizers import Adam
import pickle
X_train,X_test=res[:1500],res[1500:]
y_train,y_test=label[:1500],label[1500:]
# In[3]:
print()
print(X_train.shape)
y_train = np_utils.to_categorical(y_train, 2)
y_test = np_utils.to_categorical(y_test, 2)
# In[4]:
print(X_train.shape)
print(X_test.shape)
X_train = X_train.reshape(1500, 1,MAX, 256)
X_test = X_test.reshape(500, 1,MAX, 256)
# In[5]:
print(X_train.shape)
print(y_train.shape)
# In[17]:
# Another way to build your CNN
model = Sequential()
# Conv layer 1 output shape (32, MAX, 256)
model.add(Convolution2D(
nb_filter=32,
nb_row=5,
nb_col=5,
border_mode='same', # Padding method
dim_ordering='th', # channels-first ordering to match the (1, MAX, 256) input reshape above
input_shape=(1, # channels
MAX, 256,) # height & width
))
model.add(Activation('relu'))
# Pooling layer 1 (max pooling) output shape (32, MAX/2, 128)
model.add(MaxPooling2D(
pool_size=(2, 2),
strides=(2, 2),
border_mode='same', # Padding method
))
# Conv layer 2 output shape (64, MAX/2, 128)
model.add(Convolution2D(64, 5, 5, border_mode='same'))
model.add(Activation('relu'))
# Pooling layer 2 (max pooling) output shape (64, MAX/4, 64)
model.add(MaxPooling2D(pool_size=(2, 2), border_mode='same'))
model.add(Convolution2D(128,5,5, border_mode='same'))
model.add(Activation('relu'))
# Pooling layer 3 (max pooling) output shape (128, MAX/8, 32)
model.add(MaxPooling2D(pool_size=(2, 2), border_mode='same'))
# Fully connected layer 1: flattened conv features in, output shape (1024)
model.add(Flatten())
model.add(Dense(1024))
model.add(Activation('relu'))
# Fully connected layer 2 to shape (2) for the 2 classes
model.add(Dense(2))
model.add(Activation('softmax'))
# Another way to define your optimizer
adam = Adam(lr=1e-4)
# We add metrics to get more results you want to see
model.compile(optimizer=adam,
loss='categorical_crossentropy',
metrics=['accuracy'])
print('Training ------------')
# Another way to train the model
model.fit(X_train, y_train, batch_size=50,nb_epoch=11)
print('\nTesting ------------')
# Evaluate the model with the metrics we defined earlier
loss, accuracy = model.evaluate(X_test, y_test)
print('\ntest loss: ', loss)
print('\ntest accuracy: ', accuracy)
# In[ ]:
| [
"[email protected]"
] | |
a2ae05fd5bbe201b0dba0773a9f38d0f0e6d8898 | 80664c3c78e6381495db99074d38a4bc3971e2b7 | /MovieEngine/webapp/urls.py | 4f59afca6c66e5d82401a5f7ce24d693386b67d9 | [] | no_license | nebulazee/MovieEngine | eb3c12f12b836fd8742822414e9b1343ca908512 | 6098bbabc44c392bb9327348bbcfe960a44ef368 | refs/heads/master | 2020-03-22T00:39:24.824954 | 2018-06-30T15:05:41 | 2018-06-30T15:17:37 | 139,257,132 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 306 | py | from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.mainPage, name='mainPage'),
#url(r'^mainPage/$', views.mainPage, name='mainPage'),
url(r'^goToRecom/controller$',views.movieRecom,name='movieRecom'),
url(r'^goToRecom/$',views.index,name='index'),
] | [
"[email protected]"
] | |
f427a26d36f2fffc1aa1e50c709b2983234801cf | 20336a03aa918460aeb6ca7809dfc57673b27291 | /backend/migrations/0014_lesson_cours.py | dd16057b29946586f4cdd0defb6340c80657a129 | [] | no_license | shevchukmaxim/diplom | 95076b9a89ed37c56f6c5348b51971c68d36f705 | 3043eb39b5479e248e939ecbc4084f2bde960101 | refs/heads/master | 2022-12-09T07:16:56.276213 | 2019-07-20T10:30:42 | 2019-07-20T10:30:42 | 192,128,893 | 0 | 0 | null | 2022-12-03T13:50:28 | 2019-06-15T22:26:57 | CSS | UTF-8 | Python | false | false | 559 | py | # Generated by Django 2.2.1 on 2019-06-02 03:02
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('backend', '0013_cours_coursemployee_courslesson'),
]
operations = [
migrations.AddField(
model_name='lesson',
name='cours',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='courslsn', related_query_name='courslsn', to='backend.Cours'),
),
]
| [
"[email protected]"
] | |
14c639cb0326a7c2cf37b94840c0031c0ed31417 | 023d8b4d847969e7ec67025ed469bbeaeed82899 | /hello.py | 4541e874c4452b89efbd8554f69ab30ac4836556 | [] | no_license | beardTao/microblog | 479a0491a780ad0b2e7dc815754ec8c37ac76c54 | 3af95bd44e6924515225c9e79788d9dd3ac579e5 | refs/heads/master | 2022-11-07T18:08:05.906171 | 2019-04-30T03:48:17 | 2019-04-30T03:48:17 | 183,625,180 | 1 | 1 | null | 2022-11-02T06:36:15 | 2019-04-26T12:32:03 | Python | UTF-8 | Python | false | false | 3,190 | py | from flask import Flask, render_template, url_for, request, session, redirect, flash
from flask_bootstrap import Bootstrap
from flask_moment import Moment
from datetime import datetime
from flask_wtf import FlaskForm
from wtforms import StringField, SubmitField
from wtforms.validators import DataRequired
import os
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
from flask_mail import Mail, Message
from threading import Thread
basedir = os.path.abspath(os.path.dirname(__file__))
print(basedir)
app = Flask(__name__)
app.config['SECRET_KEY'] = 'hard to guess string'
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + os.path.join(basedir,'data2.splite')
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
# Configure the mail server settings
app.config['MAIL_SERVER'] = 'smtp.163.com'
app.config['MAIL_PORT'] = 25
app.config['MAIL_USERNAME'] = os.environ.get('mail_username')
app.config['MAIL_PASSWORD'] = os.environ.get('mail_password')
print(app.config['MAIL_USERNAME'],app.config['MAIL_PASSWORD'])
app.config['FLASKY_MAIL_SUBJECT_PREFIX'] = '[TAO]'
app.config['FLASKY_MAIL_SENDER'] = '[email protected]'
bootstrap = Bootstrap(app)
moment = Moment(app)
db = SQLAlchemy(app)
migrate =Migrate(app, db)
mail = Mail(app)
def send_async_mail(app, msg):
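# Flask-Mail needs an application context when sending from a background thread.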
with app.app_context():
mail.send(msg)
def send_mail(to, subject, template, **kwargs):
msg = Message(app.config['FLASKY_MAIL_SUBJECT_PREFIX'] + subject, sender=app.config['FLASKY_MAIL_SENDER'], recipients=[to])
msg_body = render_template(template + '.txt',**kwargs)
msg.html = render_template(template + '.html',**kwargs)
thr = Thread(target=send_async_mail, args=[app, msg])
thr.start()
return thr
class NameForm(FlaskForm):
name = StringField('name:', validators=[DataRequired()])
submit = SubmitField('Submit')
class Role(db.Model):
__tablename__ = 'roles' # table name
id = db.Column(db.Integer, primary_key=True) # table columns
name = db.Column(db.String(64), unique=True)
users = db.relationship('User', backref='role')
def __repr__(self):
return '<Role %r>' % self.name
class User(db.Model):
__tablename__ = 'users' # table name
id = db.Column(db.Integer, primary_key=True) # table columns
username = db.Column(db.String(64), unique=True, index=True)
role_id = db.Column(db.Integer, db.ForeignKey('roles.id'))
def __repr__(self):
return '<User %r>' % self.username
@app.route('/',methods=['GET','POST'])
def index():
form = NameForm()
if form.validate_on_submit():
user = User.query.filter_by(username=form.name.data).first()
print(user)
if user is None:
user = User(username=form.name.data)
db.session.add(user)
db.session.commit()
session['known'] = False
send_mail('[email protected]', "new_user", 'mail/new_user', user=user)
else:
session['known'] = True
# old_name = session.get('name')
# if old_name != form.name.data:
# flash('looks like you have changed your name')
session['name'] = form.name.data
form.name.data = ''
return redirect(url_for('index'))
return render_template('index.html',form=form, name=session.get('name'),known=session.get('known',False))
@app.route('/user/<name>')
def user(name):
return render_template('user.html',name=name)
| [
"170601"
] | 170601 |
15ac7a012158192d1c75ea2adf14451862b089f5 | c475cd8531a94ffae69cc92371d41531dbbddb6c | /Projects/bullet3-2.89/examples/pybullet/gym/pybullet_utils/util.py | 5a014c8ed07a8b530a0fc58ca5cf708b435bd654 | [
"Apache-2.0",
"LicenseRef-scancode-free-unknown",
"Zlib"
] | permissive | WolfireGames/overgrowth | 72d3dd29cbd7254337265c29f8de3e5c32400114 | 594a2a4f9da0855304ee8cd5335d042f8e954ce1 | refs/heads/main | 2023-08-15T19:36:56.156578 | 2023-05-17T08:17:53 | 2023-05-17T08:20:36 | 467,448,492 | 2,264 | 245 | Apache-2.0 | 2023-05-09T07:29:58 | 2022-03-08T09:38:54 | C++ | UTF-8 | Python | false | false | 218 | py | import random
import numpy as np
def set_global_seeds(seed):
try:
import tensorflow as tf
except ImportError:
pass
else:
tf.set_random_seed(seed)
np.random.seed(seed)
random.seed(seed)
return
| [
"[email protected]"
] | |
4c27acbc8fa98aefd7f98f136bbc7b410c6f568a | 86406e6d1373a48291a6957fdfce07ad98a00db3 | /main.py | 798e33d263563ced19e6c0f6a09ded184e69b4f9 | [] | no_license | ewertonpaulo/knn-machine-learning | 4bbafa164c96cd2d22cfb6dc0b181a9f6f129f60 | 0060f111c6107ad6093d1ef67ce722b130cc9d2b | refs/heads/master | 2020-04-30T05:05:14.660594 | 2019-03-21T22:07:04 | 2019-03-21T22:07:04 | 176,621,930 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 651 | py | from csv_handler import train_test_split
import csv_handler as cv
import knn
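# Portuguese identifiers kept as-is: treinamento = training, teste = test, rotulos = labels, precisao = accuracy.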
treinamento = 'aprendizagemdemaquina/treinamento.csv'
teste = 'aprendizagemdemaquina/teste.csv'
r = 'aprendizagemdemaquina/rotulos-teste.txt'
train = cv.train_test_split(treinamento)
train = knn.data_to_list(train)
validate = cv.validacao(teste)
predic = []
rotulos = []
for i in cv.rotulos_testes(r):
rotulos.append(float(i[0]))
k = 2
for x in range(len(validate)):
vizinhos = knn.get_vizinhos(train, validate[x], k)
resultado = knn.resposta(vizinhos)
predic.append(resultado)
print(resultado)
print('Precision: %f' % knn.precisao(rotulos, predic))
"[email protected]"
] | |
503417c9e5d75f60410ccc6b53f26b6e6bac8d5b | 78fe8c800da8eaa3b5644347b9a06145a4f43a6e | /xomain/game/migrations/0005_auto_20181124_2250.py | 78d2df740d8612669e3a264c225ebecb6de4d05d | [] | no_license | AakashKhatu/xodjango | e37393ba7c09a31c06a162bb79f94a9bd53bbbb1 | 240b4a470581bea95286335b3c0049c998c2e924 | refs/heads/master | 2020-04-08T01:27:52.216974 | 2019-01-03T02:32:49 | 2019-01-03T02:32:49 | 158,896,174 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 956 | py | # Generated by Django 2.0.9 on 2018-11-24 17:20
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('game', '0004_auto_20181124_2150'),
]
operations = [
migrations.AddField(
model_name='game_data',
name='player1',
field=models.CharField(default='player1_id', max_length=20),
),
migrations.AddField(
model_name='game_data',
name='player2',
field=models.CharField(default='player2_id', max_length=20),
),
migrations.AlterField(
model_name='game_data',
name='player1_name',
field=models.CharField(default='player1_name', max_length=20),
),
migrations.AlterField(
model_name='game_data',
name='player2_name',
field=models.CharField(default='player2_name', max_length=20),
),
]
| [
"[email protected]"
] | |
816a9237dc7938a0b5c52aa4309713b2228816f7 | adf3076bd40e37f4a422e79f6efb938f13def6c6 | /objectModel/Python/cdm/storage/local.py | e8a47c16802f5603d8c1d96895365a7d49ac8a07 | [
"MIT",
"CC-BY-4.0"
] | permissive | assetdatasystems/CDM | 445d1b22f0071620f1eb2fd8d1b5f7d6152ec388 | 576ccfd07fc718b3d0911112e5041729a3ba8088 | refs/heads/master | 2020-09-29T00:54:18.350717 | 2019-12-06T22:49:02 | 2019-12-06T22:49:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,521 | py | # ----------------------------------------------------------------------
# Copyright (c) Microsoft Corporation.
# All rights reserved.
# ----------------------------------------------------------------------
from datetime import datetime
import json
import os
from typing import Any, List, Optional
from .base import StorageAdapterBase
class LocalAdapter(StorageAdapterBase):
"""Local file system storage adapter"""
def __init__(self, root: str = '') -> None:
self._root = os.path.abspath(root) # type: str
def can_read(self) -> bool:
return True
def can_write(self) -> bool:
return True
async def read_async(self, corpus_path: str) -> str:
adapter_path = self.create_adapter_path(corpus_path)
with open(adapter_path, 'r', encoding='utf-8') as file:
return file.read()
async def write_async(self, corpus_path: str, data: str) -> None:
adapter_path = self.create_adapter_path(corpus_path)
parent_dir = os.path.abspath(os.path.join(adapter_path, os.pardir))
os.makedirs(parent_dir, exist_ok=True)
with open(adapter_path, 'w', encoding='utf-8') as file:
file.write(data)
def create_adapter_path(self, corpus_path: str) -> str:
corpus_path = corpus_path[(corpus_path.find(':') + 1):].lstrip('\\/')
return os.path.normpath(os.path.join(self._root, corpus_path))
def create_corpus_path(self, adapter_path: str) -> Optional[str]:
if not adapter_path.startswith("http"):
normalized_adapter_path = os.path.abspath(adapter_path).replace('\\', '/')
normalized_root = self._root.replace('\\', '/')
if normalized_adapter_path.startswith(normalized_root):
return normalized_adapter_path[len(normalized_root):]
# Signal that we did not recognize path as one for this adapter.
return None
def clear_cache(self) -> None:
pass
async def compute_last_modified_time_async(self, adapter_path: str) -> Optional[datetime]:
if os.path.exists(adapter_path):
return datetime.fromtimestamp(os.path.getmtime(adapter_path))
return None
async def fetch_all_files_async(self, folder_corpus_path: str) -> List[str]:
adapter_folder = self.create_adapter_path(folder_corpus_path)
adapter_files = [os.path.join(dp, fn) for dp, dn, fns in os.walk(adapter_folder) for fn in fns]
return [self.create_corpus_path(file) for file in adapter_files]
| [
"[email protected]"
] | |
b0dcca8c35d97cf96c6a426d4cd4e0c4f1757ab5 | b7125b27e564d2cc80a2ce8d0a6f934aa22c8445 | /.history/sudoku_20201031012809.py | 1e1bd31e61f1cca07054e01ff4e65dab7e7033db | [] | no_license | JensVL96/Puzzle-solver-for-fun | 4c15dcd570c3705b7ac555efb56b52913e81083c | 6d8a4378a480372213a596a336a4deca727a00fc | refs/heads/master | 2021-07-15T05:19:42.185495 | 2020-11-08T13:59:49 | 2020-11-08T13:59:49 | 224,855,888 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,215 | py | # -*- coding: utf-8 -*-
from __future__ import print_function
from config import *
from create_board import *
from solve_bloard import *
from display_board import *
from string import *
import pygame as pg
import numpy as np
# For error highlighting
row_index = (0, 0)
col_index = (0, 0)
blk_index = (0, 0)
input_lock = 0
def reset_errors():
global input_lock
input_lock = 0 # unlock input once the error highlights are cleared
global row_index
row_index = (0, 0)
global col_index
col_index = (0, 0)
global blk_index
blk_index = (0, 0)
def get_cord(pos):
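# Convert a mouse click position into (column, row) cell indices on the board.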
global box_index_x
box_index_x = (pos[0] - TOP_LX)//BLOCK_SIZE
global box_index_y
box_index_y = (pos[1] - TOP_LY)//BLOCK_SIZE
def valid(grid, x, y, val, increase):
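# Check val against the same row, column and 3x3 block; remember the first conflicting cell for highlighting.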
global input_lock
for index in range(9):
# Check if value in column
if grid[x][index] == val:
print("in the same column")
global col_index
col_index = (x, index)
input_lock = 1
# Check if value in row
if grid[index][y] == val:
print("in the same row")
global row_index
row_index = (index, y)
input_lock = 1
# Finds the block
index_x = x // 3 # integer division
index_y = y // 3
# Check if value in block
for i in range(index_x * 3, index_x * 3 + 3):
for j in range (index_y * 3, index_y * 3 + 3):
if grid[i][j] == val:
print("in the same block")
global blk_index
blk_index = (i, j)
input_lock = 1
if input_lock == 1:
return False
return True
class Main():
def __init__(self):
self.board = []
self.run()
def run(self):
pg.init()
self.screen = pg.display.set_mode(SCREEN_RES)
pg.display.set_caption('Sudoku solver')
display = Display_board(self.screen)
flag1 = 0
val = 0
global input_lock
board = create_board().board
while 1:
for event in pg.event.get():
if event.type == pg.QUIT or (event.type == pg.KEYDOWN and event.key == pg.K_ESCAPE):
exit()
if event.type == pg.MOUSEBUTTONDOWN:
flag1 = 1
pos = pg.mouse.get_pos()
get_cord(pos)
display.glow(pos)
if event.type == pg.KEYDOWN and input_lock != 1:
if event.key == pg.K_1:
val = 1
if event.key == pg.K_2:
val = 2
if event.key == pg.K_3:
val = 3
if event.key == pg.K_4:
val = 4
if event.key == pg.K_5:
val = 5
if event.key == pg.K_6:
val = 6
if event.key == pg.K_7:
val = 7
if event.key == pg.K_8:
val = 8
if event.key == pg.K_9:
val = 9
elif event.type == pg.KEYDOWN and input_lock == 1:
if event.key == pg.K_BACKSPACE:
val = 0
input_lock = 0
reset_errors()
if val != 0:
display.draw_val(val, box_index_x, box_index_y)
if valid(board, int(box_index_x), int(box_index_y), val, display):
board[int(box_index_x)][int(box_index_y)] = val
else:
board[int(box_index_x)][int(box_index_y)] = 0
val = 0
pg.draw.rect(self.screen, BLACK, (0, 0, self.screen.get_width(), self.screen.get_height()))
self.screen.fill(BEIGE)
display.draw(board)
if input_lock == 1:
display.update(board, row_index, col_index, blk_index)
# display.draw_box()
pg.display.update()
self.solution = solve_board(board)
self.solution.assign_flags(board)
if __name__ == '__main__':
Main()
| [
"[email protected]"
] | |
ade7cc11e7c987ff67017d26122fc95f878d4de8 | af45aec06c134a4e7bed79b7e542ea711da14ea2 | /home/views.py | 6dd6c6ce446e3a525b2af49626ade62613a01418 | [] | no_license | Psolmin/Likelion | e23289f9ea37bfb1944039b0b50b6369646aac0f | d622f7f06136db12e1e8c3d48866ec1b67a47fc0 | refs/heads/master | 2022-01-22T10:10:24.794874 | 2019-05-26T08:45:20 | 2019-05-26T08:45:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 122 | py | from django.shortcuts import render
# Create your views here.
def intro(request):
return render(request,'intro.html') | [
"[email protected]"
] | |
ae107976d966143e9c4a31f1309885c06bf9f1ae | 64723d47ce3651e4bbb85344a284371e61d31ffa | /timeModules/creatingInstances.py | 82f2ccea8bcf96657693b7cf18b9ac227a0adcf5 | [
"MIT"
] | permissive | BrandonP321/Python-masterclass | 659674cede81a3d0b4681481cbba8f3dfdd3d996 | fac81fe4f8acfa37076820405d96132f9f23b311 | refs/heads/master | 2022-12-12T13:59:37.828859 | 2020-09-05T02:29:40 | 2020-09-05T02:29:40 | 266,238,938 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,271 | py | import datetime
"""These are the 3 most useful classes of the datetime module"""
date = datetime.date(year=2020, month=7, day=21)
print(f"datetime.date: {date}") # 2020-07-21
time = datetime.time(hour=11, minute=33, second=54, microsecond=348983)
print(f"datetime.time: {time}") # 11:33:54.348983
date_time = datetime.datetime(year=2020, month=7, day=21, hour=11, minute=33, second=54, microsecond=348983)
print(f"datetime.datetime: {date_time}\n") # 2020-07-21 11:33:54.398204
year, month, day, hour, minute, second, microsecond = (2020, 7, 20, 11, 33, 54, 398204)
print(datetime.datetime(year, month, day, hour, minute, second, microsecond))
print()
"""3 other ways to create instances"""
# creates instance of the current local date
today = datetime.date.today()
print(f"datetime.date.today(): {today}") # 2020-08-28
# creates instance of the current date and time
timeNow = datetime.datetime.now()
print(f"datetime.datetime.now(): {timeNow}\ttype: ", end='') # YYYY-MM-DD HH:MM:SS.SSSSSS
print(type(timeNow))
# combines instances of datetime.date & datetime.time into a sings datetime.datetime instance
current_time = datetime.time(timeNow.hour, timeNow.minute, timeNow.second, timeNow.microsecond)
combined = datetime.datetime.combine(today, current_time)
print(f"datetime.datetime.combine(<date>, <time>): {combined}\n")
"""Using a string to create an instance of datetime"""
# converts the string into a datetime.date type
dateIso = datetime.date.fromisoformat("2000-07-21")
print(f"datetime.date.fromisoformat(): {dateIso}\t", end='')
print(f"type: {type(dateIso)}")
# similar to previous
timeIso = datetime.time.fromisoformat("05:42:32.483720")
print(f"datetime.time.fromisoformat: {timeIso}", end='\t')
print(f"type: {type(timeIso)}")
# similar to previous
dateTimeIso = datetime.datetime.fromisoformat("2000-07-21 05:42:32.483720")
print(f"datetime.datetime.fromisoformat: {dateTimeIso}", end='\t')
print(f"type: {type(dateTimeIso)}")
print()
"""Handling dates & time not in the iso 8601 format"""
date_string = "01-31-2020 14:45:37"
formatted_string = "%m-%d-%Y %H:%M:%S"
date_string_final = datetime.datetime.strptime(date_string, formatted_string)
print(f"datetime.datetime.strptime(date_string, formatted_string): {date_string_final}")
| [
"[email protected]"
] | |
18cd164633ce6eeb98459085cbefb6aff3a57b65 | b7ffaf77eb0d404dcb6d1bc35de5d1a089678a32 | /methods.py | 6a76acb04a99404c6c5b3425cc9610afde789bb3 | [] | no_license | KaustubhDamania/Loan-er | 6f318452baa94cf170188fc2af35030ef8136a25 | 2b5e6932747de33f38e6de50c607c8b223262b12 | refs/heads/master | 2022-12-21T22:27:15.294804 | 2020-06-01T22:01:24 | 2020-06-01T22:01:24 | 193,385,674 | 3 | 1 | null | 2022-09-23T22:26:37 | 2019-06-23T19:07:32 | Python | UTF-8 | Python | false | false | 7,850 | py | import dialogflow
from emoji import emojize,demojize
from langdetect import detect_langs
import re
from pprint import pprint
from random import randint
import smtplib
from googletrans import Translator
def pan_check(pan):
from run import user_data
if len(pan)!=10:
return False
if not pan[:3].isalpha():
return False
if pan[3] not in 'PFCHAT':
return False
if pan[4] != user_data['name'].split()[-1][0]:
return False
if not pan[5:9].isdigit():
return False
if not pan[9].isalpha():
return False
return True
def get_fulfillment_texts(message, project_id, session_id):
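# Send the user's message to Dialogflow and return the fulfillment text, the structured messages, and the raw response.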
session_client = dialogflow.SessionsClient()
print('inside function, session_id',session_id)
session = session_client.session_path(project_id, session_id) #"unique"
if message:
text_input = dialogflow.types.TextInput(text=message,
language_code='en')
query_input = dialogflow.types.QueryInput(text=text_input)
response = session_client.detect_intent(session=session,
query_input=query_input)
# print('RESPONSE')
# pprint(response)
if response:
fulfillment_msg = response.query_result.fulfillment_text
fulfillment_arr = response.query_result.fulfillment_messages
new_arr = []
for item in fulfillment_arr:
# print('fulfillment_messages',item)
new_arr.append({
'text': {
'text': [item.text.text[0]]
}
})
print('fulfillment_arr',new_arr)
# if str(fulfillment_arr[0].text.text[0]) != '':
# fulfillment_text = fulfillment_arr[0].text.text[0]
# else:
# fulfillment_text = fulfillment_msg
return fulfillment_msg, new_arr, response
def convert_to_hi(fulfillment_msg):
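# Translate to Hindi with emoji preserved: demojize to ':name:' tokens first, then turn each token in the output back into an emoji.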
translator = Translator()
fulfillment_msg = demojize(fulfillment_msg)
fulfillment_msg = translator.translate(fulfillment_msg, src='en', dest='hi').text
pattern = re.compile(r':(.*?):')
emoji_indices = [m.span() for m in re.finditer(pattern,fulfillment_msg)]
# for i,j in emoji_indices:
while len(emoji_indices)>0:
i,j = emoji_indices[0]
# print('emoji',fulfillment_msg[i:j],i,j)
translated_text = translator.translate(fulfillment_msg[i:j], src='hi', dest='en').text
translated_text = translated_text[0]+translated_text[1:-1].strip().lower()+translated_text[-1]
# print('translated_text',translated_text)
translated_emoji = emojize(translated_text)
fulfillment_msg = fulfillment_msg[:i]+translated_emoji+fulfillment_msg[j:]
emoji_indices = [m.span() for m in re.finditer(pattern,fulfillment_msg)]
# print('emoji_indices',emoji_indices)
return fulfillment_msg
# def get_language(message):
# from run import isHindi
# # global isHindi
# # if isHindi:
# # return 'hi'
# language_code = 'en'
# try:
# languages = detect_langs(message)
# languages = [item.lang for item in languages]
# for lang in languages:
# if lang in ['ne','mr','hi']:
# language_code = 'hi'
# isHindi = True
# break
# except Exception as e:
# pass
# return language_code, isHindi
def calc_emi(amount, duration):
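# Flat rate tied to the term: rate (%) = duration - 2; EMI = ceil(total repayable / number of months).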
interest = duration - 2
from math import ceil
return ceil(amount*(1+interest/100)/duration)
def upload_pic(pic_name):
from firebase import firebase
firebase = firebase.FirebaseApplication('https://cabot-xuhseu.firebaseio.com')
client = storage.Client()
bucket = client.get_bucket('cabot-xuhseu.appspot.com')
# posting to firebase storage
imageBlob = bucket.blob("/")
imagePath = os.path.join(os.getcwd(),"{}".format(pic_name))
imageBlob = bucket.blob(pic_name)
imageBlob.upload_from_filename(imagePath)
# return str(imageBlob.generate_signed_url(expiration=timedelta(hours=1),
# method='GET'))
def replace_text(pattern, replacement, fulfillment_msg):
# pattern = re.compile(r'XXXX')
pattern = re.compile(pattern)
indices = [m.span() for m in re.finditer(pattern,fulfillment_msg[0]['text']['text'][0])]
indices = indices[0]
first_part = fulfillment_msg[0]['text']['text'][0][:indices[0]]
latter_part = fulfillment_msg[0]['text']['text'][0][indices[1]:]
fulfillment_msg[0]['text']['text'][0] = first_part+str(replacement)+latter_part
def get_user_data(response,intent_name,fulfillment_msg):
from run import filename, db, user_data
if intent_name=='loan':
# pprint(dir(response))
pass
elif intent_name=='get name':
print('name is',response.query_result.output_contexts[-1].parameters['name'])
user_data['name']=response.query_result.output_contexts[-1].parameters['name']
elif intent_name=='amount-1':
print('amount is',int(response.query_result.output_contexts[-1].parameters['amount']))
user_data['loan_amt']=int(response.query_result.output_contexts[-1].parameters['amount'])
elif intent_name=='loan period':
print('duration is',int(response.query_result.output_contexts[-1].parameters['duration']['amount']))
user_data['loan_duration']=int(response.query_result.output_contexts[-1].parameters['duration']['amount'])
elif intent_name=='email':
print('email is',(response.query_result.output_contexts[-1].parameters['email']))
user_data['email']=response.query_result.output_contexts[-1].parameters['email']
elif intent_name=='pan':
user_text = response.query_result.query_text
pan = response.query_result.output_contexts[-1].parameters['pan']
if pan=='':
for word in user_text.split():
if pan_check(word):
pan = word
break
print('pan is',pan)
user_data['pan'] = pan
elif intent_name=='PAN pic upload':
# upload_pic(filename)
user_data['pan_photo'] = filename
# count += 1
# os.remove(filename)
elif intent_name=='Aadhar number':
print('aadhar is',str(int(response.query_result.output_contexts[-1].parameters['aadhar'])))
user_data['aadhar_no'] = str(int(response.query_result.output_contexts[-1].parameters['aadhar']))
elif intent_name=='Aadhar pic front':
# upload_pic(filename)
user_data['aadhar_pic1'] = filename
# count += 1
# os.remove(filename)
elif intent_name=='Aadhar pic back':
# upload_pic(filename)
user_data['aadhar_pic2'] = filename
credit_ref = db.collection(u'credit_score_data')
credit_score = randint(0,900)
try:
query_result1 = credit_ref.where('pan',u'==',user_data['pan']).get()
for i in query_result1:
credit_score = i.to_dict()['credit_score']
except Exception as e:
print(e)
if credit_score < 500:
loaner = 0
else:
loaner = ((credit_score-500)/400)*int(user_data['loan_amt'])
replace_text(r'XXXX',loaner,fulfillment_msg)
replace_text(r'YY',user_data['loan_duration'],fulfillment_msg)
replace_text(r'ZZZZ',calc_emi(user_data['loan_amt'],user_data['loan_duration']),fulfillment_msg)
elif intent_name=='Loan approved - yes':
pass
elif intent_name=='Loan approved - no':
pass
elif intent_name=='Bank details':
user_text = response.query_result.query_text
user_text = user_text.split('\n')
user_data['bank_acc']=user_text[0]
user_data['ifsc']=user_text[1]
pprint(user_data)
return user_data
| [
"[email protected]"
] | |
fba992eef1e678333e3f6677e5a63d44ddcf3b6f | 8f49ce0587c060819837ce9eaa1ccfd5b7d51be5 | /scripts/ReseqTrackDB/create_collection_to_transpose.py | 850a5b21b3da9c93f187db1851fc960b4c17403b | [
"Apache-2.0"
] | permissive | igsr/igsr_analysis | 8a3f4150b976d5d73ed173f2b37179b6049028e3 | ffea4885227c2299f886a4f41e70b6e1f6bb43da | refs/heads/master | 2022-12-08T21:24:22.513499 | 2021-12-09T21:11:12 | 2021-12-09T21:11:12 | 99,792,445 | 3 | 5 | Apache-2.0 | 2022-12-08T02:26:22 | 2017-08-09T09:48:48 | Jupyter Notebook | UTF-8 | Python | false | false | 2,763 | py | '''
Utility to create a new entry in the Reseqtrackdb's Collection table that will be associated to a certain type of files
'''
from ReseqTrackDB import *
import argparse
import logging
import os
#RESEQTRACK DB conn params
parser = argparse.ArgumentParser(description='Script to calculate different QC metrics on BAMs')
parser.add_argument('--hostname', type=str, required=True, help='Hostname for ReseqTrack DB' )
parser.add_argument('--username', type=str, required=True, help='User for ReseqTrack DB' )
parser.add_argument('--port', type=int, required=True, help='Port number in the ReseqTrack DB' )
parser.add_argument('--pwd', type=str, help='PWD for the ReseqTrack DB' )
parser.add_argument('--db', type=str, required=True, help='DB name in the ReseqTrack DB' )
parser.add_argument('--ftype_1', type=str, required=True, help='File type1 that will be used to generate the new collection' )
parser.add_argument('--ftype_2', type=str, required=True, help='File type2 that will be used to generate the new collection' )
parser.add_argument('--ftype_3', type=str, required=False, help='File type3 that will be used to generate the new collection' )
parser.add_argument('--collection_type', type=str, required=True, help='Type for the new collection' )
parser.add_argument('--collection_name', type=str, required=True, help='Name for the new collection' )
args = parser.parse_args()
def get_files_by_type(reseqdb,type):
'''
Parameters
----------
reseqdb : ReseqTrackDB object, Required
type : str, Required
File type to retrieve
Returns
-------
A list with dbIDs of the files
'''
l=reseqdb.fetch_files_by_type(type)
return [x.dbID for x in l]
if __name__ == '__main__':
log_filename="create_collection_to_transpose.log"
logger = logging.getLogger("col_2_transpose")
logger.setLevel(logging.INFO)
# create the logging file handler
fh = logging.FileHandler(log_filename)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
fh.setFormatter(formatter)
# add handler to logger object
logger.addHandler(fh)
logger.info("Program started")
hostname=args.hostname
username=args.username
db=args.db
port=args.port
pwd=args.pwd
reseqdb = ReseqTrackDB(host=hostname,user=username,port=port,pwd=pwd,db=db)
l1=get_files_by_type(reseqdb,args.ftype_1)
l2=get_files_by_type(reseqdb,args.ftype_2)
l3=[]
if args.ftype_3:
l3=get_files_by_type(reseqdb,args.ftype_3)
others_ids=list(set(l1+l2+l3))
new_c=Collection(name=args.collection_name,type=args.collection_type,others_dbIDs=others_ids,table_name='file')
new_c.store(reseqdb)
logger.info("Done")
| [
"[email protected]"
] | |
45b730970e78694e953764270577663bc0f55163 | 0d58801c59c80eef7468690239d70327708cdbae | /Mapping/Map.py | 7cc0ec6c0e7353c4de7dd8bcb6b8518447308422 | [] | no_license | mothwal/Python_Dictionary | 4f6fd821365a8521222fd1d73b2de2ec61ab0a6e | b310b0f0c53828d0d89d2aed1790f005d60fcb00 | refs/heads/main | 2023-04-29T04:00:14.015992 | 2021-05-06T12:39:07 | 2021-05-06T12:39:07 | 364,276,320 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 954 | py | import folium
import pandas as pd
data = pd.read_csv("Volcanoes.txt")
lat = list(data["LAT"])
lon = list(data["LON"])
elev = list(data["ELEV"])
def color_producer(elevation):
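# Bucket elevation into marker colours: below 1000 m green, 1000-3000 m orange, higher red.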
if elevation < 1000:
return 'green'
elif 1000 <= elevation < 3000:
return 'Orange'
else:
return 'red'
map = folium.Map(location= [38.58, -99.09], zoom_start=6, tiles = "Stamen Terrain")
fg = folium.FeatureGroup(name="My Map")
for lt, ln, el in zip(lat, lon, elev):
fg.add_child(folium.CircleMarker(location=[lt, ln], popup= str(el)+"m",
fill_color = color_producer(el),color ='grey', fill_opacity = 0.7))
fg.add_child(folium.GeoJson( data = open(('world.json'), 'r', encoding='utf-8-sig'),
style_function=lambda x: {'fillColor':'green' if x['properties']['POP2005'] < 10000000 else 'orange' if 10000000<= x['properties']['POP2005'] < 20000000 else 'red'}))
map.add_child(fg)
map.save("Map2.html")
| [
"[email protected]"
] | |
cba8befb497e82e42cf5aee10456f152d1264580 | 23199315118c958279de4b9f7cb1e4443b518c46 | /Jiji/asgi.py | ba0f84253310e5d290f16ff3550d480b40277e38 | [] | no_license | Robinzulu/Jiji-Api | 06e05198172ca779c06a498b7f41183a23784772 | 47a981af8ef19c8507321723ee45f419451dbb1c | refs/heads/master | 2023-04-30T22:38:02.766330 | 2020-10-27T15:30:17 | 2020-10-27T15:30:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 385 | py | """
ASGI config for Jiji project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'Jiji.settings')
application = get_asgi_application()
| [
"[email protected]"
] | |
120f170cf018653194adb4e24abad7bac1b97950 | 5cbbeb11fb1400019690d10db24b4579f97e8896 | /mlkernels/kernels/derivative.py | a0d74a40a20655d7822642c6d104d4e38e97fc0c | [
"MIT"
] | permissive | darsh8200/mlkernels | b735c213f5cf590cabebcee166e3b4aea95c4e1e | cad223c422a32bc10375358fda076645efca62f1 | refs/heads/main | 2023-06-16T19:48:37.056247 | 2021-07-09T06:18:39 | 2021-07-09T06:18:39 | 384,340,711 | 0 | 0 | MIT | 2021-07-09T06:16:48 | 2021-07-09T06:16:47 | null | UTF-8 | Python | false | false | 6,853 | py | import lab as B
import numpy as np
from algebra import DerivativeFunction
from algebra.util import identical
from matrix import Dense
from plum import convert
from . import _dispatch
from .. import Kernel
from ..util import num_elements, uprank, expand
__all__ = ["perturb", "DerivativeKernel"]
def dkx(k_elwise, i):
"""Construct the derivative of a kernel with respect to its first
argument.
Args:
k_elwise (function): Function that performs element-wise computation
of the kernel.
i (int): Dimension with respect to which to compute the derivative.
Returns:
function: Derivative of the kernel with respect to its first argument.
"""
@uprank
def _dkx(x, y):
import tensorflow as tf
with tf.GradientTape() as t:
# Get the numbers of inputs.
nx = num_elements(x)
ny = num_elements(y)
# Copy the input `ny` times to efficiently compute many derivatives.
xis = tf.identity_n([x[:, i : i + 1]] * ny)
t.watch(xis)
# Tile inputs for batched computation.
x = B.tile(x, ny, 1)
y = B.reshape(B.tile(y, 1, nx), ny * nx, -1)
# Insert tracked dimension, which is different for every tile.
xi = B.concat(*xis, axis=0)
x = B.concat(x[:, :i], xi, x[:, i + 1 :], axis=1)
# Perform the derivative computation.
out = B.dense(k_elwise(x, y))
grads = t.gradient(out, xis, unconnected_gradients="zero")
return B.concat(*grads, axis=1)
return _dkx
def dkx_elwise(k_elwise, i):
"""Construct the element-wise derivative of a kernel with respect to
its first argument.
Args:
k_elwise (function): Function that performs element-wise computation
of the kernel.
i (int): Dimension with respect to which to compute the derivative.
Returns:
function: Element-wise derivative of the kernel with respect to its
first argument.
"""
@uprank
def _dkx_elwise(x, y):
import tensorflow as tf
with tf.GradientTape() as t:
xi = x[:, i : i + 1]
t.watch(xi)
x = B.concat(x[:, :i], xi, x[:, i + 1 :], axis=1)
out = B.dense(k_elwise(x, y))
return t.gradient(out, xi, unconnected_gradients="zero")
return _dkx_elwise
def dky(k_elwise, i):
"""Construct the derivative of a kernel with respect to its second
argument.
Args:
k_elwise (function): Function that performs element-wise computation
of the kernel.
i (int): Dimension with respect to which to compute the derivative.
Returns:
function: Derivative of the kernel with respect to its second argument.
"""
@uprank
def _dky(x, y):
import tensorflow as tf
with tf.GradientTape() as t:
# Get the numbers of inputs.
nx = num_elements(x)
ny = num_elements(y)
# Copy the input `nx` times to efficiently compute many derivatives.
yis = tf.identity_n([y[:, i : i + 1]] * nx)
t.watch(yis)
# Tile inputs for batched computation.
x = B.reshape(B.tile(x, 1, ny), nx * ny, -1)
y = B.tile(y, nx, 1)
# Insert tracked dimension, which is different for every tile.
yi = B.concat(*yis, axis=0)
y = B.concat(y[:, :i], yi, y[:, i + 1 :], axis=1)
# Perform the derivative computation.
out = B.dense(k_elwise(x, y))
grads = t.gradient(out, yis, unconnected_gradients="zero")
return B.transpose(B.concat(*grads, axis=1))
return _dky
def dky_elwise(k_elwise, i):
"""Construct the element-wise derivative of a kernel with respect to
its second argument.
Args:
k_elwise (function): Function that performs element-wise computation
of the kernel.
i (int): Dimension with respect to which to compute the derivative.
Returns:
function: Element-wise derivative of the kernel with respect to its
second argument.
"""
@uprank
def _dky_elwise(x, y):
import tensorflow as tf
with tf.GradientTape() as t:
yi = y[:, i : i + 1]
t.watch(yi)
y = B.concat(y[:, :i], yi, y[:, i + 1 :], axis=1)
out = B.dense(k_elwise(x, y))
return t.gradient(out, yi, unconnected_gradients="zero")
return _dky_elwise
def perturb(x):
"""Slightly perturb a tensor.
Args:
x (tensor): Tensor to perturb.
Returns:
tensor: `x`, but perturbed.
"""
dtype = convert(B.dtype(x), B.NPDType)
if dtype == np.float64:
return 1e-20 + x * (1 + 1e-14)
elif dtype == np.float32:
return 1e-20 + x * (1 + 1e-7)
else:
raise ValueError(f"Cannot perturb a tensor of data type {B.dtype(x)}.")
class DerivativeKernel(Kernel, DerivativeFunction):
"""Derivative of kernel."""
@property
def _stationary(self):
# NOTE: In the one-dimensional case, if derivatives with respect to both
# arguments are taken, then the result is in fact stationary.
return False
@_dispatch
def __eq__(self, other: "DerivativeKernel"):
identical_derivs = identical(expand(self.derivs), expand(other.derivs))
return self[0] == other[0] and identical_derivs
@_dispatch
def pairwise(k: DerivativeKernel, x: B.Numeric, y: B.Numeric):
i, j = expand(k.derivs)
k = k[0]
# Prevent that `x` equals `y` to stabilise nested gradients.
y = perturb(y)
if i is not None and j is not None:
# Derivative with respect to both `x` and `y`.
return Dense(dky(dkx_elwise(elwise(k), i), j)(x, y))
elif i is not None and j is None:
# Derivative with respect to `x`.
return Dense(dkx(elwise(k), i)(x, y))
elif i is None and j is not None:
# Derivative with respect to `y`.
return Dense(dky(elwise(k), j)(x, y))
else:
raise RuntimeError("No derivative specified.")
@_dispatch
def elwise(k: DerivativeKernel, x: B.Numeric, y: B.Numeric):
i, j = expand(k.derivs)
k = k[0]
# Prevent that `x` equals `y` to stabilise nested gradients.
y = perturb(y)
if i is not None and j is not None:
# Derivative with respect to both `x` and `y`.
return dky_elwise(dkx_elwise(elwise(k), i), j)(x, y)
elif i is not None and j is None:
# Derivative with respect to `x`.
return dkx_elwise(elwise(k), i)(x, y)
elif i is None and j is not None:
# Derivative with respect to `y`.
return dky_elwise(elwise(k), j)(x, y)
else:
raise RuntimeError("No derivative specified.")
| [
"[email protected]"
] | |
076b6e61d9658865cb0085c03eaec021e9efa0cb | 87ed123c2ae25e60199b8c70f2160face10304e6 | /RegularExpression/subn.py | fb7b8c1f5a70391589a457af520ec212f834e1d8 | [] | no_license | asatav/Python-All-Example | 53ace669a90aa6ca39683eecadaf298d183b0bff | 005be5cd7df87a05d9fbd1e2e56ab319161b4533 | refs/heads/master | 2022-12-16T03:36:39.745041 | 2020-08-02T15:58:05 | 2020-08-02T15:58:05 | 229,724,176 | 0 | 1 | null | 2022-08-10T19:32:44 | 2019-12-23T09:54:01 | Python | UTF-8 | Python | false | false | 123 | py | import re
l=re.subn("[a-z]","#","a7b9c5kz")
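# re.subn returns a tuple: (new string, number of substitutions made)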
print(l)
print("The result String:",l[0])
print("The no of replacement:",l[1]) | [
"[email protected]"
] | |
544fbfcefebaf2212fecdc24fdd4bd2a6e8b82b9 | 6889b20eace976fc2402ddab15cfa275dd576a0e | /project-dog-classification/create_lst.py | bba6ce9e15a9e207fcb0f990867ffcfaee52cd1d | [
"MIT"
] | permissive | NoxMoon/deep-learning-v2-pytorch | 419bf83450b5cc5675d6b051b0b2f28b2fed0ca3 | 21e802e2829655e1c30f5283cf4093e584a44cbf | refs/heads/master | 2022-09-29T21:35:36.742341 | 2020-06-07T13:37:38 | 2020-06-07T13:37:38 | 264,543,235 | 2 | 0 | null | 2020-05-16T22:59:27 | 2020-05-16T22:59:26 | null | UTF-8 | Python | false | false | 271 | py | import glob
import os
import sys
folder = sys.argv[1]
files = glob.glob(f"{folder}/*/*")
files.sort()
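# Each .lst line is "index<TAB>label<TAB>relative path"; the 3-digit breed-folder prefix gives the label (converted to 0-based).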
f = open(f"{folder}/image.lst", "w")
for i, path in enumerate(files):
file = path[len(folder)+1:]
cls = int(file[:3])-1
f.write(f"{i}\t{cls}\t{file}\n")
| [
"[email protected]"
] | |
5a88186576cde5a7a08dd9095037fba001566c39 | 8e1b0feacab1d890745de37eb3db627e48f048d9 | /Stack_Plot.py | 15ca1d69c30936a9b3b331c18353de8f9e5a8282 | [] | no_license | binary13/Python_Web | 11831aebfddc96a4840797826e2413e097f9776e | 6f44d2102b9728cf0db0c0bb57bc267ac225ef03 | refs/heads/master | 2021-01-10T14:10:00.100334 | 2016-03-02T16:16:34 | 2016-03-02T16:16:34 | 48,897,818 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 588 | py | import matplotlib.pyplot as plt
year = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
## Expenses, in thousands
taxes = [17, 19, 44, 43, 9, 8, 12, 51, 23, 40]
overhead = [18, 7, 12, 48, 23, 34, 64, 31, 12, 8]
entertainment = [20, 14, 32, 17, 31, 21, 22, 35, 24, 6]
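# Empty plots act as legend proxies for the stacked areas, since the legend is built before stackplot is drawn.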
plt.plot([], [], color='m', label='Taxes')
plt.plot([], [], color='y', label='Overhead')
plt.plot([], [], color='c', label='Entertainment')
plt.title('Company Expenses')
plt.xlabel('Years since 2004')
plt.ylabel('Thousands of dollars')
plt.legend()
plt.stackplot(year, taxes, overhead, entertainment, colors=['m','y','c'])
plt.show() | [
"[email protected]"
] | |
5ae1f6af1af35b0b0ee824897d46848a1289d1da | fbf8acc1cb6e4e20609266865f98dc66d6ea056e | /Zesterp_Debranding_v11/__manifest__.py | 737909cab18f400f5e1c15e04a0d61a609744bc4 | [] | no_license | eLBati/OdooV11-Modules01 | b200477216adfd446b1e8c69d05e923ffe19d70f | 921baeb850dcd4d37fedf132e1c86dd3602ec5ef | refs/heads/master | 2020-04-20T07:20:04.275007 | 2017-12-22T14:16:41 | 2017-12-22T14:16:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 716 | py | # -*- coding: utf-8 -*-
{
'name': "Zest Erp debranding ",
'version': '10.1.0',
'author': 'Teckzilla Software Solutions and Services',
"support": "[email protected]",
'category': 'Debranding',
'depends': [
'web',
'mail',
'web_settings_dashboard',
'website',
'project',
#'pos_odoo_debranding',
#'website_odoo_debranding',
'web_planner'
# 'access_apps',
# 'access_settings_menu',
],
'data': [
'views/webclient_templates.xml',
'views/change_menu_color.xml'
],
'qweb':[
],
'auto_install': False,
# 'uninstall_hook': 'uninstall_hook',
'installable': True
}
| [
"[email protected]"
] | |
3c2491a6d5abcc6470ee886fba3fc562b546b53a | c27995173a5cb066385e278ff4514c9c2e1a3e1c | /BinaryTree/solE226InvertBT.py | 17d510c48c59d929ab85e80c56a6c17b81331d41 | [] | no_license | jerry3links/leetcode | 0f4a184e4487ce7d6a7b655fd1934e818fe74dbf | 2b4be9d2ad2476f285dfbc7f702571015fce4d2e | refs/heads/master | 2020-04-09T21:17:57.147916 | 2019-03-20T09:30:47 | 2019-03-20T09:30:47 | 160,597,696 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,190 | py | # Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
"""
from BinaryTree.solE226InvertBT import Solution
root = Solution().constructCase()
print("Original tree:")
Solution.printTree(root)
new_root = Solution().invertTree(root)
print("Inverted tree:")
Solution.printTree(new_root)
"""
class Solution(object):
def invertTree(self, root):
"""
:type root: TreeNode
:rtype: TreeNode
"""
if root:
tmp = root.left
root.left = self.invertTree(root.right)
root.right = self.invertTree(tmp)
return root
def constructCase(self):
from customDataType import TreeNode
root = TreeNode(4)
root.left = TreeNode(2)
root.right = TreeNode(7)
root.left.left = TreeNode(1)
root.left.right = TreeNode(3)
root.right.left = TreeNode(6)
root.right.right = TreeNode(9)
return root
@staticmethod
def printTree(root):
from customDataType import TreeNode
TreeNode.printTree(root)
| [
"[email protected]"
] | |
f3d432c2fb8c4d0597be62456b9989cd7f3d6d59 | adde392c3ec37f13ff5ae35cb13556c2683bd95d | /Code/sauce.py | c2900dbd1d1164f83f9d776e04b6c397311ef7de | [] | no_license | clyman88/Robot-Arm | e442ec8ce8d2a40695f767a92fd0a1252f5a6d7a | 6b7520bfcbf5c987d893e06f07983fb6e38c7893 | refs/heads/master | 2020-12-20T08:41:03.623237 | 2020-03-24T16:04:54 | 2020-03-24T16:04:54 | 236,018,512 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 345 | py | from time import sleep
class Sauce(object):
def __init__(self, servo, photo_interrupter):
self.servo = servo
self.photo_interrupter = photo_interrupter
servo.angle = 180
def squirt(self):
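# Sweep the servo from 180 deg down to 91 deg in 1-degree steps, then snap back to 180.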
for i in range(90):
self.servo.angle = 180 - i
sleep(.0075)
self.servo.angle = 180 | [
"[email protected]"
] | |
04d910cb2753bf3da1c1f5571d04b66d68695b58 | af8c9dd86af67fcb70d4819040b7119fbc57860e | /Solver.py | 704b28a93478fb13ff891e30b9816a091dbcb299 | [] | no_license | didimelor/NewtonsMethodCalculator | aedcf3a93f12b12f14610753f09b7a76ac10992b | b14c28836bb717e14b0f143d395d18288affa260 | refs/heads/main | 2023-08-06T17:03:55.554988 | 2021-09-29T20:20:39 | 2021-09-29T20:20:39 | 411,816,953 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,086 | py | import sympy as sym
from sympy import *
import time
import numpy as np
from sympy.parsing.mathematica import mathematica
import random
x = sym.Symbol('x')
usrInput = "x**3"
expr = x**2
exprD = 2*x
def parse(usrInput):
global expr
usrInput = usrInput.replace("\\", "/")
if(usrInput.find("Log" or "log") != -1):
mathematica(usrInput)
expr = parse_expr(usrInput)
expr = simplify(expr)
def f(var):
return expr.subs(x, var)
def df(x,f):
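# Forward-difference approximation of f'(x) with a fixed step h.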
h = .00001
upper = f(x+h)
lower = f(x)
return (upper - lower) / h
def Validation(x,f):
epsilon = .00001
norm_fx = abs(f(x))
if norm_fx < epsilon:
return ["Correcto",1,norm_fx]
else:
return ["Incorrecto",0,norm_fx]
# Class that stores the attributes of each submitted function, used to verify and validate the page's behaviour
class function:
def __init__(self, id, sol, it, t, acc, solved, message):
self.id = id
self.sol = sol
self.it = it
self.t = t
self.acc = acc
self.solved = solved #bool if solved
self.message = message
def Solver11(x0,f):
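# Newton's method: iterate x_{n+1} = x_n - f(x_n)/f'(x_n) from x0 until the step is below 1e-5 or maxIt is reached.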
it = 0
maxIt = 10000
start = time.time()
x_n = x0
x_n_1 = x_n + .01
step = x_n_1-x_n
while (abs(step) > 0.00001 and it < maxIt):
it += 1
denominator = df(x_n,f)
if denominator != 0:
function_result = f(x_n)
x_n_1 = x_n - function_result / denominator
else:
x_n = 0.0
return function(usrInput, x_n, it, (round(time.time() - start,12)), 0.0, 0, "No solution found")
step = x_n_1-x_n
x_n = x_n_1
if(it > maxIt):
x_n = 0.0
return function(usrInput, x_n, 0, (round(time.time() - start,12)), 0.0, 0, "No solution found")
arr = Validation((round(x_n,12)), f)
return function(usrInput, (round(x_n,12)), it, (round(time.time() - start,12)), (round(arr[2], 12)), 1, "Solved!")
# Unit tests
def testSolver():
functions = ["(x*x - 16)",
"cos(x)- x**3",
"x + 20",
"sin(x) \ 20",
"1.0 \ x",
"x - cos(x)",
"0.3**x-x**2+4",
"tan(x)",
"x-cos(x)",
"exp(0.3*x)-x**2+4",
"x-(sqrt(x))",
"3*(x)*(x)+4*(x)-10"]
for function in functions:
parse(function)
num = random.uniform(0.9, 10.0)
num = round(num,3)
funcObj = Solver11(num,f)
print("Eq: ", function, " " ,funcObj.message, ": " ,funcObj.sol, "x0: ", num)
# Run the unit tests
#testSolver()
'''
parse("x-(sqrt(x))")
funcObj = Solver11(0.25,f)
print("Test 1: ", funcObj.message, ": " ,funcObj.sol, ". Acc: ", funcObj.acc)
# Instructor's tests:
parse("(2.718**(x**2))-1")
funcObj = Solver11(20,f)
print("Test 1: ", funcObj.message, ": " ,funcObj.sol, ". Acc: ", funcObj.acc)
parse("(2.718**(-0.005*x))*(1+0.005*x) - 0.5")
funcObj = Solver11(10,f)
print("Test 2: ", funcObj.message, ": " ,funcObj.sol, ". Acc: ", funcObj.acc)
'''
"[email protected]"
] | |
984ff3b520f465da1040870eef5caf08dcc3f920 | 8825b16d48255397df2ee636215d7967c615b7ac | /plural.py | 73c70d88f0896bc2c16b7786580285c406805491 | [] | no_license | Gdango/Old_Code | c7786fcb2fbc00fbbb16eeaab1f618ac6f01bfdc | f1b4c60c3a8e9b953af7dc470b02efe0a4183b37 | refs/heads/master | 2020-04-01T16:47:07.890099 | 2018-10-17T05:09:30 | 2018-10-17T05:09:30 | 153,398,063 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,348 | py | # Van Huynh
# ask the user for a noun and prints out its plural form
def plural():
word = input('Please enter a word: ')
new_word = 'Not'
# defining the condition when word end with f
if word[-1] == 'f':
word_temp = word[0:len(word)-1]
new_word = word_temp+'ves' # word_temp = temperary variable for word
print('The plural form of', word, 'is', new_word+'.')
return new_word
elif word[-2]+word[-1] == 'fe':
word_temp = word[0:len(word)-2] # word_temp = temperary variable for word
new_word = word_temp+'ves'
print('The plural form of', word, 'is', new_word+'.')
return new_word
# putting es and ies conditions in the same for loop
es_list = ['ch', 'sh', 's', 'x', 'z']
vowels = ['a', 'e', 'i', 'o', 'u', 'y']
for y in range(0,len(vowels)-1):
# define conditions for es
if y < 2: # for first 2 items on the es_list so then I can use word[-2]
# ch & sh have different lengths from s, x, z
if word[-2]+word[-1] == es_list[y]:
new_word = word+'es'
print('The plural form of', word, 'is', new_word+'.')
return new_word
if y >= 2:
if word[-1] == es_list[y]:
new_word = word+'es'
print('The plural form of', word ,'is', new_word+'.')
return new_word
# define conditions for s
if y < len(es_list):
if word[-2] == vowels[y]: #if the 2nd to last letter = vowels
if word[-1] == 'y':
new_word = word+'s'
print('The plural form of', word,'is', new_word+'.')
return new_word
#define conditions for 'ies'
if word[-2] != 'o' or word[-2] != 'e' or word[-2] != 'i' or word[-2] != 'a' or word[-2] != 'u':
if word[-1] == 'y':
new_word = word.replace(word[-1], 'ies')
print('The plural form of', word, 'is', new_word+'.')
return new_word
# if new_word didn't go through other ifs, it'll still equal to the pre-define...
# ...word ('Not')
if new_word == 'Not':
new_word = word+'s'
print('The plural form of', word, 'is', new_word+'.')
return new_word
plural()
| [
"[email protected]"
] | |
f1102670ae8cbd4f43677906b15c08c7982d6f38 | a7a7fe1f899a3c4f03201d11142b9cf5daeb9240 | /experimentMusic/models.py | b8fbc9fa58c826f8711327f1699d35a6fb1eb378 | [
"MIT"
] | permissive | hoomanhpa/Hooman-Experiment | fd9e78294177bdb4a4f644c3d6cdfc4f48cc1b59 | f8bd543ca3406c64878ee9d47264f46a57d121c0 | refs/heads/master | 2022-11-30T11:20:49.144335 | 2020-08-17T20:35:38 | 2020-08-17T20:35:38 | 288,278,165 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,913 | py | from otree.api import (
models,
widgets,
BaseConstants,
BaseSubsession,
BaseGroup,
BasePlayer,
Currency as c,
currency_range,
)
import json
import random
author = 'Your name here'
doc = """
Your app description
"""
class Constants(BaseConstants):
name_in_url = 'experimentMusic'
players_per_group = None
num_rounds = 1
with open('C:\\Files\\Test\\oTreeonlineshop-master\\shop\\oTree\\experimentMusic\\products.json', 'r', encoding='utf-8') as jsonfile:
data=jsonfile.read()
shoppinglist = json.loads(data)
with open('C:\\Files\\Test\\oTreeonlineshop-master\\shop\\oTree\\experimentMusic\\music.json', 'r', encoding='utf-8') as jsonfile:
data=jsonfile.read()
musiclist = json.loads(data)
class Subsession(BaseSubsession):
pass
class Group(BaseGroup):
pass
class Player(BasePlayer):
arousal = models.StringField(choices=[["1", ""], ["2", ""], ["3", ""], ["4", ""], ["5", ""]], widget=widgets.RadioSelectHorizontal, label="labb")
pleasure = models.StringField(choices=[["1", ""], ["2", ""], ["3", ""], ["4", ""], ["5", ""]], widget=widgets.RadioSelectHorizontal, label="labb")
dominance = models.StringField(choices=[["1", ""], ["2", ""], ["3", ""], ["4", ""], ["5", ""]], widget=widgets.RadioSelectHorizontal, label="labb")
paymentplayer = models.StringField(blank=True)
sumofprices = models.StringField(blank=True)
musicPlayed = models.StringField(blank=True)
volume_button_clicked = models.StringField(blank=True)
time_choice0 = models.StringField(blank=True)
time_choice1 = models.StringField(blank=True)
time_choice2 = models.StringField(blank=True)
time_choice3 = models.StringField(blank=True)
time_choice4 = models.StringField(blank=True)
time_choice5 = models.StringField(blank=True)
time_choice6 = models.StringField(blank=True)
time_choice7 = models.StringField(blank=True)
choice00 = models.StringField(blank=True)
choice01 = models.StringField(blank=True)
choice02 = models.StringField(blank=True)
choice03 = models.StringField(blank=True)
choice04 = models.StringField(blank=True)
choice05 = models.StringField(blank=True)
choice06 = models.StringField(blank=True)
choice07 = models.StringField(blank=True)
choice10 = models.StringField(blank=True)
choice11 = models.StringField(blank=True)
choice12 = models.StringField(blank=True)
choice13 = models.StringField(blank=True)
choice14 = models.StringField(blank=True)
choice15 = models.StringField(blank=True)
choice16 = models.StringField(blank=True)
choice17 = models.StringField(blank=True)
choice20 = models.StringField(blank=True)
choice21 = models.StringField(blank=True)
choice22 = models.StringField(blank=True)
choice23 = models.StringField(blank=True)
choice24 = models.StringField(blank=True)
choice25 = models.StringField(blank=True)
choice26 = models.StringField(blank=True)
choice27 = models.StringField(blank=True)
choice30 = models.StringField(blank=True)
choice31 = models.StringField(blank=True)
choice32 = models.StringField(blank=True)
choice33 = models.StringField(blank=True)
choice34 = models.StringField(blank=True)
choice35 = models.StringField(blank=True)
choice36 = models.StringField(blank=True)
choice37 = models.StringField(blank=True)
choice40 = models.StringField(blank=True)
choice41 = models.StringField(blank=True)
choice42 = models.StringField(blank=True)
choice43 = models.StringField(blank=True)
choice44 = models.StringField(blank=True)
choice45 = models.StringField(blank=True)
choice46 = models.StringField(blank=True)
choice47 = models.StringField(blank=True)
choice50 = models.StringField(blank=True)
choice51 = models.StringField(blank=True)
choice52 = models.StringField(blank=True)
choice53 = models.StringField(blank=True)
choice54 = models.StringField(blank=True)
choice55 = models.StringField(blank=True)
choice56 = models.StringField(blank=True)
choice57 = models.StringField(blank=True)
choice60 = models.StringField(blank=True)
choice61 = models.StringField(blank=True)
choice62 = models.StringField(blank=True)
choice63 = models.StringField(blank=True)
choice64 = models.StringField(blank=True)
choice65 = models.StringField(blank=True)
choice66 = models.StringField(blank=True)
choice67 = models.StringField(blank=True)
choice70 = models.StringField(blank=True)
choice71 = models.StringField(blank=True)
choice72 = models.StringField(blank=True)
choice73 = models.StringField(blank=True)
choice74 = models.StringField(blank=True)
choice75 = models.StringField(blank=True)
choice76 = models.StringField(blank=True)
choice77 = models.StringField(blank=True)
| [
"[email protected]"
] | |
7ad450f435acb88841c8494906fa82cf5b80c111 | d919d97fe71b4a35d26d97283f951e97fba57cc1 | /scripts/studies/cross_section_area.py | 9433ffb607a15c9d3bf91a5e67efcdb9a245d26d | [
"MIT"
] | permissive | gregstarr/ttools | 9d8130aff475238cdaac09c58a15a1c1936690a2 | fc8dcbf094370e9885311126724697830167d931 | refs/heads/main | 2023-09-01T06:21:53.363830 | 2021-10-19T00:35:16 | 2021-10-19T00:35:16 | 304,731,336 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,916 | py | import numpy as np
import bottleneck as bn
import pandas
import os
import matplotlib.pyplot as plt
from matplotlib import colors
from ttools import config, io, plotting
MLT_DITHER = .01
MLAT_DITHER = .01
KP_DITHER = .5
LOG_KP_DITHER = .1
E_FIELD_DITHER = .01
MLT_BINS = 40
MLAT_BINS = 40
CSA_BINS = 40
MLT_BOUNDS = [-12, 12]
MLAT_BOUNDS = [40, 80]
CSA_BOUNDS = [-4, 0]
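# The *_DITHER constants add small Gaussian jitter before 2-D histogramming so
# discrete grid values spread across bins instead of piling up on bin edges.
# Illustrative helper (an explanatory addition; the functions below jitter inline):
def _dither(values, scale):
    return values + np.random.randn(*np.shape(values)) * scale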
def plot_param_mlt(trough, param, param_bins=50, param_bounds=None, name='param', norm=None, save_dir=None, time_mask=None, file_extra=None, title_extra=None):
if time_mask is None:
time_mask = np.ones(trough.shape[0], dtype=bool)
mask = np.any(trough[time_mask], axis=1)
x = np.broadcast_to(config.mlt_vals[None, :], mask.shape)
x = x + np.random.randn(*x.shape) * MLT_DITHER
y_sl = (time_mask, ) + (None, ) * (2 - param.ndim)
y = np.broadcast_to(param[y_sl], mask.shape)
if param_bounds is None:
param_bounds = np.quantile(param[np.isfinite(param)], [.01, .99])
mask &= np.isfinite(y)
fig, ax = plt.subplots(figsize=(12, 6), tight_layout=True)
(counts, *_, pcm) = ax.hist2d(x[mask], y[mask], bins=[MLT_BINS, param_bins], range=[MLT_BOUNDS, param_bounds], cmap='jet', norm=norm)
plt.colorbar(pcm)
title = f"N = {counts.sum()}{' ' + title_extra if title_extra is not None else ''}"
ax.set_title(title)
ax.set_xlabel('MLT')
ax.set_ylabel(name)
if save_dir is not None:
fn = f"{name}_mlt_dist{'_' + file_extra if file_extra is not None else ''}{'_norm' if norm is not None else ''}.png"
fig.savefig(os.path.join(save_dir, fn))
def plot_param_mlt_set(param, set_param, save_dir, name='param', set_name='set_param', bins=50, param_bounds=None, quantiles=(0, .2, .4, .6, .8, 1)):
edges = np.quantile(set_param[np.isfinite(set_param)], quantiles)
bounds = [(edges[i], edges[i + 1]) for i in range(len(edges) - 1)]
for i, bound in enumerate(bounds):
time_mask = (set_param >= bound[0]) & (set_param <= bound[1])
title_extra = f"|| {set_name} = ({bound[0]:.2f}, {bound[1]:.2f})"
        # NOTE: `trough` is not a parameter of this function; it relies on the
        # module-level array loaded in the __main__ block below
        plot_param_mlt(trough, param, bins, param_bounds, name, time_mask=time_mask, title_extra=title_extra,
                       file_extra=f"{set_name}_{i}", save_dir=save_dir)
def plot_lparam_tparam(l_param, t_param, lparam_bins=50, tparam_bins=50, tname='tparam', lname='lparam', mlt_center=0, mlt_width=1.5, save_dir=None):
mlt_mask = abs(config.mlt_vals - mlt_center) <= mlt_width
x = np.broadcast_to(t_param[:, None], l_param.shape)
tparam_bounds = np.nanquantile(t_param, [.01, .99])
lparam_bounds = np.nanquantile(l_param[l_param != 0], [.01, .99])
mask = np.isfinite(t_param)[:, None] & (l_param != 0) & mlt_mask[None, :]
fig, ax = plt.subplots(1, 2, figsize=(14, 6), tight_layout=True)
counts, *_, pcm = ax[0].hist2d(x[mask], l_param[mask], bins=[tparam_bins, lparam_bins], range=[tparam_bounds, lparam_bounds], cmap='jet')
plt.colorbar(pcm)
pcm = ax[1].pcolormesh(counts.T / np.sum(counts.T, axis=0, keepdims=True), cmap='jet')
plt.colorbar(pcm)
ax[0].set_title(f"N = {counts.sum()} || MLT = {mlt_center}")
ax[0].set_xlabel(tname)
ax[0].set_ylabel(lname)
if save_dir is not None:
fn = f"{tname}_{lname}_dist{mlt_center % 24:d}.png"
fig.savefig(os.path.join(save_dir, fn))
if __name__ == "__main__":
# Load trough dataset
trough_data = np.load("E:\\dataset.npz")
trough = trough_data['trough']
x = trough_data['x']
    csa = np.nansum(x * trough, axis=1)  # integrate x over the trough mask -> cross-section area
# Load Omni
omni = io.get_omni_data()
# Assemble
kp = io.get_kp(trough_data['time'])
log_kp = np.log10(kp + 1)
e_field = omni['e_field'][trough_data['time']].values
plot_lparam_tparam(csa, e_field, 40, 40, 'e_field', 'csa', -6)
# plot_param_mlt_set(csa, e_field, "E:\\study plots\\mlt_csa", 'csa', 'csa', CSA_BINS, CSA_BOUNDS)
plt.show()
| [
"[email protected]"
] | |
b7723239c4a46c561258470ad64b96116357489b | ac5e52a3fc52dde58d208746cddabef2e378119e | /exps-gsn-edf/gsn-edf_ut=2.5_rd=1_rw=0.06_rn=4_u=0.075-0.35_p=harmonic-2/sched=RUN_trial=61/sched.py | d192899a28b503972848e0f3b23908f61c81e5b3 | [] | no_license | ricardobtxr/experiment-scripts | 1e2abfcd94fb0ef5a56c5d7dffddfe814752eef1 | 7bcebff7ac2f2822423f211f1162cd017a18babb | refs/heads/master | 2023-04-09T02:37:41.466794 | 2021-04-25T03:27:16 | 2021-04-25T03:27:16 | 358,926,457 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 336 | py | -X FMLP -Q 0 -L 3 124 400
-X FMLP -Q 0 -L 3 86 400
-X FMLP -Q 0 -L 3 52 200
-X FMLP -Q 1 -L 2 51 175
-X FMLP -Q 1 -L 2 47 150
-X FMLP -Q 1 -L 2 46 150
-X FMLP -Q 2 -L 1 27 200
-X FMLP -Q 2 -L 1 26 100
-X FMLP -Q 2 -L 1 18 125
-X FMLP -Q 3 -L 1 16 150
-X FMLP -Q 3 -L 1 14 100
-X FMLP -Q 3 -L 1 3 175
| [
"[email protected]"
] | |
96254c047f5ab42412198c47ef93c0af1d2d97ba | e537b9b866c6533ef4c488b0104070a3f865d40e | /joerd/store/s3.py | d0d448ec3dabf9fd4d2a674e36689e3fb00c1ac6 | [
"MIT"
] | permissive | mohammadrezabk/joerd | 0c3a65ddb746578f9c06574601dc91ea6af2de2e | 0b86765156d0612d837548c2cf70376c43b3405c | refs/heads/master | 2023-02-14T16:08:59.103192 | 2017-11-21T17:22:22 | 2017-11-21T17:22:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,965 | py | import boto3
from boto3.s3.transfer import TransferConfig
from botocore.exceptions import ClientError
from os import walk
import os.path
from contextlib2 import contextmanager
from joerd.tmpdir import tmpdir
import traceback
import sys
import time
import logging
# extension to mime type mappings to help with serving the S3 bucket as
# a web site. if we add the content-type header on upload, then S3 will
# repeat it back when the tiles are accessed.
_MIME_TYPES = {
'.png': 'image/png',
    '.tif': 'image/tiff',
'.xml': 'application/xml',
'.gz': 'application/x-gzip',
}
# Stores files in S3
class S3Store(object):
def __init__(self, cfg):
self.bucket_name = cfg.get('bucket_name')
self.upload_config = cfg.get('upload_config')
assert self.bucket_name is not None, \
"Bucket name not configured for S3 store, but it must be."
# cache the boto resource and s3 bucket - we don't know what this
# contains, so it seems safe to assume we can't pass it across a
# multiprocessing boundary.
self.s3 = None
self.bucket = None
# This object is likely to get pickled to send it to other processes
# for multiprocessing. However, the s3/boto objects are probably not
# safe to be pickled, so we'll just set them to None and regenerate
# them on the other side.
def __getstate__(self):
odict = self.__dict__.copy()
del odict['s3']
del odict['bucket']
return odict
def __setstate__(self, d):
self.__dict__.update(d)
self.s3 = None
self.bucket = None
def _get_bucket(self):
if self.s3 is None or self.bucket is None:
self.s3 = boto3.resource('s3')
self.bucket = self.s3.Bucket(self.bucket_name)
return self.bucket
def upload_all(self, d):
# strip trailing slashes so that we're sure that the path we create by
# removing this as a prefix does not start with a /.
if not d.endswith('/'):
d = d + "/"
transfer_config = TransferConfig(**self.upload_config)
for dirpath, dirs, files in walk(d):
if dirpath.startswith(d):
suffix = dirpath[len(d):]
self._upload_files(dirpath, suffix, files, transfer_config)
def _upload_files(self, dirpath, suffix, files, transfer_config):
for f in files:
src_name = os.path.join(dirpath, f)
s3_key = os.path.join(suffix, f)
ext = os.path.splitext(f)[1]
mime = _MIME_TYPES.get(ext)
extra_args = {}
if mime:
extra_args['ContentType'] = mime
# retry up to 6 times, waiting 32 (=2^5) seconds before the final
# attempt.
tries = 6
self.retry_upload_file(src_name, s3_key, transfer_config,
extra_args, tries)
def retry_upload_file(self, src_name, s3_key, transfer_config,
extra_args, tries, backoff=1):
logger = logging.getLogger('s3')
bucket = self._get_bucket()
try_num = 0
while True:
try:
bucket.upload_file(src_name, s3_key,
Config=transfer_config,
ExtraArgs=extra_args)
break
            except Exception:  # StandardError existed only in Python 2
try_num += 1
logger.warning("Try %d of %d: Failed to upload %s due to: %s" \
% (try_num, tries, s3_key,
"".join(traceback.format_exception(
*sys.exc_info()))))
if try_num > tries:
raise
time.sleep(backoff)
backoff *= 2
@contextmanager
def upload_dir(self):
with tmpdir() as t:
yield t
self.upload_all(t)
def exists(self, filename):
bucket = self._get_bucket()
exists = False
try:
obj = bucket.Object(filename)
obj.load()
except ClientError as e:
code = e.response['Error']['Code']
# 403 is returned instead of 404 when the bucket doesn't allow
# LIST operations, so treat that as missing as well.
if code == "404" or code == "403":
exists = False
else:
raise e
else:
exists = True
return exists
def get(self, source, dest):
try:
bucket = self._get_bucket()
obj = bucket.Object(source)
obj.download_file(dest)
        except Exception:
raise RuntimeError("Failed to download %r, due to: %s"
% (source, "".join(traceback.format_exception(
*sys.exc_info()))))
def create(cfg):
return S3Store(cfg)
| [
"[email protected]"
] | |
e97b43e6535a9db4e2321537db91770231b38db0 | 8acd480d0c5205e9f9d10fbe9dd18c32c700d567 | /summapy/summapy/summaPaths.py | f7367dd52201b179d53441bb5fad5ea07db5db3b | [] | no_license | klapo/ModTsfc | 743480783877ef76cc52eaba1b1c3e7f6ef2397d | 92f43e2c6dbc506a451a05b6efdf68445115b381 | refs/heads/master | 2020-12-03T02:13:06.865944 | 2017-06-30T19:13:06 | 2017-06-30T19:13:06 | 95,917,039 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,100 | py | import os
def buildFileName(fName, expID, fType='.txt'):
if not expID == '':
expID = '_' + expID
fullFilename = fName + expID + fType
return fullFilename
def checkFile(dirModel, siteID, expName, fName, expID='', fType='.txt', mode='w'):
# Checks that path exists
# Creates path to desired file, fName
# Opens file
# Returns object of the open file, fin
#
# INPUT:
# dirModel: string to model directory
# siteID: string of the site name
# expName: string of the experiment name
    # fName: string of the file name; the '_' before expID and the file
    #        extension are appended automatically
# expID: string of the experiment ID
#
# Files relevant to the entire experiment/run should not provide expID.
# Files for a specific run/experiment should provide expID
# Do not include underscores in strings, they are automatically included
# File path and name
fPath = checkPath(dirModel, siteID, expName, expID)
fullFilename = buildFileName(fName, expID, fType)
newFile = fPath + '/' + fullFilename
# Open/create and return object
fin = open(newFile, mode)
return(fin)
def checkPath(dirModel, siteID, expName, expID=''):
# Checks that path exists
# Creates desired path exists
#
# INPUT:
# dirModel: string to model directory
# siteID: string of the site name
# expName: string of the experiment name
# expID: string of the experiment ID
#
# Files relevant to the entire experiment/run should not provide expID.
# Files for a specific run/experiment should provide expID.
# Do not include underscores in strings, they are automatically included
    # expID is accepted for API symmetry with checkFile but does not affect
    # the directory path
    if expID != '':
        expID = '_' + expID
# File path
if dirModel[-1] == '/':
fPath = dirModel + expName + '/' + siteID
else:
fPath = dirModel + '/' + expName + '/' + siteID
# Open file for reading
if not os.path.exists(fPath):
os.makedirs(fPath)
os.chmod(fPath, mode=0o777)
return(fPath)
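# Hedged usage sketch (paths are placeholders):
# fin = checkFile('/models', 'siteA', 'exp1', 'forcing', expID='run01')
# -> creates /models/exp1/siteA/ if needed and opens
#    /models/exp1/siteA/forcing_run01.txt for writing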
| [
"[email protected]"
] | |
c767f67a27d2bf85ee3d55adcf7982bd2c6c5109 | 5dcefbd0ed48007b57d47ec2e74b16f344769271 | /models/cat_potreros.py | ac1fcecef658a5a2e434d0cccbb1a0621cd5c95a | [] | no_license | roxolea5/gana2 | 127bd0a51682a3fabe097e9b278582d515b9095e | 7452cdfb8c18db0a28234b039b0375a1d1ee7eb0 | refs/heads/master | 2022-12-29T10:22:01.491372 | 2020-10-15T01:35:17 | 2020-10-15T01:35:17 | 293,363,453 | 0 | 0 | null | 2020-10-15T01:35:18 | 2020-09-06T21:20:33 | Python | UTF-8 | Python | false | false | 363 | py | from sqlalchemy_serializer import SerializerMixin
from extensions.db import DB
db = DB.db
DB_ENGINE = 'dev'
class Potrero(db.Model, SerializerMixin):
__bind_key__ = DB_ENGINE
__tablename__ = 'cat_potreros'
__table_args__ = {
'autoload': True,
'autoload_with': DB.engines[DB_ENGINE].engine
}
    def __init__(self, **kwargs):
        # keep SQLAlchemy's keyword-based constructor working; a bare `pass`
        # here would break Potrero(column=value) initialization
        super().__init__(**kwargs)
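# Hedged usage sketch: with autoload the columns are reflected from the 'dev'
# engine at import time, so (assuming a Flask-SQLAlchemy style `db`) queries
# work without explicit column definitions:
# potreros = Potrero.query.all()
# rows = [p.to_dict() for p in potreros]  # to_dict() comes from SerializerMixin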
| [
"[email protected]"
] | |
cbd0426653d0bdcaf34cbdaf86cd071eb58163b8 | a6ab2735ff2f89adc64a4afcbfe013c1039198a1 | /scrapers/liverpool.py | 417b56688c24c6978ebc90919fea9056e20e3935 | [] | no_license | rehmanali1337/innvictus_scraper | 72e5049dd2c3d391f47d37e145edb2bf7c6a371d | bcb4e986c1922b20d61baca88e6ff03909bca518 | refs/heads/master | 2023-05-06T21:43:27.163117 | 2021-05-26T05:04:29 | 2021-05-26T05:04:29 | 341,820,871 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,929 | py | from selenium import webdriver
import asyncio
import json
from models.cache import ListCache
from models.products import LiverPoolProduct
from configs import global_vars
import logging
class LiverPoolNewProdsScraper:
def __init__(self, queue):
self.config = json.load(open(global_vars.MAIN_CONFIG_FILE_LOCATION))
self.queue = queue
        # NOTE: this rebinds `print` only inside __init__'s local scope; the
        # other methods still call the built-in print
        print = logging.getLogger(' LiverpoolMonitor ').info
self.options = webdriver.ChromeOptions()
self.options.add_argument('--no-sandbox')
# self.options.add_argument('--headless')
self.options.add_argument('--disable-dev-shm-usage')
self.options.add_argument('start-maximized')
self.options.add_argument('disable-infobars')
self.webdriver_path = self.config.get("WEBDRIVER_PATH")
self.loop = asyncio.new_event_loop()
self.driver = None
self.URLs = [
'https://www.liverpool.com.mx/tienda/zapatos/catst1105210',
'https://www.liverpool.com.mx/tienda/zapatos/catst1010801',
'https://www.liverpool.com.mx/tienda/zapatos/catst1011086'
]
self.itter_time = 10
def start(self):
self.cache = ListCache('LiverPoolCache')
self.loop.run_until_complete(self.main())
async def main(self):
self.driver = webdriver.Chrome(
executable_path=self.webdriver_path, options=self.options)
self.driver.implicitly_wait(10)
# await self.create_cache()
while True:
try:
all_links = await self.get_all_prod_links()
print(f'[+] Got {len(all_links)} prod links!')
for link in all_links:
if not self.cache.has_item(link):
prod = await self.get_prod_details(link)
self.queue.put(prod)
self.cache.add_item(link)
await asyncio.sleep(self.itter_time)
except Exception as e:
print(e)
async def create_cache(self):
print('[+] Creating cache ..')
links = await self.get_all_prod_links()
self.cache.replace_cache(links)
print('[+] Created cache for prods')
async def get_all_prod_links(self):
links = []
for url in self.URLs:
self.driver.get(url)
prods_list = self.driver.find_elements_by_xpath(
'//li[@class="m-product__card card-masonry"]')
for prod in prods_list:
link = prod.find_element_by_tag_name('a').get_attribute('href')
links.append(link)
return links
async def get_prod_details(self, link):
self.driver.get(link)
prod = LiverPoolProduct()
prod.name = self.driver.find_element_by_xpath(
'//h1[@class="a-product__information--title"]').text
prod.link = link
out_of_stock_sizes = self.driver.find_elements_by_xpath(
'//button[@class="a-btn a-btn--actionpdp -disabled"]')
for size in out_of_stock_sizes:
prod.out_of_stock_sizes.append(size.text)
in_stock_sizes = self.driver.find_elements_by_xpath(
'//button[@class="a-btn a-btn--actionpdp"]')
for size in in_stock_sizes:
prod.in_stock_sizes.append(size.text)
prod.img_link = self.driver.find_element_by_xpath(
'//img[@id="image-real"]').get_attribute('src')
prod.color = self.driver.find_element_by_xpath(
'//p[@class="a-product__paragraphColor m-0 mt-2 mb-1"]').text.split(':')[-1].strip()
prod.price = self.driver.find_element_by_xpath(
'//p[@class="a-product__paragraphDiscountPrice m-0 d-inline "]').text.split('\n')[0].replace(',', '').replace('$', '')
return prod
# def quit_browser(self):
# if self.driver is not None:
# self.driver.quit()
# self.driver = None
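# Hedged usage sketch (assumes a multiprocessing.Queue and a config file with
# a WEBDRIVER_PATH entry at configs.global_vars.MAIN_CONFIG_FILE_LOCATION):
# from multiprocessing import Queue
# q = Queue()
# LiverPoolNewProdsScraper(q).start()  # blocks; new products arrive on q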
| [
"[email protected]"
] | |
372ea6ce32ae8cbc41d924505045866e1e02297f | 75b3ea89493d4bdea58d0aeeb33bb69c9114764a | /LinkedLists/MoveLastNodeToFront.py | 71451cb0bb3fd0e77123103eedd3c859d96a8d77 | [] | no_license | BabuChandermaniRawat/DSA | f15fc57cbb260c9aca430fac319ab73e12b4a17f | 98ec99835f9b004405cf3f853b555bd633e71071 | refs/heads/main | 2023-05-31T06:24:04.393337 | 2021-06-08T20:44:09 | 2021-06-08T20:44:09 | 370,409,282 | 1 | 0 | null | 2021-05-25T04:18:45 | 2021-05-24T16:00:25 | null | UTF-8 | Python | false | false | 1,250 | py | class Solution:
def moveLastItemToFront(self, head: ListNode) -> ListNode:
"""
        Approach 1: swap the first and last values in place
"""
def valuesSwappingApproach(head):
            if not head or not head.next: return head  # empty or single node
first = head
result = head
            while head.next is not None:  # walk to the last node
head = head.next
#swap values and return the result pointer
first.val, head.val = head.val, first.val
return result
"""
        Approach 2: split the linked list into three parts and relink them
"""
def splittingListApproach(head):
            if not head or not head.next: return head  # empty or single node
first = head
second = head.next
first.next = None
third = second
            if third.next:
                # three or more nodes: walk to the second-to-last node
                # (the original `third.next.next` guard crashed on two-node
                # lists and dropped a node on three-node lists)
while third.next.next:
third = third.next
secondEnd = third
third = third.next
secondEnd.next = first
third.next = second
return third
            else:
                second.next = first
                return second
        # the original defined both helpers without calling either; dispatch
        # to one so the method actually returns the re-linked list
        return splittingListApproach(head)
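# Hedged demo (LeetCode-style harness; ListNode is assumed by the solution
# above and shown here only for illustration):
# class ListNode:
#     def __init__(self, val=0, next=None):
#         self.val, self.next = val, next
# head = ListNode(1, ListNode(2, ListNode(3)))
# head = Solution().moveLastItemToFront(head)  # relinks to 3 -> 2 -> 1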
| [
"[email protected]"
] | |
76ddc9c06c47c12598e6d3cefacae20640a6f2cb | 174edd84c3372e2f062dfb58848153391b44a9ea | /spec/logic.py | 75efefcd89862e45766efb341b03ef1d9a0d0645 | [] | no_license | psavine42/viper-server | 7803e3282c98dcc978f4f195aef87acc4861b8bd | dab58a979ab238d65735953c71cae6c288400144 | refs/heads/master | 2020-03-26T15:28:22.823335 | 2018-11-28T03:42:34 | 2018-11-28T03:42:34 | 145,045,447 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,533 | py | from unittest import TestCase
import numpy as np
from operator import eq
from pprint import pprint
from shapely.geometry import LineString, Point
from spec.seg_data import *
from src.rules.opers import *
from src import visualize, SystemFactory, RenderNodeSystem
from src.rules import RuleEngine, KB, heursitics
from src.rules.property import Property
from src import process, viper
from src import propogate as gp
from src.geom import rebuild_mls, to_mls
import src.structs as gr
import src.render.render_propogators as rpg
def read_props(node, k):
# print(node , ':', node.tmps)
return node.get(k, None)
_root = (2, 1, 0)
def node_line(line, prev=None):
for i in range(len(line)):
n = Node(line[i])
if prev is None:
prev = n
else:
n.connect_to(prev)
prev = n
return prev
def node_tree(pts, prev=None):
for i in range(len(pts)):
pt = pts[i]
if isinstance(pt[0], int):
n = Node(pt)
if prev is None:
prev = n
else:
n.connect_to(prev)
prev = n
elif isinstance(pt[0], tuple):
pt, rest = pts[i]
n = Node(pt)
if prev is None:
prev = n
else:
n.connect_to(prev)
node_tree(rest, n)
prev = n
return prev
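# Hedged illustration of the nested format node_tree() consumes: a plain point
# is a tuple of ints, a branch is (point, [children]):
# root = node_tree([(0, 0, 0), ((1, 0, 0), [(1, 1, 0), (1, 2, 0)]), (2, 0, 0)])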
class TestProp(TestCase):
def get_sys(self):
system = SystemFactory.from_segs(SEGMENTS, root=_root, lr='a')
system = system.bake()
return viper.nx_to_nodes(system)
def test_dist_prop(self):
root = self.get_sys()
propagator = gp.DistanceFromSource()
propagator(root)
for n in root.__iter__():
pred = n.predecessors()
if len(pred) == 1:
assert pred[0].get(propagator.var) + 1 == n.get(propagator.var)
def test_order_prop(self):
root = self.get_sys()
propagator = gp.BuildOrder()
propagator(root)
order = set()
cnt = 0
for n in root.__iter__():
print(n)
cnt += 1
order.add(n.get(propagator.var))
assert len(order) == cnt
def test_dist_to_end(self):
root = self.get_sys()
propagator = gp.DistanceFromEnd()
propagator(root)
for n in root.__iter__():
if len(n.successors()) == 0:
assert n.get(propagator.var) == 0
def test_loop_neg(self):
root = self.get_sys()
propagator = gp.LoopDetector()
propagator(root, data=[])
for n in root.__iter__():
assert n.get(propagator.var) is not True
def test_loop_pos(self):
connect_loop = [(8., 8., 0), (4., 4., 0)]
SEGMENTS.append(connect_loop)
system = SystemFactory.from_segs(SEGMENTS, root=_root, lr='a')
system = system.bake()
root = viper.nx_to_nodes(system)
propagator = gp.LoopDetector()
propagator(root, data=[])
for n in root.__iter__():
if n.geom in connect_loop:
assert n.get(propagator.var) is True
def test_edge_det(self):
root = self.get_sys()
propagator = gp.DirectionWriter()
propagator(root)
for n in root.__iter__():
for e in n.successors(edges=True):
print(e)
def test_overlap_resolver(self):
pass
def test_remover_sm(self):
system = SystemFactory.from_segs(
SEGMENTS, sys=viper.System, root=_root, lr='a')
system.bake()
system.gplot(fwd=True, bkwd=False)
def test_remover_cl(self):
system = SystemFactory.from_segs(
SEGMENTS_COL, sys=viper.System, root=_root, lr='a')
system.aplot()
def test_remover_lg(self):
segs = load_segs()
system = SystemFactory.from_serialized_geom(
segs, sys=viper.System, root=(-246, 45, 0))
system.bake()
system.gplot(fwd=True, bkwd=False)
def test_reverse(self):
n1 = Node(1)
n2 = Node(2)
edge = n1.connect_to(n2)
edge.reverse()
assert edge.target == n1
assert edge.source == n2
def test_merge_self(self):
n1 = [(1, 1), (1, 4.8), (1.2, 5), (1, 5.2), (1, 10)]
prev = node_line(n1)
gp.Cluster()(prev)
for n in prev.__iter__(fwd=True, bkwd=True):
print(n, *n.neighbors())
def test_geom_sims(self):
l2 = LineString([(1, 2), (1, 4), (4, 6), (4, 8)])
l1 = LineString([(1, 3), (1, 4), (4, 6), (1, 4)])
print(l1)
        ds = l1.union(l2)  # the original unioned l1 with itself, leaving l2 unused
print(ds)
def test_adder(self):
n1 = [(1, 2), (1, 4), (4, 6), (4, 8)]
prev = node_line(n1)
ndd = Node((1, 3))
pa = gp.PointAdder(ndd)
pa(prev)
for n in prev.__iter__(fwd=True, bkwd=True):
print(n)
G = viper.nodes_to_nx(prev)
visualize.gplot(G)
def test_point(self):
point = Point(1, 8)
l3 = [(1, 3), (1, 10), (10, 6)]
r3 = to_mls(l3)
print(r3)
res = rebuild_mls(r3, point)
print(res)
tgt = to_mls([(1, 3), (1, 8), (1, 10), (10, 6)])
assert res == tgt
def test_254(self):
segs = load_segs()
segs, syms = SystemFactory.to_segments(segs)
fsg = []
fsm = []
print(syms[0])
mx = -260
mn = -270
for seg in segs:
sg = list(seg.coords)
if mn < sg[0][0] < mx or mn < sg[1][0] < mx:
fsg.append(seg)
for seg in syms:
sg = list(seg.coords)
if mn < sg[0][0] < mx:
fsm.append(seg)
print(fsm[0])
system = viper.SystemV3(segments=fsg, symbols=fsm, root=(-246, 45, 0))
system.aplot()
class TestRenderProp(TestCase):
def test_riser_fn(self):
root = self.test_translate()
rcp = viper.System.recipe()
rcp(root)
rules = heursitics.EngineHeurFP()
Eng = RuleEngine(term_rule=rules.root)
Kb = KB(rules.root)
root = Eng.alg2(root, Kb)
renderer = RenderNodeSystem()
root = renderer.render(root)
print('nodes ', len(root))
visualize.print_iter(root)
meta = Eng.annotate_type(root, rules.final_labels)
visualize.plot3d(root, meta)
def test_translate(self):
root = vertical_branch()
end1 = gr.node_at(root, (8, 6, 0))
root2 = vertical_branch()
rpg.Translate()(root2, data=np.array([8, 8, 0]))
end1.connect_to(root2)
return root
# visualize.plot3d(root2, {})
class TestLogic(TestCase):
def tearDown(self):
self.term = None
def test_prop1(self):
cond = IF('nsucs', eq, 0)
isEnd = Property('IsEnd', cond)
node1 = Node(1)
assert cond(node1) is True
res1 = isEnd(node1)
assert res1 is True
assert node1.get('IsEnd') is True
node2 = Node(2)
node1.connect_to(node2)
assert cond(node1) is False
def test_and(self):
is_symbol = HAS('symbol')
is_end = IF('nsucs', eq, 0)
is_circle = IF('symbol', eq, GeomType.CIRCLE)
is_drop_head = AND(is_end, is_circle)
# setup Nodes
n0 = Node(0)
n1 = Node(1, symbol=GeomType.CIRCLE)
n2 = Node(2, symbol=GeomType.CIRCLE)
# graph
n0.connect_to(n1)
n1.connect_to(n2)
assert is_drop_head(n1) is False
assert is_drop_head(n2) is True
assert is_symbol(n0) is False
assert is_symbol(n1) is True
def test_itm(self):
n0 = Node(0, symbol=GeomType.CIRCLE)
n1 = Node(1)
n2 = Node(2, symbol=GeomType.CIRCLE)
n0.connect_to(n1)
n1.connect_to(n2)
read_props(n2, 'IsDrop')
# assert self.term(n0) is True
read_props(n2, 'IsDrop')
print('\n')
print(n0, n0.tmps)
print(n1, n1.tmps)
print(n2, n2.tmps)
assert read_props(n2, 'IsDrop') is True
assert read_props(n0, 'IsRiser') is True
assert not read_props(n2, 'IsRiser')
def test_eng(self):
print('\n')
rl = RuleEngine(term_rule=self.term)
pprint(rl._freq)
def test_eng2(self):
from src.rules.heursitics import EngineHeurFP
rules = EngineHeurFP()
Eng = RuleEngine(term_rule=rules.root)
system = SystemFactory.from_segs(SEGMENTS, root=_root, lr='a')
system = system.bake()
root = viper.nx_to_nodes(system)
root = Eng.yield_queue(root)
nxg = Eng.plot(root, rules.final_labels)
def test_compile_eng3(self):
rules = heursitics.EngineHeurFP()
Eng = RuleEngine(term_rule=rules.root)
Kb = KB(rules.root)
print(Kb.get_vars())
print(Kb.agenda)
def test_eng3(self):
rules = heursitics.EngineHeurFP()
Eng = RuleEngine(term_rule=rules.root, mx=400, debug=True, nlog=1)
_root = (2, 1, 0)
system = SystemFactory.from_segs(SEGMENTS, root=_root, lr='a')
system = system.bake()
root = viper.nx_to_nodes(system)
Kb = KB(rules.root)
print(Kb)
root = Eng.alg2(root, Kb, )
nxg = Eng.plot(root, rules.final_labels)
def test_eng4(self):
system = SystemFactory.from_serialized_geom(load_segs(),
sys=viper.System,
root=(-246, 45, 0))
system = system.bake()
root = viper.nx_to_nodes(system)
print(root)
rules = heursitics.EngineHeurFP()
Eng = RuleEngine(term_rule=rules.root, mx=2500, debug=False, nlog=20)
Kb = KB(rules.root)
root = Eng.alg2(root, Kb)
nxg = Eng.plot(root, rules.final_labels)
def test_eng5(self):
data = load_segs(fl='1535158393.0-revit-signal')
system = SystemFactory.from_serialized_geom(
data, sys=viper.System, root=(-246, 45, 0))
system = system.bake()
root = system.root
print(root)
rules = heursitics.EngineHeurFP()
Eng = RuleEngine(term_rule=rules.root, mx=2500, debug=False, nlog=20)
Kb = KB(rules.root)
root = Eng.alg2(root, Kb)
print('nodes ', len(root))
renderer = RenderNodeSystem()
meta = Eng.annotate_type(root, rules.final_labels)
root = renderer.render(root)
print('nodes ', len(root))
visualize.plot3d(root, meta)
def test_eng_full(self):
"""
Test the engine as executed by server
"""
import time
start = time.time()
data = load_segs(fl='1535158393.0-revit-signal')
points = [[-246.0000000012448, 45.31190012691635, 0.0]]
proc = process.SystemProcessorV3()
ds = proc.process(data, points, system_type='FP')
[print(k, len(v)) for k, v in ds.items()]
visualize.dump_data(ds)
for g in ds['geom']:
x1, y1, z1, x2, y2, z2 = g
res = [x1 == x2, y1 == y2, z1 == z2]
assert not all(res)
end = time.time()
print('time {} secs'.format(end - start))
def test_loadsyms(self):
segs = load_segs()
ds = [x for x in segs if x['children'] != []]
system = SystemFactory.from_serialized_geom(ds, root=(-246, 45, 0))
| [
"[email protected]"
] | |
249da4760ecd8254331c7befd8d0738778611bc5 | fc558ed0bccbbd0edf52e662b310168a1b97ab56 | /tests/pcie/test_pcie.py | 5d1ce746a21aa2e7bcc4a0d8b0844f115ebdeb15 | [
"MIT"
] | permissive | bigdot123456/cocotbext-pcie | 914f4ac458901d93edb4447e65c24a1b30f39ea1 | 19e891adf9c45226cbbe2184199be7d904d0901e | refs/heads/master | 2023-06-07T07:38:47.289302 | 2021-06-17T08:59:28 | 2021-06-17T08:59:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 23,020 | py | #!/usr/bin/env python
"""
Copyright (c) 2020 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import logging
import os
import cocotb_test.simulator
import cocotb
from cocotb.regression import TestFactory
from cocotbext.pcie.core import RootComplex, MemoryEndpoint, Device, Switch
from cocotbext.pcie.core.caps import MsiCapability
from cocotbext.pcie.core.utils import PcieId
class TestEndpoint(MemoryEndpoint):
__test__ = False
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.vendor_id = 0x1234
self.device_id = 0x5678
self.msi_cap = MsiCapability()
self.msi_cap.msi_multiple_message_capable = 5
self.msi_cap.msi_64bit_address_capable = 1
self.msi_cap.msi_per_vector_mask_capable = 1
self.register_capability(self.msi_cap)
self.add_mem_region(1024*1024)
self.add_prefetchable_mem_region(1024*1024)
self.add_io_region(1024)
class TB:
def __init__(self, dut):
self.dut = dut
self.log = logging.getLogger("cocotb.tb")
self.log.setLevel(logging.DEBUG)
self.rc = RootComplex()
self.ep = []
ep = TestEndpoint()
self.dev = Device(ep)
self.dev.upstream_port.max_link_speed = 3
self.dev.upstream_port.max_link_width = 16
self.ep.append(ep)
self.rc.make_port().connect(self.dev)
self.sw = Switch()
self.rc.make_port().connect(self.sw)
ep = TestEndpoint()
self.dev2 = Device(ep)
self.dev2.upstream_port.max_link_speed = 3
self.dev2.upstream_port.max_link_width = 16
self.ep.append(ep)
self.sw.make_port().connect(self.dev2)
ep = TestEndpoint()
self.dev3 = Device(ep)
self.dev3.upstream_port.max_link_speed = 3
self.dev3.upstream_port.max_link_width = 16
self.ep.append(ep)
self.sw.make_port().connect(self.dev3)
ep = TestEndpoint()
self.dev4 = Device(ep)
self.dev4.upstream_port.max_link_speed = 3
self.dev4.upstream_port.max_link_width = 16
self.ep.append(ep)
self.rc.make_port().connect(self.dev4)
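# Resulting test topology (sketch):
#   RC -- dev          (ep[0])
#   RC -- sw -- dev2   (ep[1])
#          \-- dev3    (ep[2])
#   RC -- dev4         (ep[3])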
async def run_test_rc_mem(dut):
tb = TB(dut)
tb.rc.log.setLevel(logging.DEBUG)
mem_base, mem_data = tb.rc.alloc_region(1024*1024)
io_base, io_data = tb.rc.alloc_io_region(1024)
for length in list(range(1, 32))+[1024]:
for offset in list(range(8))+list(range(4096-8, 4096)):
tb.log.info("Memory operation length: %d offset: %d", length, offset)
addr = mem_base+offset
test_data = bytearray([x % 256 for x in range(length)])
await tb.rc.mem_write(addr, test_data)
assert mem_data[offset:offset+length] == test_data
assert await tb.rc.mem_read(addr, length) == test_data
for length in list(range(1, 32)):
for offset in list(range(8)):
tb.log.info("IO operation length: %d offset: %d", length, offset)
addr = io_base+offset
test_data = bytearray([x % 256 for x in range(length)])
await tb.rc.io_write(addr, test_data)
assert io_data[offset:offset+length] == test_data
assert await tb.rc.io_read(addr, length) == test_data
async def run_test_config(dut):
tb = TB(dut)
tb.rc.log.setLevel(logging.DEBUG)
tb.log.info("Read complete config space")
orig = await tb.rc.config_read(PcieId(0, 1, 0), 0x000, 256, 1000, 'ns')
tb.log.info("Read and write interrupt line register")
await tb.rc.config_write(PcieId(0, 1, 0), 0x03c, b'\x12', 1000, 'ns')
val = await tb.rc.config_read(PcieId(0, 1, 0), 0x03c, 1, 1000, 'ns')
assert val == b'\x12'
tb.log.info("Write complete config space")
await tb.rc.config_write(PcieId(0, 1, 0), 0x000, orig, 1000, 'ns')
async def run_test_enumerate(dut):
tb = TB(dut)
all_ep = tb.rc.endpoints+[tb.sw.upstream_bridge]+tb.sw.endpoints+tb.ep
tb.rc.log.setLevel(logging.DEBUG)
for ep in all_ep:
ep.log.setLevel(logging.DEBUG)
await tb.rc.enumerate(enable_bus_mastering=True, configure_msi=True)
# check that enumerated tree matches devices
def check_dev(dev):
tb.log.info("Check device at %s", dev.pcie_id)
# ensure ID was assigned to device
assert dev.pcie_id != PcieId(0, 0, 0)
# get tree item
ti = tb.rc.tree.find_child_dev(dev.pcie_id)
assert ti is not None
# check informational registers
tb.log.info("Header type: 0x%02x", ti.header_type)
tb.log.info("Vendor ID: 0x%04x", ti.vendor_id)
tb.log.info("Device ID: 0x%04x", ti.device_id)
tb.log.info("Revision ID: 0x%02x", ti.revision_id)
tb.log.info("Class code: 0x%06x", ti.class_code)
assert ti.header_type == dev.header_layout | (bool(dev.multifunction_device) << 7)
assert ti.class_code == dev.class_code
assert ti.revision_id == dev.revision_id
assert ti.vendor_id == dev.vendor_id
assert ti.device_id == dev.device_id
if ti.header_type & 0x7f == 0x01:
# bridge
bar_cnt = 2
# check bridge registers
tb.log.info("Primary bus %d", ti.pri_bus_num)
tb.log.info("Secondary bus %d", ti.sec_bus_num)
tb.log.info("Subordinate bus %d", ti.sub_bus_num)
tb.log.info("IO base 0x%08x", ti.io_base)
tb.log.info("IO limit 0x%08x", ti.io_limit)
tb.log.info("Mem base 0x%08x", ti.mem_base)
tb.log.info("Mem limit 0x%08x", ti.mem_limit)
tb.log.info("Prefetchable mem base 0x%016x", ti.prefetchable_mem_base)
tb.log.info("Prefetchable mem limit 0x%016x", ti.prefetchable_mem_limit)
assert ti.sec_bus_num == dev.sec_bus_num
assert ti.sub_bus_num == dev.sub_bus_num
assert ti.io_base == dev.io_base
assert ti.io_limit == dev.io_limit
assert ti.mem_base == dev.mem_base
assert ti.mem_limit == dev.mem_limit
assert ti.prefetchable_mem_base == dev.prefetchable_mem_base
assert ti.prefetchable_mem_limit == dev.prefetchable_mem_limit
else:
bar_cnt = 6
tb.log.info("Subsystem vendor ID: 0x%04x", ti.subsystem_vendor_id)
tb.log.info("Subsystem ID: 0x%04x", ti.subsystem_id)
assert ti.subsystem_vendor_id == dev.subsystem_vendor_id
assert ti.subsystem_id == dev.subsystem_id
# check BARs
bar = 0
while bar < bar_cnt:
            if dev.bar_mask[bar] == 0:
                # unused bar
                assert ti.bar[bar] is None
                assert ti.bar_raw[bar] == 0
                assert ti.bar_addr[bar] is None
                assert ti.bar_size[bar] is None
                bar += 1
            elif dev.bar[bar] & 1:
                # IO BAR
                tb.log.info("BAR%d: IO BAR addr 0x%08x, size %d", bar, ti.bar_addr[bar], ti.bar_size[bar])
                assert ti.bar[bar] == dev.bar[bar]
                assert ti.bar_raw[bar] == dev.bar[bar]
                assert ti.bar_addr[bar] == dev.bar[bar] & ~0x3
                assert ti.bar_size[bar] == (~dev.bar_mask[bar] & 0xfffffffc)+0x4
                bar += 1
            elif dev.bar[bar] & 4:
                # 64 bit BAR
                tb.log.info("BAR%d: Mem BAR (64 bit) addr 0x%08x, size %d", bar, ti.bar_addr[bar], ti.bar_size[bar])
                assert ti.bar[bar] == dev.bar[bar] | dev.bar[bar+1] << 32
                assert ti.bar_raw[bar] == dev.bar[bar]
                assert ti.bar_raw[bar+1] == dev.bar[bar+1]
                assert ti.bar_addr[bar] == (dev.bar[bar] | dev.bar[bar+1] << 32) & ~0xf
                assert ti.bar_size[bar] == (~(dev.bar_mask[bar] | dev.bar_mask[bar+1] << 32) & 0xfffffffffffffff0)+0x10
                bar += 2
            else:
                # 32 bit BAR
                tb.log.info("BAR%d: Mem BAR (32 bit) addr 0x%08x, size %d", bar, ti.bar_addr[bar], ti.bar_size[bar])
                assert ti.bar[bar] == dev.bar[bar]
                assert ti.bar_raw[bar] == dev.bar[bar]
                assert ti.bar_addr[bar] == dev.bar[bar] & ~0xf
                assert ti.bar_size[bar] == (~dev.bar_mask[bar] & 0xfffffff0)+0x10
                bar += 1
        if dev.expansion_rom_addr_mask == 0:
            assert ti.expansion_rom_raw == 0
            assert ti.expansion_rom_addr is None
            assert ti.expansion_rom_size is None
        else:
            assert ti.expansion_rom_raw & 0xfffff800 == dev.expansion_rom_addr
            assert ti.expansion_rom_addr == dev.expansion_rom_addr
            assert ti.expansion_rom_size == (~dev.expansion_rom_addr_mask & 0xfffff800)+0x800
# TODO capabilities
for d in all_ep:
check_dev(d)
# check settings in enumerated tree
def check_tree(ti):
tb.log.info("Check bridge at %s", ti.pcie_id)
tb.log.info("Header type: 0x%02x", ti.header_type)
tb.log.info("Vendor ID: 0x%04x", ti.vendor_id)
tb.log.info("Device ID: 0x%04x", ti.device_id)
tb.log.info("Revision ID: 0x%02x", ti.revision_id)
tb.log.info("Class code: 0x%06x", ti.class_code)
tb.log.info("Primary bus: %d", ti.pri_bus_num)
tb.log.info("Secondary bus: %d", ti.sec_bus_num)
tb.log.info("Subordinate bus: %d", ti.sub_bus_num)
tb.log.info("IO base: 0x%08x", ti.io_base)
tb.log.info("IO limit: 0x%08x", ti.io_limit)
tb.log.info("Mem base: 0x%08x", ti.mem_base)
tb.log.info("Mem limit: 0x%08x", ti.mem_limit)
tb.log.info("Prefetchable mem base: 0x%016x", ti.prefetchable_mem_base)
tb.log.info("Prefetchable mem limit: 0x%016x", ti.prefetchable_mem_limit)
bus_regions = []
io_regions = []
mem_regions = []
prefetchable_mem_regions = []
for ci in ti:
tb.log.info("Check device at %s", ci.pcie_id)
tb.log.info("Header type: 0x%02x", ci.header_type)
tb.log.info("Vendor ID: 0x%04x", ci.vendor_id)
tb.log.info("Device ID: 0x%04x", ci.device_id)
tb.log.info("Revision ID: 0x%02x", ci.revision_id)
tb.log.info("Class code: 0x%06x", ci.class_code)
if ci.header_type & 0x7f == 0x00:
# type 0 header
tb.log.info("Subsystem vendor ID: 0x%04x", ci.subsystem_vendor_id)
tb.log.info("Subsystem ID: 0x%04x", ci.subsystem_id)
# check that BARs are within our apertures
for bar in range(6):
if ci.bar[bar] is None:
continue
if ci.bar[bar] & 1:
# IO BAR
tb.log.info("BAR%d: IO BAR addr 0x%08x, size %d", bar, ci.bar_addr[bar], ci.bar_size[bar])
assert (ti.io_base <= ci.bar_addr[bar] and ci.bar_addr[bar]+ci.bar_size[bar]-1 <= ti.io_limit)
io_regions.append((ci.bar_addr[bar], ci.bar_addr[bar]+ci.bar_size[bar]-1))
elif ci.bar[bar] > 0xffffffff:
# prefetchable BAR
tb.log.info("BAR%d: Mem BAR (prefetchable) addr 0x%08x, size %d",
bar, ci.bar_addr[bar], ci.bar_size[bar])
assert (ti.prefetchable_mem_base <= ci.bar_addr[bar]
and ci.bar_addr[bar]+ci.bar_size[bar]-1 <= ti.prefetchable_mem_limit)
prefetchable_mem_regions.append((ci.bar_addr[bar], ci.bar_addr[bar]+ci.bar_size[bar]-1))
else:
# non-prefetchable BAR
tb.log.info("BAR%d: Mem BAR (non-prefetchable) addr 0x%08x, size %d",
bar, ci.bar_addr[bar], ci.bar_size[bar])
assert (ti.mem_base <= ci.bar_addr[bar]
and ci.bar_addr[bar]+ci.bar_size[bar]-1 <= ti.mem_limit)
mem_regions.append((ci.bar_addr[bar], ci.bar_addr[bar]+ci.bar_size[bar]-1))
if ci.expansion_rom_addr:
# expansion ROM BAR
tb.log.info("Expansion ROM BAR: Mem BAR (non-prefetchable) addr 0x%08x, size %d",
ci.expansion_rom_addr, ci.expansion_rom_size)
assert (ti.mem_base <= ci.expansion_rom_addr and
ci.expansion_rom_addr+ci.expansion_rom_size-1 <= ti.mem_limit)
mem_regions.append((ci.expansion_rom_addr, ci.expansion_rom_addr+ci.expansion_rom_size-1))
if ci.header_type & 0x7f == 0x01:
# type 1 header
tb.log.info("Primary bus: %d", ci.pri_bus_num)
tb.log.info("Secondary bus: %d", ci.sec_bus_num)
tb.log.info("Subordinate bus: %d", ci.sub_bus_num)
tb.log.info("IO base: 0x%08x", ci.io_base)
tb.log.info("IO limit: 0x%08x", ci.io_limit)
tb.log.info("Mem base: 0x%08x", ci.mem_base)
tb.log.info("Mem limit: 0x%08x", ci.mem_limit)
tb.log.info("Prefetchable mem base: 0x%016x", ci.prefetchable_mem_base)
tb.log.info("Prefetchable mem limit: 0x%016x", ci.prefetchable_mem_limit)
# check that child switch apertures are within our apertures
assert ti.sec_bus_num <= ci.pri_bus_num <= ti.sub_bus_num
assert ti.sec_bus_num <= ci.sec_bus_num and ci.sub_bus_num <= ti.sub_bus_num
bus_regions.append((ci.sec_bus_num, ci.sub_bus_num))
if ci.io_base:
assert ti.io_base <= ci.io_base and ci.io_limit <= ti.io_limit
io_regions.append((ci.io_base, ci.io_limit))
if ci.mem_base:
assert ti.mem_base <= ci.mem_base and ci.mem_limit <= ti.mem_limit
mem_regions.append((ci.mem_base, ci.mem_limit))
if ci.prefetchable_mem_base:
assert (ti.prefetchable_mem_base <= ci.prefetchable_mem_base and
ci.prefetchable_mem_limit <= ti.prefetchable_mem_limit)
prefetchable_mem_regions.append((ci.prefetchable_mem_base, ci.prefetchable_mem_limit))
# check for assignment overlaps
for lst in [bus_regions, io_regions, mem_regions, prefetchable_mem_regions]:
lst.sort()
for m in range(1, len(lst)):
assert lst[m-1][1] <= lst[m][0], "assigned regions overlap"
# recurse into child nodes
for ci in ti:
if ci.header_type & 0x7f == 0x01:
tb.log.info("Check bridge at %s (child of bridge at %s)", ci.pcie_id, ti.pcie_id)
check_tree(ci)
check_tree(tb.rc.tree)
async def run_test_ep_mem(dut, ep_index=0):
tb = TB(dut)
await tb.rc.enumerate(enable_bus_mastering=True, configure_msi=True)
tb.rc.log.setLevel(logging.DEBUG)
ep = tb.ep[ep_index]
ep.log.setLevel(logging.DEBUG)
ti = tb.rc.tree.find_child_dev(ep.pcie_id)
for length in list(range(1, 32))+[1024]:
for offset in list(range(8))+list(range(4096-8, 4096)):
tb.log.info("Memory operation (32-bit BAR) length: %d offset: %d", length, offset)
addr = ti.bar_addr[0]+offset
test_data = bytearray([x % 256 for x in range(length)])
await tb.rc.mem_write(addr, test_data, 1000, 'ns')
# wait for write to complete
await tb.rc.mem_read(addr, 1, 1000, 'ns')
assert await ep.read_region(0, offset, length) == test_data
assert await tb.rc.mem_read(addr, length, 1000, 'ns') == test_data
for length in list(range(1, 32))+[1024]:
for offset in list(range(8))+list(range(4096-8, 4096)):
tb.log.info("Memory operation (64-bit BAR) length: %d offset: %d", length, offset)
addr = ti.bar_addr[1]+offset
test_data = bytearray([x % 256 for x in range(length)])
await tb.rc.mem_write(addr, test_data, 1000, 'ns')
# wait for write to complete
await tb.rc.mem_read(addr, 1, 1000, 'ns')
assert await ep.read_region(1, offset, length) == test_data
assert await tb.rc.mem_read(addr, length, 1000, 'ns') == test_data
for length in list(range(1, 8)):
for offset in list(range(8)):
tb.log.info("IO operation length: %d offset: %d", length, offset)
addr = ti.bar_addr[3]+offset
test_data = bytearray([x % 256 for x in range(length)])
await tb.rc.io_write(addr, test_data, 1000, 'ns')
assert await ep.read_region(3, offset, length) == test_data
assert await tb.rc.io_read(addr, length, 1000, 'ns') == test_data
async def run_test_p2p_dma(dut, ep1_index=0, ep2_index=1):
tb = TB(dut)
await tb.rc.enumerate(enable_bus_mastering=True, configure_msi=True)
tb.rc.log.setLevel(logging.DEBUG)
ep1 = tb.ep[ep1_index]
ep1.log.setLevel(logging.DEBUG)
ep2 = tb.ep[ep2_index]
ep2.log.setLevel(logging.DEBUG)
ti2 = tb.rc.tree.find_child_dev(ep2.pcie_id)
for length in list(range(1, 32))+[1024]:
for offset in list(range(8))+list(range(4096-8, 4096)):
tb.log.info("Memory operation (32-bit BAR) length: %d offset: %d", length, offset)
addr = ti2.bar_addr[0]+offset
test_data = bytearray([x % 256 for x in range(length)])
await ep1.mem_write(addr, test_data, 1000, 'ns')
# wait for write to complete
await ep1.mem_read(addr, 1, 1000, 'ns')
assert await ep2.read_region(0, offset, length) == test_data
assert await ep1.mem_read(addr, length, 1000, 'ns') == test_data
for length in list(range(1, 32))+[1024]:
for offset in list(range(8))+list(range(4096-8, 4096)):
tb.log.info("Memory operation (64-bit BAR) length: %d offset: %d", length, offset)
addr = ti2.bar_addr[1]+offset
test_data = bytearray([x % 256 for x in range(length)])
await ep1.mem_write(addr, test_data, 1000, 'ns')
# wait for write to complete
await ep1.mem_read(addr, 1, 1000, 'ns')
assert await ep2.read_region(1, offset, length) == test_data
assert await ep1.mem_read(addr, length, 1000, 'ns') == test_data
for length in list(range(1, 8)):
for offset in list(range(8)):
tb.log.info("IO operation length: %d offset: %d", length, offset)
addr = ti2.bar_addr[3]+offset
test_data = bytearray([x % 256 for x in range(length)])
await ep1.io_write(addr, test_data, 1000, 'ns')
assert await ep2.read_region(3, offset, length) == test_data
assert await ep1.io_read(addr, length, 1000, 'ns') == test_data
async def run_test_dma(dut, ep_index=0):
tb = TB(dut)
mem_base, mem_data = tb.rc.alloc_region(1024*1024)
io_base, io_data = tb.rc.alloc_io_region(1024)
await tb.rc.enumerate(enable_bus_mastering=True, configure_msi=True)
tb.rc.log.setLevel(logging.DEBUG)
ep = tb.ep[ep_index]
ep.log.setLevel(logging.DEBUG)
for length in list(range(1, 32))+[1024]:
for offset in list(range(8))+list(range(4096-8, 4096)):
tb.log.info("Memory operation (DMA) length: %d offset: %d", length, offset)
addr = mem_base+offset
test_data = bytearray([x % 256 for x in range(length)])
await ep.mem_write(addr, test_data, 1000, 'ns')
# wait for write to complete
await ep.mem_read(addr, 1, 1000, 'ns')
assert mem_data[offset:offset+length] == test_data
assert await ep.mem_read(addr, length, 1000, 'ns') == test_data
for length in list(range(1, 8)):
for offset in list(range(8)):
tb.log.info("IO operation (DMA) length: %d offset: %d", length, offset)
addr = io_base+offset
test_data = bytearray([x % 256 for x in range(length)])
await ep.io_write(addr, test_data, 1000, 'ns')
assert io_data[offset:offset+length] == test_data
assert await ep.io_read(addr, length, 1000, 'ns') == test_data
async def run_test_msi(dut, ep_index=0):
tb = TB(dut)
await tb.rc.enumerate(enable_bus_mastering=True, configure_msi=True)
tb.rc.log.setLevel(logging.DEBUG)
ep = tb.ep[ep_index]
ep.log.setLevel(logging.DEBUG)
for k in range(32):
tb.log.info("Send MSI %d", k)
await ep.msi_cap.issue_msi_interrupt(k)
event = tb.rc.msi_get_event(ep.pcie_id, k)
event.clear()
await event.wait()
if cocotb.SIM_NAME:
for test in [
run_test_rc_mem,
run_test_config,
run_test_enumerate,
]:
factory = TestFactory(test)
factory.generate_tests()
factory = TestFactory(run_test_ep_mem)
factory.add_option("ep_index", range(4))
factory.generate_tests()
factory = TestFactory(run_test_p2p_dma)
factory.add_option("ep1_index", [0, 1])
factory.add_option("ep2_index", [2, 3])
factory.generate_tests()
factory = TestFactory(run_test_dma)
factory.add_option("ep_index", range(4))
factory.generate_tests()
factory = TestFactory(run_test_msi)
factory.add_option("ep_index", range(4))
factory.generate_tests()
# cocotb-test
tests_dir = os.path.dirname(__file__)
def test_pcie(request):
dut = "test_pcie"
module = os.path.splitext(os.path.basename(__file__))[0]
toplevel = dut
verilog_sources = [
os.path.join(os.path.dirname(__file__), f"{dut}.v"),
]
sim_build = os.path.join(tests_dir, "sim_build",
request.node.name.replace('[', '-').replace(']', ''))
cocotb_test.simulator.run(
python_search=[tests_dir],
verilog_sources=verilog_sources,
toplevel=toplevel,
module=module,
sim_build=sim_build,
)
| [
"[email protected]"
] | |
2848a24526ee941ad0e15cb48b0e707173850625 | 25c03427485d43c41ff124e7ac2b856cd586596f | /account/models.py | df3e145488206ca42d63d4e679852d7e68cc3c8d | [] | no_license | jfeldman777/s-cool-project | d1012cef7ca9835e6cdaf85495eca7aee191161f | adb38eb3932841e692e70f01e9688bad4bece3f8 | refs/heads/master | 2020-05-26T08:16:28.890998 | 2017-06-05T17:49:09 | 2017-06-05T17:49:09 | 82,473,288 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 464 | py | from django.db import models
# Create your models here.
from django.contrib.auth.models import User
#from .fields import AutoOneToOneField
#class Work(models.Model):
#    work = models.CharField(max_length = 100, verbose_name = 'Work')
#class Profile(models.Model):
# user = AutoOneToOneField(User, related_name='profile', verbose_name=('User'), primary_key=True)
#    work = models.ForeignKey(Work, verbose_name = 'Activity type')
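# Hedged sketch of the profile pattern the commented code points toward
# (illustrative names, not part of the original app):
# class Profile(models.Model):
#     user = models.OneToOneField(User, on_delete=models.CASCADE,
#                                 related_name='profile')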
| [
"[email protected]"
] | |
fdef72e6ed2b89d6e3312ca8d0abab76e55416d7 | 4f4d47d60e17f0e3b7120ebb26f3d83e0a1f8e66 | /tf_agents/bandits/environments/random_bandit_environment.py | 735af739c1d680f16bcb6a4df8ef9ba29e2bd8e5 | [
"Apache-2.0"
] | permissive | tfboyd/agents | 644ff1ee3961ac629671110c45f6c90234bd0ad1 | 858ee36aaaea6fbcf0e5ab1c12929c77bd17abae | refs/heads/master | 2020-11-28T15:46:31.635917 | 2020-06-26T06:05:57 | 2020-06-26T06:05:57 | 229,859,259 | 2 | 0 | Apache-2.0 | 2020-06-26T15:34:23 | 2019-12-24T02:56:28 | Python | UTF-8 | Python | false | false | 5,146 | py | # coding=utf-8
# Copyright 2018 The TF-Agents Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Bandit environment that returns random observations and rewards."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf # pylint: disable=g-explicit-tensorflow-version-import
from tf_agents.bandits.environments import bandit_tf_environment as bte
from tf_agents.specs import tensor_spec
from tf_agents.trajectories import time_step
__all__ = ['RandomBanditEnvironment']
def _raise_batch_shape_error(distribution_name, batch_shape):
raise ValueError('`{distribution_name}` must have batch shape with length 1; '
'got {batch_shape}. Consider using '
'`tensorflow_probability.distributions.Independent` '
'to manipulate batch and event shapes.'.format(
distribution_name=distribution_name,
batch_shape=batch_shape))
class RandomBanditEnvironment(bte.BanditTFEnvironment):
"""Bandit environment that returns random observations and rewards."""
def __init__(self,
observation_distribution,
reward_distribution,
action_spec=None):
"""Initializes an environment that returns random observations and rewards.
Note that `observation_distribution` and `reward_distribution` are expected
to have batch rank 1. That is, `observation_distribution.batch_shape` should
have length exactly 1. `tensorflow_probability.distributions.Independent` is
useful for manipulating batch and event shapes. For example,
```python
observation_distribution = tfd.Independent(tfd.Normal(tf.zeros([12, 3, 4]),
tf.ones([12, 3, 4])))
env = RandomBanditEnvironment(observation_distribution, ...)
env.observation_spec # tensor_spec.TensorSpec(shape=[3, 4], ...)
env.batch_size # 12
```
Args:
observation_distribution: a `tensorflow_probability.Distribution`.
Batches of observations will be drawn from this distribution. The
`batch_shape` of this distribution must have length 1 and be the same as
the `batch_shape` of `reward_distribution`.
reward_distribution: a `tensorflow_probability.Distribution`.
Batches of rewards will be drawn from this distribution. The
`batch_shape` of this distribution must have length 1 and be the same as
the `batch_shape` of `observation_distribution`.
action_spec: a `TensorSpec` describing the expected action. Note that
actions are ignored and do not affect rewards.
"""
observation_batch_shape = observation_distribution.batch_shape
reward_batch_shape = reward_distribution.batch_shape
reward_event_shape = reward_distribution.event_shape
if observation_batch_shape.rank != 1:
_raise_batch_shape_error(
'observation_distribution', observation_batch_shape)
if reward_batch_shape.rank != 1:
_raise_batch_shape_error(
'reward_distribution', observation_batch_shape)
if reward_event_shape.rank != 0:
raise ValueError('`reward_distribution` must have event_shape (); '
'got {}'.format(reward_event_shape))
if reward_distribution.dtype != tf.float32:
raise ValueError('`reward_distribution` must have dtype float32; '
                       'got {}'.format(reward_distribution.dtype))
if observation_batch_shape[0] != reward_batch_shape[0]:
raise ValueError(
'`reward_distribution` and `observation_distribution` must have the '
'same batch shape; got {} and {}'.format(
reward_batch_shape, observation_batch_shape))
batch_size = tf.compat.dimension_value(observation_batch_shape[0])
self._observation_distribution = observation_distribution
self._reward_distribution = reward_distribution
observation_spec = tensor_spec.TensorSpec(
shape=self._observation_distribution.event_shape,
dtype=self._observation_distribution.dtype,
name='observation_spec')
time_step_spec = time_step.time_step_spec(observation_spec)
super(RandomBanditEnvironment, self).__init__(time_step_spec=time_step_spec,
action_spec=action_spec,
batch_size=batch_size)
def _apply_action(self, action):
del action # unused
return self._reward_distribution.sample()
def _observe(self):
return self._observation_distribution.sample()
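# Hedged usage sketch (mirrors the docstring example above; assumes
# tensorflow_probability is available):
# import tensorflow_probability as tfp
# tfd = tfp.distributions
# obs = tfd.Independent(tfd.Normal(tf.zeros([12, 3, 4]), tf.ones([12, 3, 4])),
#                       reinterpreted_batch_ndims=2)
# rew = tfd.Normal(tf.zeros([12]), tf.ones([12]))
# env = RandomBanditEnvironment(obs, rew)  # batch_size == 12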
| [
"[email protected]"
] | |
9caca0f8ce96b7d3bbf4fae71bf63c14a480dee5 | e6c63e69a490f2292d2184e0edc1c6e44542a9fe | /accounts/migrations/0001_initial.py | 10c2390a8fdc4fae515783c2474aa32be7d051e3 | [] | no_license | Code-Institute-Submissions/the-issue-tracker | ed13afbaecfe69e89fd5dfdb0ba47e307b6a00a8 | d66c5295af62beb2d1fe0a73b54cb9c8f6893cb1 | refs/heads/master | 2020-08-01T06:27:11.317277 | 2019-09-25T16:55:31 | 2019-09-25T16:55:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,000 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.20 on 2019-06-29 16:39
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='UserProfile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('gender', models.CharField(blank=True, choices=[('M', 'Male'), ('F', 'Female')], max_length=1, null=True)),
('avatar', models.ImageField(default='../media/profile_images/male_def.png', upload_to='profile_images')),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
| [
"[email protected]"
] | |
30b997e0eababa53ec5866219513d392e2a3837f | a8f724826bc48b01d565ba3420c42e002d4393e1 | /competitive-programming/week-1/Day-1/product_of_other_numbers.py | d0a60febf4983a7d5d4bba96aa7bf66ea1c1f889 | [] | no_license | sheelajyothsna/competetive-programming | de605e87e2d436bfd392a9fd82616351868892bc | 55517702b5949291046b6aff6a8d15069bd3cf39 | refs/heads/master | 2020-03-21T15:10:33.555353 | 2018-07-19T06:53:54 | 2018-07-19T06:53:54 | 138,697,990 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,066 | py | import unittest
def get_products_of_all_ints_except_at_index(l):
# Make a list with the products
if len(l) < 2:
raise ValueError("cannot be defined")
productlist = [1] * len(l)
left = 1
for i in range(len(productlist)):
productlist[i] = productlist[i] * left
left = left * l[i]
right = 1
for i in range(len(productlist) - 1, -1, -1):
productlist[i] = productlist[i] * right
right = right * l[i]
return productlist
# Tests
class Test(unittest.TestCase):
def test_small_list(self):
actual = get_products_of_all_ints_except_at_index([1, 2, 3])
expected = [6, 3, 2]
self.assertEqual(actual, expected)
def test_longer_list(self):
actual = get_products_of_all_ints_except_at_index([8, 2, 4, 3, 1, 5])
expected = [120, 480, 240, 320, 960, 192]
self.assertEqual(actual, expected)
def test_list_has_one_zero(self):
actual = get_products_of_all_ints_except_at_index([6, 2, 0, 3])
expected = [0, 0, 36, 0]
self.assertEqual(actual, expected)
def test_list_has_two_zeros(self):
actual = get_products_of_all_ints_except_at_index([4, 0, 9, 1, 0])
expected = [0, 0, 0, 0, 0]
self.assertEqual(actual, expected)
def test_one_negative_number(self):
actual = get_products_of_all_ints_except_at_index([-3, 8, 4])
expected = [32, -12, -24]
self.assertEqual(actual, expected)
def test_all_negative_numbers(self):
actual = get_products_of_all_ints_except_at_index([-7, -1, -4, -2])
expected = [-8, -56, -14, -28]
self.assertEqual(actual, expected)
def test_error_with_empty_list(self):
with self.assertRaises(Exception):
get_products_of_all_ints_except_at_index([])
def test_error_with_one_number(self):
with self.assertRaises(Exception):
get_products_of_all_ints_except_at_index([1])
unittest.main(verbosity=2)
| [
"[email protected]"
] | |
6f7cf0647233c24297e242eb8919f003421920e2 | 259b7b74ac3945293aca2f4e9e5b0c55b40bb6eb | /services/web/test.py | af03022dea37facf3a089cd2f2ccd88b7f6ebe83 | [] | no_license | MILKTON/eight-queens-challenge | 7322fd1fe20e3db479cda0b0dded37f94394f13d | eebc7655e473cb7aea38e80262d2c75c0042559a | refs/heads/main | 2023-01-12T18:09:00.699474 | 2020-11-06T02:24:45 | 2020-11-06T02:24:45 | 306,782,925 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 469 | py | from project.reinas import coloca_reinas
soluciones = [0,1,0,0,2,10,4,40,92,352,724,2680]  # expected solution counts for n = 0..11 queens
def test_soluciones():
for i in range(len(soluciones)):
aux = len(coloca_reinas(i))
assert aux == soluciones[i]
def test_reina5():
aux = len(coloca_reinas(5))
assert aux == soluciones[5]
'''
A test that is expected to fail (kept commented out for reference):
def test_reina3():
    soluciones = [0,1,0,0,2,10,4,40,92,352,724,2680]
    aux = len(coloca_reinas(3))
    assert aux == soluciones[5]
'''
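# For context, a minimal backtracking solver consistent with the counts in
# `soluciones` might look like the sketch below. The real implementation
# lives in project.reinas and may differ; note the tests expect n == 0 to
# yield no solutions (soluciones[0] == 0), so the sketch special-cases it.
def _coloca_reinas_sketch(n):
    if n == 0:
        return []
    solutions = []
    def backtrack(cols):
        row = len(cols)
        if row == n:
            solutions.append(tuple(cols))
            return
        for col in range(n):
            # a queen is safe if it shares no column and no diagonal
            if all(col != c and abs(col - c) != row - r
                   for r, c in enumerate(cols)):
                backtrack(cols + [col])
    backtrack([])
    return solutions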
| [
"[email protected]"
] | |
6b30625c9f376bf2b48d72f16116be482becfa26 | 79206da036de3f36339c8c6a8e9f9f29c38d4d52 | /scraping/scraping.py | 0369768db4ce7ab593a4af897fc9c26ea717ef87 | [] | no_license | BanSolo/SSD_price_predictor | a144af1edfc86a14e0c2eb242c31c90770268660 | 36a292ff9eae0a37b506a8f0b5707e760045a565 | refs/heads/main | 2023-08-11T07:17:13.485426 | 2021-09-20T17:29:17 | 2021-09-20T17:29:17 | 361,111,597 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,848 | py | # -*- coding: utf-8 -*-
import pandas as pd
import requests
from bs4 import BeautifulSoup
base_url = 'https://ipon.hu/shop/termek/'
product_links = []
product_prices = []
# az összes oldalon végig iterálva elmentjük a linkeket és az árakat
for page in range(1, 22):
r = requests.get(f'https://ipon.hu/shop/group/4055/product/data?page={page}')
data = r.json()
items = data['items']
    # append the product links and product prices to the lists
for i in range(len(items)):
product_links.append(base_url + data['items'][i]['slug'] + '/' + str(data['items'][i]['id']))
product_prices.append(data['items'][i]['grossPrice'])
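# The loop above assumes the endpoint returns JSON shaped roughly like this
# (illustrative, reconstructed from the fields accessed; actual responses
# may carry additional keys):
# {
#   "items": [
#     {"slug": "some-ssd-product", "id": 123456, "grossPrice": 34990},
#     ...
#   ]
# }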
# store the information found at each product link
ssd_array = []
for idx, link in enumerate(product_links):
# print(idx, ' ', link)
r = requests.get(link)
soup = BeautifulSoup(r.content, 'lxml')
    # product name
name = ' '.join(soup.find('h2', class_='product__title').text.split())
    # table containing the product specifications
try:
table = soup.find('table', attrs={'class':'product-table'})
table_body = table.find('tbody')
rows = table_body.find_all('tr')
values_array = []
cols_array = []
for row in rows:
cols = row.find_all('td')
cols_array.append(' '.join(cols[0].text.split()))
values_array.append(' '.join(cols[1].text.split()))
    except (AttributeError, IndexError):
        # skip products whose page has no parseable specification table
        continue
# dictionary az adatok párosításához
ssd = {}
ssd['Megnevezés'] = name
for i in range(len(cols_array)):
ssd[cols_array[i]] = values_array[i]
ssd['Ár (FT)'] = product_prices[idx]
ssd_array.append(ssd)
# save the scraped data
df = pd.DataFrame(ssd_array)
#df.to_csv('../data/ssd.csv', index=False, encoding='utf-8')
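# Resulting DataFrame layout (for reference): one row per product, with a
# 'Megnevezés' (name) column, one column per specification label scraped
# from the tables, and a final 'Ár (FT)' gross-price column.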
| [
"[email protected]"
] |