Dataset row schema (reconstructed from the flattened viewer header):

| Field | Type | Length / values |
|---|---|---|
| blob_id | string | length 40 |
| directory_id | string | length 40 |
| path | string | length 5–283 |
| content_id | string | length 40 |
| detected_licenses | sequence | length 0–41 |
| license_type | string | 2 classes |
| repo_name | string | length 7–96 |
| snapshot_id | string | length 40 |
| revision_id | string | length 40 |
| branch_name | string | 58 classes |
| visit_date | timestamp[us] | |
| revision_date | timestamp[us] | |
| committer_date | timestamp[us] | |
| github_id | int64 | 12.7k–662M, nullable (⌀) |
| star_events_count | int64 | 0–35.5k |
| fork_events_count | int64 | 0–20.6k |
| gha_license_id | string | 11 classes |
| gha_event_created_at | timestamp[us] | |
| gha_created_at | timestamp[us] | |
| gha_language | string | 43 classes |
| src_encoding | string | 9 classes |
| language | string | 1 value |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 7–5.88M |
| extension | string | 30 classes |
| content | string | length 7–5.88M |
| authors | sequence | length 1 |
| author | string | length 0–73 |
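The file records below follow this schema. A minimal sketch of reading such a dump with the Hugging Face `datasets` library; the dataset id here is a placeholder assumption, not taken from this page:

```python
# Minimal sketch, assuming a Hugging Face dataset with the schema above.
# "org/code-dump" is a placeholder id, not from the original page.
from datasets import load_dataset

ds = load_dataset("org/code-dump", split="train", streaming=True)
row = next(iter(ds))
print(row["path"], row["repo_name"], row["license_type"], row["length_bytes"])
```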
---
File: /great_powers.py (repo: jgatka/great_powers, license: none, UTF-8 Python, 8,984 bytes)

# Console app to assign players to great powers for the board game DIPLOMACY
# This code is functional and pragmatic...there's a reason I don't work in the development dept.
# Josh Gatka
# Import libraries
import random
from random import shuffle
# to do list
'''
TODO Need to insert a 'Board number x' statement in between board assignments. This will require a board count variable as well as a current board variable.
'''
# Declare variable for the number of players and set it equal to zero
# full path to the file containing the names of players, you will need to configure this
player_names_source_txt = ''
# generic press any key message string
press_any_key = "Press any key to continue..."
# the number of players that will need to be assigned to a board and a great power
player_count = 0
# The list of all of the great powers in DIPLOMACY
great_powers = ['Austria-Hungary', 'France', 'Germany', 'Great Britain', 'Italy', 'Russia', 'Turkey']
# create a list for all of the player names
player_names = []
current_board = 0
# The number of players that have been assigned a board and a great power
# This variable is used inside of the big assigning loop
num_assigned_players = 0
# Horizontal line for separating boards
horizontal_rule = '*______________________________________________________________________________*\n'
# Declare functions
# Function to check the number of players and return a message:
def check_player_count(player_count):
# Check that there are at least 2 players
if player_count < 2:
print('You need at least 2 players!')
# Check that there are no boards with only one player
elif player_count % 7 == 1:
print('\nYou need at least 1 more player. Otherwise one of the boards will have only 1 player. :(\n ')
# Otherwise, you're good to go, confirm number of players
else:
        print('Ok, so %r players!' % player_count)
# Calculate the number of boards that will have 7 players (full board)
def num_full_boards(player_count):
num_full_boards = (player_count / 7)
return num_full_boards
# calculate the number of boards that will be needed which will not have a full table of 7 players
def incomplete_boards(player_count):
if player_count % 7 > 0:
return 1
else:
return 0
# Calculate the number of players who will be playing on a board that does not have 7 players
# These players will be playing a variation of the rules, where they control multiple great powers
def incomplete_board_players(player_count):
num_extra_players = (player_count % 7)
return num_extra_players
# function to get player names from a text file
def get_player_names_from_txt(names_text_file):
r = open(names_text_file, 'r')
for line in r:
player_names.append(line.strip())
player_count = len(player_names)
print('\nThere are %r players.\nThere will be %r full board(s), each with 7 players.\nThere will be %r incomplete board(s), with %r players\n' % (player_count, num_full_boards(player_count), incomplete_boards(player_count), incomplete_board_players(player_count)))
    r.close()
raw_input(press_any_key)
'''
# Below is the old logic, which required that users manually enter the number of players and their names.
# User inputs number of players, if number is less than 3 they will be forced to choose again
while player_count < 2 or player_count % 7 == 1:
player_count =int(raw_input("How many players?\n>"))
# Check that there are at least three players, and that there are no boards with only one player
check_player_count(player_count)
# Get player names, add them to a list
current_player = 1
while current_player <=1 or current_player <= player_count:
print('Enter name for Player %d.' % current_player)
current_player_name = raw_input('>')
player_names.append(current_player_name)
current_player += 1
'''
get_player_names_from_txt(player_names_source_txt)
# insert blank line
print('\n')
# Randomize the list of players in the list
shuffle(player_names)
# count the number of players in the list
player_count = len(player_names)
# Go through every single name in the list of player names and assign a board and a great power
for i in range(len(player_names)):
# count the number of great powers in the list
count_GP = len(great_powers)
if (num_assigned_players % 7) == 0:
current_board += 1
print(horizontal_rule)
print('Board %d: ' % (current_board))
else:
print('Board %d: ' % (current_board))
# Determine whether or not the great powers list needs to be repopulated
if count_GP == 0 and len(player_names) - i > 6:
# if the great powers list has been depleted, and there are more than 6 players left to be assigned a great power, the list is repopulated with the names of the
# original 7 great powers so that the next 7 players may be assigned a great power.
great_powers = ['Austria-Hungary', 'France', 'Germany', 'Great Britain', 'Italy', 'Russia', 'Turkey']
elif count_GP == 0 and len(player_names) - i == 6:
# print message explaining special rules for a six player board
print('\nSpecial rules for a six player board: Italian units hold in position and defend themselves, but do not support each other. Units belonging to any of the players can support them in their holding position. If Italian units are forced to retreat, they are disbanded.\nBoard %d:' % (current_board))
# repopulate the great powers list with all of the original great powers save for Italy
great_powers = ['Austria-Hungary', 'France', 'Germany', 'Great Britain', 'Russia', 'Turkey']
elif count_GP == 0 and len(player_names) - i == 5:
# print message explaining special rules for a five player board
print('\nSpecial rules for a five player board: Italian and German units hold in position and defend themselves, but do not support each other. Units belonging to any of the players can support them in their holding position. If Italian or German units are forced to retreat, they are disbanded.\nBoard %d:' % (current_board))
# repopulate the great powers list with all of the original great powers save for Italy and Germany
great_powers = ['Austria-Hungary', 'France', 'Great Britain', 'Russia', 'Turkey']
elif count_GP == 0 and len(player_names) - i == 4:
# print message explaining the special rules for a four player board
print('\nSpecial rules for a four player board: One player plays as England, the other three play the following pairs:\nAustria-Hungary & France\nGermany & Turkey\nItaly & Russia\n\nBoard %d:' % (current_board))
# repopulate the great powers list according to the four player rules
great_powers = ['England', 'Austria-Hungary & France', 'Germany & Turkey', 'Italy & Russia']
elif count_GP == 0 and len(player_names) - i == 3:
# print message explaining the special rules for a three player board
print('\nSpecial rules for a three player board: One player controls England, Germany, and Austria. The second player controls Russia & Italy. The third player controls France & Turkey.\nBoard %d:' % (current_board))
# repopulate the great powers list according to the three player rules
great_powers = ['England, Germany & Austria-Hungary', 'Russia & Italy', 'France & Turkey']
elif count_GP == 0 and len(player_names) - i == 2:
# print message explaining the special rules for a two player board
print('\nSpecial rules for a two player board: This board will function as a World War I simulation. One player controls England, France, & Russia. The second player controls Austria-Hungary, Germany, & Turkey. The game begins in 1914. Before the Fall 1914 adjustments, flip a coin. Italy joins the winner of the toss in Spring 1915. The first player to control 24 supply centers wins. This is also an enjoyable way for two new players to learn the rules.\nBoard %d:' % (current_board))
# repopulate the great powers list according to the two player rules
great_powers = ['England, France & Russia', 'Austria-Hungary, Germany & Turkey']
elif count_GP == 0 and len(player_names) - i == 1:
#Throw an exception because we should not have a 1 player board.
print('\nERROR: You should not be seeing this message. Earlier logic should have eliminated the possibility of a one player board\n')
# count the number of great powers in the list
count_GP = len(great_powers)
# generate a random number between 0 and the number of random powers remaining
GP_randomizer = random.randint(0,(count_GP - 1))
# print the player's name and their assigned great power
print(player_names[i] + '\t' + great_powers[GP_randomizer])
# remove great power from the list
great_powers.pop(GP_randomizer)
# number of assigned players + 1
num_assigned_players += 1
# Insert a blank line to separate board assignments
print('\n')
print('Boards & Great Powers have been assigned for all players.\n')
raw_input('Press any key to exit...')

---
File: /Weather2.py (repo: LevBuslaev/python_projects, license: none, UTF-8 Python, 2,419 bytes)

import pyowm
owm = pyowm.OWM('8b02f1beea4840f1ae5c3736e3ea68d3', language = "ru")
place = input("Погода. Введите город: ")
# Search for current weather in Novorossiysk (Russia)
observation = owm.weather_at_place(place)
w = observation.get_weather()
temp = w.get_temperature('celsius')["temp"]
speed = w.get_wind()["speed"] #["deg"]
deg = w.get_wind()["deg"]
# humidity = w.get_humidity()["humidity"]
print( "В городе " + place + " сейчас " + w.get_detailed_status())
print( " Температура сейчас " + str(temp) + "⁰C")
#print( "Ветер сейчас " + str(speed) + " м/сек, " + " Направление ветра " + str(deg) + "⁰C")
if (deg >= 0 and deg <= 22.5) or deg > 337.5:
print(" Ветер - Северный, " + str(speed) + " м/сек., " + " Направление ветра " + str(deg) + "⁰")
elif deg > 22.5 and deg <= 67.5:
print(" Ветер - Северо-Восточный, " + str(speed) + " м/сек., " + " Направление ветра " + str(deg) + "⁰")
elif deg > 67.5 and deg <= 112.5:
print(" Ветер - Восточный, " + str(speed) + " м/сек., " + " Направление ветра " + str(deg) + "⁰")
elif deg > 112.5 and deg <= 157.5:
print(" Ветер - Юго-Восточный, " + str(speed) + " м/сек., " + " Направление ветра " + str(deg) + "⁰")
elif deg > 157.5 and deg <= 202.5:
print(" Ветер - Южный, " + str(speed) + " м/сек., " + " Направление ветра " + str(deg) + "⁰")
elif deg > 202.5 and deg <= 247.5:
print(" Ветер - Юго-Западный, " + str(speed) + " м/сек., " + " Направление ветра " + str(deg) + "⁰")
elif deg > 247.5 and deg <= 292.5:
print(" Ветер - Западный, " + str(speed) + " м/сек., " + " Направление ветра " + str(deg) + "⁰")
elif deg > 292.5 and deg <= 337.5:
print(" Ветер - Северо-Западный, " + str(speed) + " м/сек., " + " Направление ветра " + str(deg) + "⁰")
else:
print(" Штиль " + str(deg) + ' м/сек')
# print(" Влажность воздуха - " + humidity + "%")
print('Справочно: ветер дует в компаc, течение из компаса :)')
#Direction of the wind
input()

---
File: /532_K-diff_Pairs_in_an_Array.py (repo: jincurry/LeetCode_python, license: none, UTF-8 Python, 1,676 bytes)

# Given an array of integers and an integer k, you need to find the number of unique
# k-diff pairs in the array. Here a k-diff pair is defined as an integer pair (i, j),
# where i and j are both numbers in the array and their absolute difference is k.
#
# Example 1:
# Input: [3, 1, 4, 1, 5], k = 2
# Output: 2
# Explanation: There are two 2-diff pairs in the array, (1, 3) and (3, 5).
# Although we have two 1s in the input, we should only return the number of unique pairs.
# Example 2:
# Input:[1, 2, 3, 4, 5], k = 1
# Output: 4
# Explanation: There are four 1-diff pairs in the array, (1, 2), (2, 3), (3, 4) and (4, 5).
# Example 3:
# Input: [1, 3, 1, 5, 4], k = 0
# Output: 1
# Explanation: There is one 0-diff pair in the array, (1, 1).
from collections import Counter
class Solution:
def findPairs(self, nums, k):
"""
:type nums: List[int]
:type k: int
:rtype: int
"""
if k < 0:
return 0
counter = Counter(nums)
result = 0
if k > 0:
for x in counter:
low = x - k
high = x + k
if counter[low] >= 1:
result += 1
if counter[high] >= 1:
result += 1
return result // 2
else:
for x in counter:
if counter[x] >= 2:
result += 1
return result
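
# Editorial note: building the Counter and scanning its distinct keys are each O(n),
# so findPairs runs in O(n) time with O(n) extra space.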
if __name__ == '__main__':
print(Solution().findPairs([1, 1, 1, 1, 1, 1], k=0))
print(Solution().findPairs([1, 2, 3, 4, 5], k=1))
print(Solution().findPairs([1, 1, 3, 4, 5], k=2))
print(Solution().findPairs([1, 3, 1, 5, 4], k=0))

---
File: /aa/PYT/file_han/aa.py (repo: KSrinuvas/ALL, license: none, UTF-8 Python, 900 bytes)

#!/usr/bin/python3
import re
import os
## write a file
fobj = open("aa.txt","w")
print (fobj.mode)
fobj.write("this is srinu\n")
fobj.write("this is my email address [email protected]\n")
fobj.write("good bye\n")
## close file handler
fobj.close()
## read file
fh = open("aa.txt","r")
print (fh.name)
print (fh.mode)
for line in fh.readlines():
line = line.strip()
ll = line.split(' ')
# print (line)
for a in ll:
if re.match(r'srinu',a):
print (a)
fh.seek(1, 0)
print (fh.read(10))
fh.close()
## rename / remove files and directories
os.rename('aa.txt','new_file.txt')
#os.remove('new_file.txt')
#os.mkdir("new")
#os.rmdir("new")
aa = os.getcwd()
print (aa)
try:
ff = open("new_file1.txt","w")
ff.write("hello\n")
except IOError:
print("Error : can\'t find file")
else:
print ("Written content file write successfully\n");
ff.close()

---
File: /MY_REPOS/misc-experiments/_FIREBFIRE/grpc/doc/python/sphinx/conf.py (repo: bgoonz/UsefulResourceRepo2.0, licenses: Apache-2.0, MIT, UTF-8 Python, 3,626 bytes)

# Copyright 2018 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -- Path setup --------------------------------------------------------------
import os
import sys
PYTHON_FOLDER = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..',
'..', '..', 'src', 'python')
sys.path.insert(0, os.path.join(PYTHON_FOLDER, 'grpcio'))
sys.path.insert(0, os.path.join(PYTHON_FOLDER, 'grpcio_channelz'))
sys.path.insert(0, os.path.join(PYTHON_FOLDER, 'grpcio_health_checking'))
sys.path.insert(0, os.path.join(PYTHON_FOLDER, 'grpcio_reflection'))
sys.path.insert(0, os.path.join(PYTHON_FOLDER, 'grpcio_status'))
sys.path.insert(0, os.path.join(PYTHON_FOLDER, 'grpcio_testing'))
# -- Project information -----------------------------------------------------
project = 'gRPC Python'
copyright = '2020, The gRPC Authors'
author = 'The gRPC Authors'
# Import generated grpc_version after the path been modified
import grpc_version
version = '.'.join(grpc_version.VERSION.split('.')[:3])
release = grpc_version.VERSION
if 'dev' in grpc_version.VERSION:
branch = 'master'
else:
branch = 'v%s.%s.x' % tuple(grpc_version.VERSION.split('.')[:2])
# -- General configuration ---------------------------------------------------
templates_path = ['_templates']
source_suffix = ['.rst', '.md']
master_doc = 'index'
language = 'en'
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
pygments_style = None
# --- Extensions Configuration -----------------------------------------------
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.viewcode',
'sphinx.ext.todo',
'sphinx.ext.napoleon',
'sphinx.ext.coverage',
'sphinx.ext.autodoc.typehints',
]
napoleon_google_docstring = True
napoleon_numpy_docstring = True
napoleon_include_special_with_doc = True
autodoc_default_options = {
'members': None,
}
autodoc_mock_imports = []
autodoc_typehints = 'description'
# -- HTML Configuration -------------------------------------------------
html_theme = 'alabaster'
html_theme_options = {
'fixed_sidebar': True,
'page_width': '1140px',
'show_related': True,
'analytics_id': 'UA-60127042-1',
'description': grpc_version.VERSION,
'show_powered_by': False,
}
html_static_path = ["_static"]
# -- Options for manual page output ------------------------------------------
man_pages = [(master_doc, 'grpcio', 'grpcio Documentation', [author], 1)]
# -- Options for Texinfo output ----------------------------------------------
texinfo_documents = [
(master_doc, 'grpcio', 'grpcio Documentation', author, 'grpcio',
'One line description of project.', 'Miscellaneous'),
]
# -- Options for Epub output -------------------------------------------------
epub_title = project
epub_exclude_files = ['search.html']
# -- Options for todo extension ----------------------------------------------
todo_include_todos = True
# -- Options for substitutions -----------------------------------------------
rst_epilog = '.. |grpc_types_link| replace:: https://github.com/grpc/grpc/blob/%s/include/grpc/impl/codegen/grpc_types.h' % branch

---
File: /test/_compat.py (repo: hibtc/madgui-old, license: MIT, UTF-8 Python, 511 bytes)

import unittest
class TestCase(object):
"""
Compatibility layer for unittest.TestCase
"""
try:
assertItemsEqual = unittest.TestCase.assertCountEqual
except AttributeError:
def assertItemsEqual(self, first, second):
"""Method missing in python2.6 and renamed in python3."""
self.assertEqual(sorted(first), sorted(second))
def assertLess(self, first, second):
"""Method missing in python2.6."""
self.assertTrue(first < second)
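

# Usage sketch (editorial addition, not part of the original module): the shim is
# designed to be mixed into a concrete unittest.TestCase subclass.
class ExampleCompatTest(TestCase, unittest.TestCase):
    def test_items_equal(self):
        # resolves to assertCountEqual on py3, sorted-compare fallback otherwise
        self.assertItemsEqual([2, 1], [1, 2])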

---
File: /jobs/aaa.py (repo: binhbt/flask_celery_demo, license: none, UTF-8 Python, 301 bytes)

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Tasks
"""
from app import celery
@celery.task(name='aaaa')
def aaaa(a, b):
print('Hello job aaaa')
return a + b
@celery.task(name='bbbb')
def bbbb(a, b):
print('Hello job bbbb')
print(a + b)
return a + b
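
# Usage sketch (editorial addition): with the Celery app configured and a worker
# running, these tasks would typically be enqueued with the standard API:
#   aaaa.delay(1, 2)            # returns an AsyncResult
#   bbbb.apply_async((3, 4))    # same call, explicit form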

---
File: /two/categories/admin.py (repo: memoer/nomadcoder-challenger-airbnb-django, license: none, UTF-8 Python, 284 bytes)

from django.contrib import admin
from . import models
"""
Here are the models you have to create:
"""
@admin.register(models.Category)
class CategoryAdmin(admin.ModelAdmin):
list_display = (
"name",
"kind",
)
list_filter = (
"kind",
)
search_fields = ("name",) | [
"[email protected]"
] | |
61bdf16b404bf29f47617846ccb8689242f77a38 | a24314b43d51dd8f0875a94676b6263fb5b35c52 | /Mail/main.py | 44fa52679ad9843ccb4f77279ebabbf9e99d8796 | [] | no_license | ShadmanSahil/Flask | 7fb28403960ca519b7c9d5cf0c50844a9b0f7fab | 80b20294631b4d026acd142103980b92dc5b3467 | refs/heads/master | 2022-12-15T04:05:46.482530 | 2020-09-20T02:56:49 | 2020-09-20T02:56:49 | 294,209,532 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,183 | py | from flask import Flask, render_template, redirect, url_for, request, flash
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, SubmitField, TextAreaField
from wtforms.validators import DataRequired, EqualTo, Email
from flask_sqlalchemy import SQLAlchemy
from flask_bcrypt import Bcrypt
from flask_login import LoginManager, UserMixin, login_required, login_user, logout_user, current_user
from flask_mail import Mail, Message
#configs
app=Flask(__name__)
db=SQLAlchemy(app)
app.config['SECRET_KEY']='yoursecretkey'
app.config['SQLALCHEMY_DATABASE_URI']='sqlite:///site.db'
bcrypt=Bcrypt(app)
login_manager = LoginManager(app)
login_manager.login_view = 'login'
@login_manager.user_loader
def load_user(user_id):
    return User.query.get(user_id)
app.config['MAIL_SERVER'] = 'smtp.gmail.com'
app.config['MAIL_PORT'] = 587
app.config['MAIL_USE_TLS'] = True
mail=Mail(app)
#forms
class RegisterForm(FlaskForm):
username=StringField('Username', validators=[DataRequired(message='This cannot be empty')])
email=StringField('Email', validators=[DataRequired(message='This cannot be empty'), Email(message='Enter a valid email')])
password=PasswordField('Password', validators=[DataRequired(message='This cannot be empty')])
    confirm=PasswordField('Confirm Password', validators=[DataRequired(message='This cannot be empty'), EqualTo('password', message='Passwords do not match')])
submit=SubmitField('Register')
class LoginForm(FlaskForm):
email=StringField('Email', validators=[DataRequired(message='This cannot be empty'), Email(message='Enter a valid email')])
password=PasswordField('Password', validators=[DataRequired(message='This cannot be empty')])
submit=SubmitField('Login')
class ComposeForm(FlaskForm):
receiver=StringField('To:', validators=[DataRequired(message='This cannot be empty')])
subject=StringField('Subject:')
content=TextAreaField('Content:', validators=[DataRequired(message='This cannot be empty')])
send=SubmitField('Send')
#models
class User(db.Model, UserMixin):
id=db.Column(db.Integer(), primary_key=True)
username=db.Column(db.String(20), nullable=False)
email=db.Column(db.String(30), nullable=False, unique=True)
password=db.Column(db.String(20), nullable=False)
mails = db.relationship('New', backref='author', lazy=True)
def __repr__(self):
return "User({}, {}, {})".format(self.username, self.email, self.id)
class New(db.Model):
id=db.Column(db.Integer(), primary_key=True)
sender=db.Column(db.String(20), nullable=False)
receiver=db.Column(db.String(20), nullable=False)
subject=db.Column(db.String(20))
content=db.Column(db.String(5000), nullable=False)
user_id = db.Column(db.Integer(), db.ForeignKey('user.id'), nullable=False)
def __repr__(self):
return "email {}(to {}, subject {}. {})".format(self.id, self.receiver, self.subject, self.content)
#routes
@app.route('/register', methods=['GET','POST'])
def register():
form=RegisterForm()
if request.method=='GET':
return render_template('register.html', form=form)
email=form.email.data
check=User.query.filter_by(email=email).first()
if not check:
app.config['MAIL_PASSWORD'] = form.password.data
password=bcrypt.generate_password_hash(form.password.data)
user=User(username=form.username.data, email=email, password=password)
db.session.add(user)
db.session.commit()
flash('You have successfully registered!', 'success')
return redirect(url_for('login'))
flash('This email has already been taken')
return render_template('register.html', form=form)
@app.route('/login', methods=['GET','POST'])
def login():
form=LoginForm()
if request.method=='GET':
return render_template('login.html', form=form)
email=form.email.data
user=User.query.filter_by(email=email).first()
if user:
password=form.password.data
check=bcrypt.check_password_hash(user.password, password)
if check:
flash('You have successfully logged in!')
login_user(user)
return redirect(url_for('index'))
else:
flash('Incorrect password')
return render_template('login.html', form=form)
flash('No user under this email. Please register.')
return redirect(url_for('register'))
@app.route('/logout')
@login_required
def logout():
logout_user()
flash('You have logged out!')
return redirect(url_for('login'))
@app.route('/')
@login_required
def index():
return render_template('index.html')
@app.route('/compose', methods=['GET', 'POST'])
@login_required
def compose():
form=ComposeForm()
if request.method=='GET':
return render_template('compose.html', form=form)
app.config['MAIL_USERNAME'] = current_user.email
to=form.receiver.data
sender=current_user.email
subject=form.subject.data
content=form.content.data
msg=Message(subject=subject, sender=sender, recipients=to.split(), body=content)
mail.send(msg)
new=New(receiver=to,sender=sender,subject=subject,content=content, user_id=current_user.id)
db.session.add(new)
db.session.commit()
flash('Email has been sent!')
return redirect(url_for('index'))
@app.route('/sent')
@login_required
def sent():
user=User.query.filter_by(email=current_user.email).first()
page=request.args.get('page', 1, type=int)
mails=New.query.filter_by(author=current_user)
return render_template('sent.html', mails=mails)
@app.route('/edit/<id>', methods=['GET','POST'])
@login_required
def edit(id):
if request.method=='GET':
id=int(id)
mail=New.query.filter_by(id=id).first()
return render_template('edit.html', mail=mail)
mail=New.query.filter_by(id=id).first()
trash=Trash(sender=current_user.id, receiver=mail.receiver, subject=mail.subject, content=mail.content, user_id=current_user.id)
db.session.delete(mail)
db.session.add(trash)
db.session.commit()
flash('Email deleted')
return redirect(url_for('sent'))
if __name__=='__main__':
app.run(debug=True)

---
File: /pysnmp/ESSENTIAL-COMMUNICATIONS-HIPPI-SWITCH.py (repo: agustinhenze/mibs.snmplabs.com, license: Apache-2.0, UTF-8 Python, 12,990 bytes)

#
# PySNMP MIB module ESSENTIAL-COMMUNICATIONS-HIPPI-SWITCH (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/ESSENTIAL-COMMUNICATIONS-HIPPI-SWITCH
# Produced by pysmi-0.3.4 at Mon Apr 29 18:52:26 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, ValueRangeConstraint, ValueSizeConstraint, ConstraintsUnion, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ValueRangeConstraint", "ValueSizeConstraint", "ConstraintsUnion", "SingleValueConstraint")
ecExperimental, = mibBuilder.importSymbols("ESSENTIAL-COMMUNICATIONS-GLOBAL-REG", "ecExperimental")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
Counter64, Gauge32, MibIdentifier, Unsigned32, IpAddress, TimeTicks, Integer32, Counter32, ModuleIdentity, NotificationType, ObjectIdentity, Bits, iso, MibScalar, MibTable, MibTableRow, MibTableColumn, enterprises = mibBuilder.importSymbols("SNMPv2-SMI", "Counter64", "Gauge32", "MibIdentifier", "Unsigned32", "IpAddress", "TimeTicks", "Integer32", "Counter32", "ModuleIdentity", "NotificationType", "ObjectIdentity", "Bits", "iso", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "enterprises")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
essentialCommunications = MibIdentifier((1, 3, 6, 1, 4, 1, 2159))
ecRoot = MibIdentifier((1, 3, 6, 1, 4, 1, 2159, 1))
ecProducts = MibIdentifier((1, 3, 6, 1, 4, 1, 2159, 1, 3))
ecExperimental = MibIdentifier((1, 3, 6, 1, 4, 1, 2159, 1, 6))
hippiSwitchMIB = MibIdentifier((1, 3, 6, 1, 4, 1, 2159, 1, 6, 1))
hippiSwitchMIBv103 = MibIdentifier((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1))
switchObjs = MibIdentifier((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1))
switchDescription = MibScalar((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 1), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: switchDescription.setStatus('mandatory')
switchNumOfPorts = MibScalar((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 2), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: switchNumOfPorts.setStatus('mandatory')
sccDescription = MibScalar((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sccDescription.setStatus('mandatory')
sccDateTime = MibScalar((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 4), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sccDateTime.setStatus('mandatory')
sccAdminStatus = MibScalar((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("up", 1), ("reset", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sccAdminStatus.setStatus('mandatory')
sccOperStatus = MibScalar((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("reseting", 2), ("busy", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: sccOperStatus.setStatus('mandatory')
backPlaneTable = MibTable((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 7), )
if mibBuilder.loadTexts: backPlaneTable.setStatus('mandatory')
backPlaneEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 7, 1), ).setIndexNames((0, "ESSENTIAL-COMMUNICATIONS-HIPPI-SWITCH", "backPlaneIndex"))
if mibBuilder.loadTexts: backPlaneEntry.setStatus('mandatory')
backPlaneIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 7, 1, 1), Gauge32())
if mibBuilder.loadTexts: backPlaneIndex.setStatus('mandatory')
backPlaneNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 7, 1, 2), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: backPlaneNumber.setStatus('mandatory')
backPlaneCard = MibTableColumn((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 7, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("none", 0), ("unknown", 1), ("parallel", 2), ("serial", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: backPlaneCard.setStatus('mandatory')
mICPowerUpInitError = MibTableColumn((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 7, 1, 4), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mICPowerUpInitError.setStatus('mandatory')
mICHippiParityBurstError = MibTableColumn((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 7, 1, 5), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mICHippiParityBurstError.setStatus('mandatory')
mICLinkReady = MibTableColumn((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 7, 1, 6), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mICLinkReady.setStatus('mandatory')
mICSourceInterconnect = MibTableColumn((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 7, 1, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mICSourceInterconnect.setStatus('mandatory')
mICSourceRequest = MibTableColumn((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 7, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mICSourceRequest.setStatus('mandatory')
mICSourceConnect = MibTableColumn((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 7, 1, 9), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mICSourceConnect.setStatus('mandatory')
mICSourceLastConnectAttempt = MibTableColumn((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 7, 1, 10), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mICSourceLastConnectAttempt.setStatus('mandatory')
mICDestinationInterconnect = MibTableColumn((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 7, 1, 11), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mICDestinationInterconnect.setStatus('mandatory')
mICDestinationRequest = MibTableColumn((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 7, 1, 12), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mICDestinationRequest.setStatus('mandatory')
mICDestinationConnect = MibTableColumn((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 7, 1, 13), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mICDestinationConnect.setStatus('mandatory')
mICByteCounterOverflow = MibTableColumn((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 7, 1, 14), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mICByteCounterOverflow.setStatus('mandatory')
mICNumberOfBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 7, 1, 15), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mICNumberOfBytes.setStatus('mandatory')
mICNumberOfPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 7, 1, 16), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mICNumberOfPackets.setStatus('mandatory')
mICConnectsSuccessful = MibTableColumn((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 7, 1, 17), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mICConnectsSuccessful.setStatus('mandatory')
sourceRouteTable = MibTable((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 8), )
if mibBuilder.loadTexts: sourceRouteTable.setStatus('mandatory')
sourceRouteEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 8, 1), ).setIndexNames((0, "ESSENTIAL-COMMUNICATIONS-HIPPI-SWITCH", "srcIndex"))
if mibBuilder.loadTexts: sourceRouteEntry.setStatus('mandatory')
srcIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 8, 1, 1), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: srcIndex.setStatus('mandatory')
srcRoute = MibTableColumn((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 8, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: srcRoute.setStatus('mandatory')
srcWriteRow = MibScalar((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 9), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: srcWriteRow.setStatus('mandatory')
destRouteTable = MibTable((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 10), )
if mibBuilder.loadTexts: destRouteTable.setStatus('mandatory')
destRouteEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 10, 1), ).setIndexNames((0, "ESSENTIAL-COMMUNICATIONS-HIPPI-SWITCH", "destIndex"))
if mibBuilder.loadTexts: destRouteEntry.setStatus('mandatory')
destIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 10, 1, 1), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: destIndex.setStatus('mandatory')
destRoute = MibTableColumn((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 10, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: destRoute.setStatus('mandatory')
destWriteRow = MibScalar((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 11), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: destWriteRow.setStatus('mandatory')
huntGroupTable = MibTable((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 12), )
if mibBuilder.loadTexts: huntGroupTable.setStatus('mandatory')
huntGroupEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 12, 1), ).setIndexNames((0, "ESSENTIAL-COMMUNICATIONS-HIPPI-SWITCH", "hg"))
if mibBuilder.loadTexts: huntGroupEntry.setStatus('mandatory')
hg = MibTableColumn((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 12, 1, 1), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hg.setStatus('mandatory')
hgOutPortList = MibTableColumn((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 12, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hgOutPortList.setStatus('mandatory')
hgLWriteRow = MibScalar((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 13), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hgLWriteRow.setStatus('mandatory')
huntGroupOrderTable = MibTable((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 14), )
if mibBuilder.loadTexts: huntGroupOrderTable.setStatus('mandatory')
huntGroupOrderEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 14, 1), ).setIndexNames((0, "ESSENTIAL-COMMUNICATIONS-HIPPI-SWITCH", "hg"))
if mibBuilder.loadTexts: huntGroupOrderEntry.setStatus('mandatory')
hgOrderIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 14, 1, 1), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hgOrderIndex.setStatus('mandatory')
hgOrderList = MibTableColumn((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 14, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hgOrderList.setStatus('mandatory')
hgOWriteRow = MibScalar((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 15), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hgOWriteRow.setStatus('mandatory')
hgSaveRestore = MibScalar((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("save", 1), ("restore", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hgSaveRestore.setStatus('mandatory')
routesSaveRestore = MibScalar((1, 3, 6, 1, 4, 1, 2159, 1, 3, 1, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("save", 1), ("restore", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: routesSaveRestore.setStatus('mandatory')
mibBuilder.exportSymbols("ESSENTIAL-COMMUNICATIONS-HIPPI-SWITCH", switchObjs=switchObjs, hgOrderIndex=hgOrderIndex, mICByteCounterOverflow=mICByteCounterOverflow, sourceRouteEntry=sourceRouteEntry, ecRoot=ecRoot, hgLWriteRow=hgLWriteRow, srcRoute=srcRoute, mICDestinationConnect=mICDestinationConnect, sccOperStatus=sccOperStatus, hgSaveRestore=hgSaveRestore, mICPowerUpInitError=mICPowerUpInitError, huntGroupOrderTable=huntGroupOrderTable, backPlaneEntry=backPlaneEntry, sccDateTime=sccDateTime, mICHippiParityBurstError=mICHippiParityBurstError, hgOutPortList=hgOutPortList, srcIndex=srcIndex, essentialCommunications=essentialCommunications, switchNumOfPorts=switchNumOfPorts, mICDestinationInterconnect=mICDestinationInterconnect, mICNumberOfBytes=mICNumberOfBytes, destRoute=destRoute, mICLinkReady=mICLinkReady, hippiSwitchMIB=hippiSwitchMIB, hippiSwitchMIBv103=hippiSwitchMIBv103, huntGroupTable=huntGroupTable, mICConnectsSuccessful=mICConnectsSuccessful, ecExperimental=ecExperimental, switchDescription=switchDescription, backPlaneTable=backPlaneTable, sccAdminStatus=sccAdminStatus, mICDestinationRequest=mICDestinationRequest, destRouteEntry=destRouteEntry, huntGroupEntry=huntGroupEntry, mICSourceRequest=mICSourceRequest, mICSourceLastConnectAttempt=mICSourceLastConnectAttempt, destIndex=destIndex, mICNumberOfPackets=mICNumberOfPackets, ecProducts=ecProducts, srcWriteRow=srcWriteRow, backPlaneNumber=backPlaneNumber, hgOrderList=hgOrderList, destRouteTable=destRouteTable, backPlaneIndex=backPlaneIndex, mICSourceConnect=mICSourceConnect, sourceRouteTable=sourceRouteTable, destWriteRow=destWriteRow, sccDescription=sccDescription, huntGroupOrderEntry=huntGroupOrderEntry, routesSaveRestore=routesSaveRestore, hg=hg, hgOWriteRow=hgOWriteRow, mICSourceInterconnect=mICSourceInterconnect, backPlaneCard=backPlaneCard)

---
File: /1011.py (repo: ieehee/baekjoon_solutions, license: none, UTF-8 Python, 340 bytes)

import sys
import math
input = sys.stdin.readline
T = int(input())
Ds = []
for i in range(T):
x, y = map(int, input().split())
Ds.append(y-x)
for D in Ds:
D_isqrt = math.isqrt(D)
if D_isqrt**2 == D:
print(2*D_isqrt-1)
elif D_isqrt**2+D_isqrt < D:
print(2*D_isqrt+1)
else:
print(2*D_isqrt)

---
File: /pro_div.py (repo: Ponkiruthika112/codeset6, license: none, UTF-8 Python, 61 bytes)

# for division
n,k,m=map(int,input().split())
print((n*k)//m)

---
File: /create_db.py (repo: wojlas/messenger, license: none, UTF-8 Python, 2,610 bytes)

import re
from psycopg2 import connect, OperationalError
from psycopg2.errors import DuplicateDatabase, DuplicateTable
'''Program to create a database (name set in db_name below) and tables for users and messages'''
# db_name = input("Enter name of database: ")
db_name = 'messenger_db'
create_users = """
create table users (
id serial,
username varchar(255),
hashed_password varchar(80) unique,
primary key(id)
);"""
create_messages = """
create table messages (
id serial,
from_id int,
to_id int,
creation_date timestamp,
text varchar(255),
primary key(id),
foreign key(from_id) references users(id) on delete cascade,
foreign key(to_id) references users(id) on delete cascade
);"""
#login data
db_user = "postgres"
db_password = "coderslab"
host = "127.0.0.1"
def execute_sql(sql_code, db_name):
"""
Run given sql code with psycopg2.
:param str sql_code: sql code to run
    :param str db_name: name of the database
    :rtype: list
    :return: data from the psycopg2 cursor as a list (can be None if nothing to fetch).
"""
result = None
try:
cnx = connect(user=db_user, password=db_password, host=host, database=db_name)
cnx.autocommit = True
cursor = cnx.cursor()
cursor.execute(sql_code)
if re.match(r'(?i)select', sql_code):
result = cursor.fetchall()
print("Sukces")
except OperationalError as e:
print("Błąd!", e)
return
cursor.close()
cnx.close()
return result
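
# Usage sketch (editorial addition; the query assumes the users table created below):
# rows = execute_sql("select * from users;", db_name)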
def create_db():
    '''Database creation function.
    Uses execute_sql to connect with psql.
    Returns "Database created!" on success,
    or an appropriate message if the database already exists.'''
try:
execute_sql(sql_code= f"create database {db_name};",
db_name='')
return "Database created!"
except DuplicateDatabase:
return "Database already exist"
pass
def create_users_table():
# create table users in database
try:
execute_sql(sql_code= create_users,
db_name=db_name)
return "Table users created!"
except DuplicateTable:
return "Table Users already exist"
def create_messages_table():
# create table messages in database
try:
execute_sql(sql_code=create_messages,
db_name=db_name)
return "Table Messages created!"
except DuplicateTable:
return "Table Messages already exist"
# print(create_db())
# print(create_users_table())
# print(create_messages_table())

---
File: /test/unit/common/middleware/test_proxy_logging.py (repo: haluomao/swift, license: Apache-2.0, UTF-8 Python, 8,888 bytes)

# Copyright (c) 2010-2011 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from urllib import quote, unquote
import cStringIO as StringIO
from webob import Request
from swift.common.middleware import proxy_logging
class FakeApp(object):
def __init__(self, body=['FAKE APP']):
self.body = body
def __call__(self, env, start_response):
start_response('200 OK', [('Content-Type', 'text/plain')])
while env['wsgi.input'].read(5):
pass
return self.body
class FileLikeExceptor(object):
def __init__(self):
pass
def read(self, len):
raise IOError('of some sort')
def readline(self, len=1024):
raise IOError('of some sort')
class FakeAppReadline(object):
def __call__(self, env, start_response):
start_response('200 OK', [('Content-Type', 'text/plain')])
line = env['wsgi.input'].readline()
return ["FAKE APP"]
class FakeLogger(object):
def __init__(self, *args, **kwargs):
self.msg = ''
def info(self, string):
self.msg = string
def start_response(*args):
pass
class TestProxyLogging(unittest.TestCase):
def test_basic_req(self):
app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {})
app.access_logger = FakeLogger()
req = Request.blank('/', environ={'REQUEST_METHOD': 'GET'})
resp = app(req.environ, start_response)
resp_body = ''.join(resp)
log_parts = app.access_logger.msg.split()
self.assertEquals(log_parts[3], 'GET')
self.assertEquals(log_parts[4], '/')
self.assertEquals(log_parts[5], 'HTTP/1.0')
self.assertEquals(log_parts[6], '200')
self.assertEquals(resp_body, 'FAKE APP')
self.assertEquals(log_parts[11], str(len(resp_body)))
def test_multi_segment_resp(self):
app = proxy_logging.ProxyLoggingMiddleware(FakeApp(
['some', 'chunks', 'of data']), {})
app.access_logger = FakeLogger()
req = Request.blank('/', environ={'REQUEST_METHOD': 'GET'})
resp = app(req.environ, start_response)
resp_body = ''.join(resp)
log_parts = app.access_logger.msg.split()
self.assertEquals(log_parts[3], 'GET')
self.assertEquals(log_parts[4], '/')
self.assertEquals(log_parts[5], 'HTTP/1.0')
self.assertEquals(log_parts[6], '200')
self.assertEquals(resp_body, 'somechunksof data')
self.assertEquals(log_parts[11], str(len(resp_body)))
def test_log_headers(self):
app = proxy_logging.ProxyLoggingMiddleware(FakeApp(),
{'log_headers': 'yes'})
app.access_logger = FakeLogger()
req = Request.blank('/', environ={'REQUEST_METHOD': 'GET'})
resp = app(req.environ, start_response)
exhaust_generator = [x for x in resp]
log_parts = app.access_logger.msg.split()
headers = unquote(log_parts[14]).split('\n')
self.assert_('Host: localhost:80' in headers)
def test_upload_size(self):
app = proxy_logging.ProxyLoggingMiddleware(FakeApp(),
{'log_headers': 'yes'})
app.access_logger = FakeLogger()
req = Request.blank('/', environ={'REQUEST_METHOD': 'GET',
'wsgi.input': StringIO.StringIO('some stuff')})
resp = app(req.environ, start_response)
exhaust_generator = [x for x in resp]
log_parts = app.access_logger.msg.split()
self.assertEquals(log_parts[10], str(len('some stuff')))
def test_upload_line(self):
app = proxy_logging.ProxyLoggingMiddleware(FakeAppReadline(),
{'log_headers': 'yes'})
app.access_logger = FakeLogger()
req = Request.blank('/', environ={'REQUEST_METHOD': 'GET',
'wsgi.input': StringIO.StringIO(
'some stuff\nsome other stuff\n')})
resp = app(req.environ, start_response)
exhaust_generator = ''.join(resp)
log_parts = app.access_logger.msg.split()
self.assertEquals(log_parts[10], str(len('some stuff\n')))
def test_log_query_string(self):
app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {})
app.access_logger = FakeLogger()
req = Request.blank('/', environ={'REQUEST_METHOD': 'GET',
'QUERY_STRING': 'x=3'})
resp = app(req.environ, start_response)
exhaust_generator = [x for x in resp]
log_parts = app.access_logger.msg.split()
self.assertEquals(unquote(log_parts[4]), '/?x=3')
def test_client_logging(self):
app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {})
app.access_logger = FakeLogger()
req = Request.blank('/', environ={'REQUEST_METHOD': 'GET',
'REMOTE_ADDR': '1.2.3.4'})
resp = app(req.environ, start_response)
exhaust_generator = [x for x in resp]
log_parts = app.access_logger.msg.split()
self.assertEquals(log_parts[0], '1.2.3.4') # client ip
self.assertEquals(log_parts[1], '1.2.3.4') # remote addr
def test_proxy_client_logging(self):
app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {})
app.access_logger = FakeLogger()
req = Request.blank('/', environ={'REQUEST_METHOD': 'GET',
'REMOTE_ADDR': '1.2.3.4',
'HTTP_X_FORWARDED_FOR': '4.5.6.7,8.9.10.11'
})
resp = app(req.environ, start_response)
exhaust_generator = [x for x in resp]
log_parts = app.access_logger.msg.split()
self.assertEquals(log_parts[0], '4.5.6.7') # client ip
self.assertEquals(log_parts[1], '1.2.3.4') # remote addr
app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {})
app.access_logger = FakeLogger()
req = Request.blank('/', environ={'REQUEST_METHOD': 'GET',
'REMOTE_ADDR': '1.2.3.4',
'HTTP_X_CLUSTER_CLIENT_IP': '4.5.6.7'
})
resp = app(req.environ, start_response)
exhaust_generator = [x for x in resp]
log_parts = app.access_logger.msg.split()
self.assertEquals(log_parts[0], '4.5.6.7') # client ip
self.assertEquals(log_parts[1], '1.2.3.4') # remote addr
def test_facility(self):
app = proxy_logging.ProxyLoggingMiddleware(FakeApp(),
{'log_headers': 'yes', 'access_log_facility': 'whatever'})
def test_filter(self):
factory = proxy_logging.filter_factory({})
self.assert_(callable(factory))
self.assert_(callable(factory(FakeApp())))
def test_unread_body(self):
app = proxy_logging.ProxyLoggingMiddleware(
FakeApp(['some', 'stuff']), {})
app.access_logger = FakeLogger()
req = Request.blank('/', environ={'REQUEST_METHOD': 'GET'})
resp = app(req.environ, start_response)
read_first_chunk = next(resp)
resp.close() # raise a GeneratorExit in middleware app_iter loop
log_parts = app.access_logger.msg.split()
self.assertEquals(log_parts[6], '499')
self.assertEquals(log_parts[11], '4') # write length
def test_disconnect_on_readline(self):
app = proxy_logging.ProxyLoggingMiddleware(FakeAppReadline(), {})
app.access_logger = FakeLogger()
req = Request.blank('/', environ={'REQUEST_METHOD': 'GET',
'wsgi.input': FileLikeExceptor()})
try:
resp = app(req.environ, start_response)
body = ''.join(resp)
except Exception:
pass
log_parts = app.access_logger.msg.split()
self.assertEquals(log_parts[6], '499')
self.assertEquals(log_parts[10], '-') # read length
def test_disconnect_on_read(self):
app = proxy_logging.ProxyLoggingMiddleware(
FakeApp(['some', 'stuff']), {})
app.access_logger = FakeLogger()
req = Request.blank('/', environ={'REQUEST_METHOD': 'GET',
'wsgi.input': FileLikeExceptor()})
try:
resp = app(req.environ, start_response)
body = ''.join(resp)
except Exception:
pass
log_parts = app.access_logger.msg.split()
self.assertEquals(log_parts[6], '499')
self.assertEquals(log_parts[10], '-') # read length
if __name__ == '__main__':
unittest.main()

---
File: /about/views.py (repo: sammonsg/personal_site, license: none, UTF-8 Python, 115 bytes)

from django.views import generic
class AboutMeView(generic.TemplateView):
template_name = 'about/about.html'

---
File: /otzi/agenda/migrations/0002_auto_20200407_1413.py (repo: GabrielUlisses/Otzi-tattoo, license: none, UTF-8 Python, 1,101 bytes)

# Generated by Django 3.0.3 on 2020-04-07 17:13
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('tatuador', '0003_auto_20200407_1413'),
('agenda', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='agenda',
options={'ordering': ['data_criacao']},
),
migrations.AlterField(
model_name='agenda',
name='tatuador',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tatuador.Tatuador'),
),
migrations.AlterField(
model_name='configuracaoagenda',
name='dia',
field=models.CharField(choices=[('', ''), ('segunda', 'Segunda'), ('terca', 'Terça'), ('quarta', 'Quarta'), ('quinta', 'Quinta'), ('sexta', 'Sexta'), ('sabado', 'Sábado'), ('domingo', 'Domingo'), ('feriado_nacional', 'Feriados Nacionais'), ('feriado_religioso', 'Feriados Religiosos')], default='', max_length=17, verbose_name='Dia'),
),
]

---
File: /python_base/base9/base9_6/test_base9_6_8.py (repo: jj1165922611/SET_hogwarts, license: none, UTF-8 Python, 423 bytes)

#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2020-05-17
# @Author : Joey Jiang
# @File        : test_base9_6_8.py
# @Software    : Pycharm
# @Description : Test report beautification and customization
'''
Add plain-text information via allure.attach("xxxx", attachment_type, extension).
'''
import pytest
import allure
def test_attach_text():
allure.attach("这是一个纯文本",attachment_type=allure.attachment_type.TEXT)

---
File: /Activity_03.py (repo: prasunranjan35/prasunranjan35, license: none, UTF-8 Python, 390 bytes)

# for concatenating once.
first= "Good"
second = "Morning"
print (first + second)
//for concatinating five times.
first= "Good"
second = "Morning"
print ((first + second)*5)
//concatinating in single line with spaces.
first= "Good"
second = "Morning "
print ((first + second)*5)
// concatinating with /n.
first= "Good"
second = "Morning\n "
print ((first + second)*5)
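# Alternative sketch (added for illustration): str.join repeats a phrase
# with a separator and no trailing copy of the separator.
print("\n".join(["GoodMorning"] * 5))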
| [
"[email protected]"
] | |
de5dfc99ab3202da4b172ac063dd3210b980fed1 | 914a83057719d6b9276b1a0ec4f9c66fea064276 | /test/performance-regression/full-apps/qmcpack/nexus/library/xmlreader.py | 624681b744438e1c0543befdb9c7001fa8710c08 | [
"LicenseRef-scancode-unknown-license-reference",
"NCSA",
"LicenseRef-scancode-free-unknown",
"GPL-1.0-or-later",
"BSD-2-Clause"
] | permissive | jjwilke/hclib | e8970675bf49f89c1e5e2120b06387d0b14b6645 | 5c57408ac009386702e9b96ec2401da0e8369dbe | refs/heads/master | 2020-03-31T19:38:28.239603 | 2018-12-21T20:29:44 | 2018-12-21T20:29:44 | 152,505,070 | 0 | 0 | Apache-2.0 | 2018-10-11T00:02:52 | 2018-10-11T00:02:51 | null | UTF-8 | Python | false | false | 13,595 | py |
from xml.parsers import expat
from numpy import array
import sys
import keyword
import re
import os
from inspect import getmembers
from superstring import \
find_matching_pair, \
remove_pair_sections, \
remove_empty_lines, \
valid_variable_name,\
string2val
#from abilities import AllAbilities
from generic import obj
class XMLelement(obj):
def _escape_name(self,name):
if name in self._escape_names:
name=name+'_'
#end if
return name
#end def escape_name
def _set_parent(self,parent):
self._parent=parent
return
#end def set_parent
def _add_xmlattribute(self,name,attribute):
self._attributes[name]=attribute
return
#end def add_attribute
def _add_element(self,name,element):
element._name=name
self._elements[name]=element
return
#end def add_element
def _add_text(self,name,text):
self._texts[name]=text
return
#end def add_text
def _to_string(self):
s=''
if len(self._attributes)>0:
s+=' attributes:\n'
for k,v in self._attributes.iteritems():
s+= ' '+k+' = '+str(v)+'\n'
#end for
#end if
if len(self._elements)>0:
s+= ' elements:\n'
for k,v in self._elements.iteritems():
s+= ' '+k+'\n'
#end for
#end if
if len(self._texts)>0:
s+= ' texts:\n'
for k,v in self._texts.iteritems():
s+= ' '+k+'\n'
#end for
#end if
return s
#end def list
# def __str__(self):
# return self._to_string()
# #end def __str__
#
# def __repr__(self):
# return self._to_string()
# #end def __repr__
def __init__(self):
self._name=''
self._parent=None
self._elements=obj()
self._texts=obj()
self._attributes=obj()
self._element_counts=obj()
self._ntexts=0
self._escape_names=None
#self._escape_names=set(dict(getmembers(self)).keys()) | set(keyword.kwlist)
self._escape_names=set(keyword.kwlist)
return
#end def __init__
def condense(self):
for name,elem in self._elements.iteritems():
if isinstance(elem,XMLelement):
elem.condense()
#end if
        #end for
cnames = []
for name in self._elements.keys():
if name[-1]=='1' and not name[-2].isdigit():
cnames.append(name[:-1])
#end if
        #end for
for cname in cnames:
cmax = 1
for name,elem in self._elements.iteritems():
ns = name.split(cname)
if len(ns)==2 and ns[1].isdigit():
cmax = max(cmax,int(ns[1]))
#end if
            #end for
names = set()
for n in range(1,cmax+1):
name = cname+str(n)
names.add(name)
#end for
not_present = names-set(self._elements.keys())
if len(not_present)==0:
collection = []
for n in range(1,cmax+1):
name = cname+str(n)
collection.append(self._elements[name])
del self._elements[name]
del self[name]
#end for
self._elements[cname] = collection
self[cname] = collection
#end if
#end for
#end def condense
def convert_numeric(self):
for name,attr in self._attributes.iteritems():
self[name] = string2val(attr)
#end for
if 'text' in self:
self.value = string2val(self.text)
del self.text
#end if
texts = []
for name,elem in self._elements.iteritems():
if isinstance(elem,XMLelement):
if 'text' in elem and len(elem._attributes)==0 and len(elem._elements)==0:
self[name] = string2val(elem.text)
texts.append(name)
else:
elem.convert_numeric()
#end if
#end if
        #end for
for name in texts:
self._elements[name] = self[name]
#end for
#end def convert_numeric
def remove_hidden(self):
for name,elem in self._elements.iteritems():
if isinstance(elem,XMLelement):
elem.remove_hidden()
elif isinstance(elem,list):
for e in elem:
if isinstance(e,XMLelement):
e.remove_hidden()
#end if
#end for
#end if
#end for
remove = []
for name,value in self.iteritems():
if str(name)[0]=='_':
remove.append(name)
#end if
#end for
for name in remove:
del self[name]
#end for
#end def remove_hidden
#end class XMLelement
'''
class XMLreader
reads an xml file and creates a dynamic object out of its contents
'''
class XMLreader(obj):
def __init__(self,fpath=None,element_joins=None,element_aliases=None,contract_names=False,strip_prefix=None,warn=True,xml=None):
if element_joins is None:
element_joins = []
if element_aliases is None:
element_aliases = {}
#assign values
self.fpath=fpath
if fpath is None:
self.base_path = None
else:
self.base_path = os.path.split(fpath)[0]
#end if
self.element_joins = set(element_joins)
self.element_aliases = element_aliases
self.contract_names = contract_names
self.strip_prefix = strip_prefix
self.warn = warn
#create the parser
self.parser = expat.ParserCreate()
self.parser.StartElementHandler = self.found_element_start
self.parser.EndElementHandler = self.found_element_end
self.parser.CharacterDataHandler = self.found_text
self.parser.AttlistDeclHandler = self.found_attribute
self.parser.returns_unicode = 0
#read in xml file
if xml is None:
fobj = open(fpath,'r')
self.xml = fobj.read()
else:
self.xml = xml
#end if
#remove all comments
pair='<!--','-->'
self.xml = remove_pair_sections(self.xml,pair)
#process includes
while self.xml.find('<include')!=-1:
self.include_files()
self.xml = remove_pair_sections(self.xml,pair)
#end while
#remove empty lines
self.xml = remove_empty_lines(self.xml)
#print self.xml
#parse the xml and build the dynamic object
self.nlevels=1
self.ilevel=0
self.pad=''
# Set the current xml element
self.obj = XMLelement()
self.cur=[self.obj]
self.parser.Parse(self.xml,True)
#the expat parser is troublesome in that it
# -does not have typical class members
# -is unpickleable
# therefore it is removed after the dynamic object is built
del self.parser
return
#end def __init__
def include_files(self):
pair = '<include','/>'
qpair = '<?','?>'
ir=0
while ir!=-1:
il,ir = find_matching_pair(self.xml,pair,ir)
if ir!=-1:
cont = self.xml[il:ir].strip(pair[0]).rstrip(pair[1])
fname = cont.split('=',1)[1].strip().strip('"')
fobj = open(os.path.join(self.base_path,fname),'r')
fcont = fobj.read()
fcont = remove_pair_sections(fcont,qpair)
fobj.close()
self.xml = self.xml.replace(self.xml[il:ir],fcont)
#end if
#end while
return
#end def include_files
def increment_level(self):
self.ilevel+=1
self.nlevels = max(self.ilevel+1,self.nlevels)
if self.ilevel+1==self.nlevels:
self.cur.append(None)
#end if
self.pad = self.ilevel*' '
return
#end def increment_level
def decrement_level(self):
self.ilevel-=1
self.pad = self.ilevel*' '
return
#end def decrement_level
def found_element_start(self,ename,attributes):
#print self.pad,name,attributes
cur = self.cur[self.ilevel]
if ename in self.element_aliases.keys():
if self.element_aliases[ename].find('attributes')!=-1:
exec 'name = '+self.element_aliases[ename]
else:
name = self.element_aliases[ename]
#end if
else:
name=ename
#end if
#alter the name if it is a python keyword
name = cur._escape_name(name)
if self.contract_names:
name = name.lower().replace('-','_')
#end if
if self.strip_prefix!=None:
if name.startswith(self.strip_prefix):
name = name.split(self.strip_prefix)[1]
#end if
#end if
# joinable = in joins and no attributes
# if in elements and joinable: don't add
# else if not in elements and joinable: add unnumbered
# else if not in elements: add unnumbered
# else: add numbered, if number==1: rename first element
joinable = name in self.element_joins and len(attributes.keys())==0
epattern = re.compile(name+'\d+')
in_elements=False
for k in cur._elements.keys():
if epattern.match(k) or k==name:
in_elements=True
#end if
#end for
#in_elements = name in cur._elements.keys()
if in_elements and joinable:
#check if there is a previous unjoinable element w/ same name
if len(cur._elements[name]._attributes)!=0:
#rename the prior element as a numbered one
nelements=cur._element_counts[name]
if nelements==1:
#it should be the first one
newname = name+str(1)
cur[newname]=cur[name]
cur._add_element(newname,cur[newname])
del cur._elements[name]
del cur[name]
else:
print 'prior unjoinable element is not the first'
print ' this should be impossible, stopping'
sys.exit()
#end if
#add the joinable element as unnumbered
# later joinable elements will be joined to this one
cur[name] = XMLelement()
cur._add_element(name,cur[name])
#end if
elif not in_elements:
#add unnumbered
cur[name] = XMLelement()
cur._add_element(name,cur[name])
cur._element_counts[name]=1
else:
#add in a numbered way
nelements=cur._element_counts[name]
if nelements==1:
#rename the first element
newname = name+str(1)
cur[newname]=cur[name]
cur._add_element(newname,cur[newname])
del cur._elements[name]
del cur[name]
#end if
nelements+=1
newname = name + str(nelements)
cur[newname] = XMLelement()
cur._add_element(newname,cur[newname])
cur._element_counts[name]=nelements
name = newname
#end if
cur._elements[name]._parent = cur #mark change
self.increment_level()
self.cur[self.ilevel] = cur._elements[name]
cur = self.cur[self.ilevel]
for kraw,v in attributes.iteritems():
if self.contract_names:
k = kraw.lower().replace('-','_')
else:
k = kraw
#end if
if valid_variable_name(k):
kname = cur._escape_name(k)
cur[kname] = v
cur._add_xmlattribute(kname,cur[kname])
else:
if self.warn:
print 'xmlreader warning: attribute '+k+' is not a valid variable name and has been ignored'
#end if
#end if
#end for
return
#end def found_element_start
def found_element_end(self,name):
self.cur[self.ilevel]=None
self.decrement_level()
#print self.pad,'end',name
return
#end def found_element_end
def found_text(self,rawtext):
text = rawtext.strip()
if text!='':
#print self.pad,text
cur = self.cur[self.ilevel]
if cur._ntexts>0:
cur.text+='\n'+text
else:
cur.text = text
cur._add_text('text',cur.text)
cur._ntexts+=1
#end if
#end if
return
#end def found_text
def found_attribute(self,ename,aname,atype,default,required):
return
#end def found_attribute
#end class XMLreader
def readxml(fpath=None,element_joins=None,element_aliases=None,contract_names=False,strip_prefix=None,warn=True,xml=None):
xr = XMLreader(fpath,element_joins,element_aliases,contract_names,strip_prefix,warn,xml=xml)
return xr.obj
#end def readxml
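if __name__=='__main__':
    # Usage sketch (added for illustration): 'example.xml' is a placeholder
    # input file, not shipped with this module.  Python 2, like the module.
    doc = readxml('example.xml',contract_names=True)
    print doc._to_string()
#end if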
| [
"[email protected]"
] | |
f003f5dc5f861f199a64d060c129e8a400227306 | 1057a60b163614132d98af60eba446a55b64c77d | /chal223.py | 04e0fa8a790b25c4dc2ab5f79c6575169f69b3dd | [] | no_license | Rossel/Solve_250_Coding_Challenges | 92e26d7a74e0f7e815cab99537b15021bc2f33a0 | 4952dd029fdf03ce44d0255ab8c7ac226cff8943 | refs/heads/master | 2022-12-10T00:46:35.634534 | 2020-08-30T16:58:32 | 2020-08-30T16:58:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 262 | py | import re
s = "Bitcoin was born on Jan 3rd 2009 as an alternative to the failure of the current financial system. In 2017, the price of 1 BTC reached $20000, with a market cap of over $300B."
result = re.findall(r"\s(o.{1})\s", s)  # two-character tokens starting with "o", flanked by whitespace
print(result) | [
"[email protected]"
] | |
3dfea0ebbf7a2c927802fb62e197cf25999c8779 | 4208608dc1526be3fee8be014c509d1e2a2a6e33 | /g.py | 943bee3a6c549116c0b2530e12920aea687085b4 | [] | no_license | fallquitor/total-recall | 9af9493112e3faf06ec0977eda8e2ad207a0309c | 61f82b0f4c7a14016091dc4c8102e7073363fd6c | refs/heads/main | 2023-07-19T11:42:19.018267 | 2021-09-11T14:22:57 | 2021-09-11T14:22:57 | 405,384,653 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 172 | py | import sys
sys.stdin = open('input.txt', 'r')
sys.stdout = open('output.txt', 'w')
input()  # skip the first line (the element count)
a = [int(s) for s in input().split()]
a.sort()
for i in a:
print(i, end=' ') | [
"[email protected]"
] | |
de2f9eba909cd4798dd79a6168d97f1984747a94 | db68c1b3e5e70ff1274b8fae1ae3ed974e4893ba | /Phone Number & Email.py | bb766b7c4821f4a3ffd1fba2a2c26b35f5e67513 | [] | no_license | Sipherx/Automate-the-Boring-Stuff-with-Python-2015- | 6d977158cf30129b74a048d2e790decae03176c7 | e505d10ce4e934cd9acad7437bba2ae60002d8f7 | refs/heads/master | 2021-01-10T15:45:25.637310 | 2016-02-01T20:47:32 | 2016-02-01T20:47:32 | 47,432,805 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,278 | py | #! python3.5
# phone & email - finds phone numbers and email addresses on the clipboard
import pyperclip, re
#get phone number
phoneRegex = re.compile(r'''(
(\d{3}|\(\d{3}\))? # area code
(\s|-|\.)? # separator
(\d{3}) # first 3 digits
(\s|-|\.) # separator
(\d{4}) # last 4 digits
    (\s*(ext|x|ext\.)\s*(\d{2,5}))? # extension
)''', re.VERBOSE)
#get email
emailRegex = re.compile(r'''(
    [a-zA-Z0-9._%+-]+     # username
@
[a-zA-Z0-9.-]+ # domain name
(\.[a-zA-Z]{2,4}) # dot-something
)''', re.VERBOSE)
# find matches in clipboard text.
text = str(pyperclip.paste())
matches = []
for groups in phoneRegex.findall(text):
phoneNum = '-'.join([groups[1], groups[3], groups[5]])
if groups[8] != '':
phoneNum += ' x' + groups[8]
matches.append(phoneNum)
for groups in emailRegex.findall(text):
matches.append(groups[0])
# copy results to clipboard
if len(matches) > 0:
pyperclip.copy('\n'.join(matches))
print('Copied to clipboard:')
print('\n'.join(matches))
else:
    print('No phone numbers or email addresses found.')
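# Self-test sketch (added for illustration): the sample text below is made
# up; it exercises the same regexes without touching the clipboard.
sample = 'Call 415-555-1011 ext 42 or write to [email protected].'
print([g[0] for g in phoneRegex.findall(sample)])
print([g[0] for g in emailRegex.findall(sample)])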
| [
"[email protected]"
] | |
20b78a5359a2bbe963630a2f725cfe1fcfc2388d | 5fcf670ceba578656d00a90e2ef2aaae27e91008 | /make_figures.py | b63323eeb9626a9bfe715df33759d9f3709ab053 | [] | no_license | marcharper/Yen | 043613b82340b5445a8a3074cc80f419f90b7dc6 | c35c538e62341284c934709e5c65fd81b21af5bd | refs/heads/master | 2020-05-17T19:27:57.794530 | 2016-09-24T02:35:35 | 2016-09-24T02:35:35 | 40,083,366 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,828 | py | """
Systematically produce many yen-related plots.
"""
import math
import os
import matplotlib
#matplotlib.use('AGG')
font = {'size': 20}
matplotlib.rc('font', **font)
from matplotlib import pyplot
import colormaps as cmaps
pyplot.register_cmap(name='viridis', cmap=cmaps.viridis)
pyplot.set_cmap(cmaps.viridis)
from decompositions import *
def ensure_directory(directory):
"""Checks if a directory exists, if not makes it."""
if not os.path.isdir(directory):
os.mkdir(directory)
def ensure_digits(num, s):
"""Prepends a string s with zeros to enforce a set num of digits."""
if len(s) < num:
return "0"*(num - len(s)) + s
return s
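# Note (added for illustration): ensure_digits(3, "7") returns "007"; for
# plain digit strings this matches Python's built-in "7".zfill(3).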
# Sample matrices for fitness landscapes
def two_type_matrices():
matrices = [
[[1, 1], [0, 1]], # tournament
[[1, 1], [1, 1]], # neutral
[[2, 2], [1, 1]], # classic Moran
[[1, 2], [2, 1]], # hawk-dove
[[1, 3], [2, 1]], # asymmetric hawk-dove
[[2, 1], [1, 2]], # coordination
]
return matrices
def three_type_matrices():
"""Returns the matrices in I.M. Bomze's classifications."""
matrices = list(bomze_matrices())
return matrices
# Yen Decompositions Figures
# Two type populations
def decomposition_bar_charts(N=30, directory="two_type_decompositions"):
# Decomposition Bar Charts, two types
ensure_directory(directory)
for i, m in enumerate(two_type_matrices()):
decomposition_bar_chart(N, m)
filename = os.path.join(directory, "%s.png" % (i,))
pyplot.savefig(filename)
pyplot.clf()
# Three type populations
def heatmaps_bomze(N=40, mu=None, beta=0.1, directory="three_type_decompositions"):
if not mu:
mu = 3. / (2 * N)
ensure_directory(directory)
matrices = list(three_type_matrices())
for i, m in enumerate(matrices):
for index_1, index_2 in [(0, 1), (1, 2), (2, 0), (1, 0), (2, 1), (0, 2)]:
print i, index_1, index_2
fig = decomposition_heatmaps_3(N, m, mu=mu, beta=beta, index_1=index_1, index_2=index_2)
j = ensure_digits(2, str(i))
filename = os.path.join(directory, "%s_%s_%s.png" % (j, index_1, index_2))
pyplot.savefig(filename, dpi=200)
pyplot.close(fig)
pyplot.clf()
def max_decomp_plots(N=40, mu=None, beta=0.1, directory="three_type_max_decomp"):
if not mu:
#mu = 1./N
mu = 3. / (2 * N)
ensure_directory(directory)
matrices = list(three_type_matrices())
for i, m in enumerate(matrices):
print i
fig = decomposition_maximum_component_figure(N, m, mu=mu, beta=beta)
j = ensure_digits(2, str(i))
filename = os.path.join(directory, "%s.png" % (j,))
pyplot.savefig(filename, dpi=200)
pyplot.close(fig)
pyplot.clf()
def max_decomp_test(N=30, mu=None, beta=0.1, directory="three_type_max_decomp"):
if not mu:
#mu = 1./N
mu = 3. / (2 * N)
ensure_directory(directory)
matrices = list(three_type_matrices())
m = matrices[7]
fig = decomposition_maximum_component_figure(N, m, mu=mu, beta=beta, cmap=cmaps.viridis)
filename = os.path.join(directory, "test.png")
pyplot.savefig(filename, dpi=400)
pyplot.close(fig)
pyplot.clf()
if __name__ == "__main__":
#print "Generating figures -- this will take some time."
#decomposition_bar_charts(N=40)
#heatmaps_bomze(N=60)
#max_decomp_plots(N=60)
#N = 60
#mu = 1./ math.pow(N, 1. / 2)
# mu = 3./ (2*N)
#m = list(bomze_matrices())[16]
# m = [[0,1,1], [1,0,1], [1,1,0]]
#figure = decomposition_heatmaps_3(N=N, m=m, mu=mu, beta=1, index_1=0, index_2=1)
#pyplot.show()
# decomposition_bar_charts(N=40)
#heatmaps_bomze(N=60)
max_decomp_plots(N=60)
#max_decomp_test(N=60)
| [
"[email protected]"
] | |
9a7f12a7d94131439e2ea9a5803fd5047e2d7f2b | d94b93857ffec7e47a0d23fb9cc5caf4e412c797 | /main.py | e7764d962098d4f61691ac7bb502e2abac3ed143 | [] | no_license | njardus/Alpha-Vantage-test1 | e91c77a54286b039c4f5cf74e4bae50c7020609f | e5d6f1c2c7dbff38fa182140c9128a9d19ab72ac | refs/heads/master | 2020-04-12T17:07:48.725145 | 2018-12-26T17:48:34 | 2018-12-26T17:48:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 384 | py | from loguru import logger
# import btest
import algo
# import alpaca_trade_api as tradeapi
# Todo: Implement backtesting
# Todo: Implement paper trading
logger.info("Project name: Alpha-Vantage-test1")
logger.info("--------------------------")
logger.info("Program started")
if __name__ == '__main__':
logger.info("__name__ is __main__, so enter main program.")
algo.main() | [
"[email protected]"
] | |
3f9ae56a530dbadac380e2eb01886794049e10da | f2508081e781a07cc362279c8ca71a40c3f1afc0 | /mytodoism/apis/v1/resources.py | 4578a15a127bc3564279735b47a8af23c23441ae | [
"MIT"
] | permissive | xlb233/mytodoism | 2aed103226a8e22ad9d6709d4e97664f4d265c39 | d3d719ffad414542939f2d273c39a1275b1a7dee | refs/heads/main | 2023-02-03T04:45:27.605372 | 2020-12-24T08:04:24 | 2020-12-24T08:04:24 | 322,778,603 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,119 | py | from flask import jsonify, request, current_app, url_for, g
from flask.views import MethodView
from mytodoism.apis.v1 import api_v1
from mytodoism.apis.v1.auth import auth_required, generate_token
from mytodoism.apis.v1.errors import api_abort, ValidationError
from mytodoism.apis.v1.schemas import user_schema, item_schema, items_schema
from mytodoism.extensions import db
from mytodoism.models import User, Item
def get_item_body():
data = request.get_json()
body = data.get('body')
if body is None or str(body).strip() == '':
raise ValidationError('The item body was empty or invalid.')
return body
class IndexAPI(MethodView):
def get(self):
return jsonify({
"api_version": "1.0",
"api_base_url": "http://example.com/api/v1",
"current_user_url": "http://example.com/api/v1/user",
"authentication_url": "http://example.com/api/v1/token",
"item_url": "http://example.com/api/v1/items/{item_id }",
"current_user_items_url": "http://example.com/api/v1/user/items{?page,per_page}",
"current_user_active_items_url": "http://example.com/api/v1/user/items/active{?page,per_page}",
"current_user_completed_items_url": "http://example.com/api/v1/user/items/completed{?page,per_page}",
})
class AuthTokenAPI(MethodView):
def post(self):
grant_type = request.form.get('grant_type')
username = request.form.get('username')
password = request.form.get('password')
if grant_type is None or grant_type.lower() != 'password':
return api_abort(code=400, message='The grant type must be password.')
user = User.query.filter_by(username=username).first()
if user is None or not user.validate_password(password):
return api_abort(code=400, message='Either the username or password was invalid.')
token, expiration = generate_token(user)
response = jsonify({
'access_token': token,
'token_type': 'Bearer',
'expires_in': expiration
})
response.headers['Cache-Control'] = 'no-store'
response.headers['Pragma'] = 'no-cache'
return response
class ItemAPI(MethodView):
decorators = [auth_required]
def get(self, item_id):
"""Get item."""
item = Item.query.get_or_404(item_id)
if g.current_user != item.author:
return api_abort(403)
return jsonify(item_schema(item))
def put(self, item_id):
"""Edit item."""
item = Item.query.get_or_404(item_id)
if g.current_user != item.author:
return api_abort(403)
item.body = get_item_body()
db.session.commit()
return '', 204
def patch(self, item_id):
"""Toggle item."""
item = Item.query.get_or_404(item_id)
if g.current_user != item.author:
return api_abort(403)
item.done = not item.done
db.session.commit()
return '', 204
def delete(self, item_id):
"""Delete item."""
item = Item.query.get_or_404(item_id)
if g.current_user != item.author:
return api_abort(403)
db.session.delete(item)
db.session.commit()
return '', 204
class UserAPI(MethodView):
decorators = [auth_required]
def get(self):
return jsonify(user_schema(g.current_user))
class ItemsAPI(MethodView):
decorators = [auth_required]
def get(self):
"""Get current user's all items."""
page = request.args.get('page', 1, type=int)
per_page = current_app.config['mytodoism_ITEM_PER_PAGE']
pagination = Item.query.with_parent(g.current_user).paginate(page, per_page)
items = pagination.items
current = url_for('.items', page=page, _external=True)
prev = None
if pagination.has_prev:
prev = url_for('.items', page=page - 1, _external=True)
next = None
if pagination.has_next:
next = url_for('.items', page=page + 1, _external=True)
return jsonify(items_schema(items, current, prev, next, pagination))
def post(self):
"""Create new item."""
item = Item(body=get_item_body(), author=g.current_user)
db.session.add(item)
db.session.commit()
response = jsonify(item_schema(item))
response.status_code = 201
response.headers['Location'] = url_for('.item', item_id=item.id, _external=True)
return response
class ActiveItemsAPI(MethodView):
decorators = [auth_required]
def get(self):
"""Get current user's active items."""
page = request.args.get('page', 1, type=int)
pagination = Item.query.with_parent(g.current_user).filter_by(done=False).paginate(
page, per_page=current_app.config['mytodoism_ITEM_PER_PAGE'])
items = pagination.items
current = url_for('.items', page=page, _external=True)
prev = None
if pagination.has_prev:
prev = url_for('.active_items', page=page - 1, _external=True)
next = None
if pagination.has_next:
next = url_for('.active_items', page=page + 1, _external=True)
return jsonify(items_schema(items, current, prev, next, pagination))
class CompletedItemsAPI(MethodView):
decorators = [auth_required]
def get(self):
"""Get current user's completed items."""
page = request.args.get('page', 1, type=int)
pagination = Item.query.with_parent(g.current_user).filter_by(done=True).paginate(
page, per_page=current_app.config['mytodoism_ITEM_PER_PAGE'])
items = pagination.items
current = url_for('.items', page=page, _external=True)
prev = None
if pagination.has_prev:
prev = url_for('.completed_items', page=page - 1, _external=True)
next = None
if pagination.has_next:
next = url_for('.completed_items', page=page + 1, _external=True)
return jsonify(items_schema(items, current, prev, next, pagination))
def delete(self):
"""Clear current user's completed items."""
Item.query.with_parent(g.current_user).filter_by(done=True).delete()
        db.session.commit()  # TODO: would deleting in a loop be better?
return '', 204
api_v1.add_url_rule('/', view_func=IndexAPI.as_view('index'), methods=['GET'])
api_v1.add_url_rule('/oauth/token', view_func=AuthTokenAPI.as_view('token'), methods=['POST'])
api_v1.add_url_rule('/user', view_func=UserAPI.as_view('user'), methods=['GET'])
api_v1.add_url_rule('/user/items', view_func=ItemsAPI.as_view('items'), methods=['GET', 'POST'])
api_v1.add_url_rule('/user/items/<int:item_id>', view_func=ItemAPI.as_view('item'),
methods=['GET', 'PUT', 'PATCH', 'DELETE'])
api_v1.add_url_rule('/user/items/active', view_func=ActiveItemsAPI.as_view('active_items'), methods=['GET'])
api_v1.add_url_rule('/user/items/completed', view_func=CompletedItemsAPI.as_view('completed_items'),
methods=['GET', 'DELETE'])
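# Client-side usage sketch (added for illustration). The base URL and the
# credentials below are placeholders, not values defined by this project.
def _demo_client(base="http://localhost:5000/api/v1"):
    import requests
    resp = requests.post(base + "/oauth/token",
                         data={"grant_type": "password",
                               "username": "[email protected]",
                               "password": "secret"})
    token = resp.json()["access_token"]
    headers = {"Authorization": "Bearer " + token}
    requests.post(base + "/user/items", json={"body": "buy milk"},
                  headers=headers)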
| [
"[email protected]"
] | |
87bab2fc159bb9634fa25425b36346570359a211 | ac7bc015031cc7a57175d33669d98740d6cf1104 | /python/caffe/pycaffe.py | 458b374ad5ff1301ffbc93605aed10c5192a65c8 | [
"BSD-3-Clause",
"BSD-2-Clause",
"LicenseRef-scancode-generic-cla"
] | permissive | sunqiangxtcsun/SSD | d22f4efc867870db9b0e01564916b3c28da8a46e | 6913b28b1cfa03b119adc1a732deac20dae46238 | refs/heads/master | 2020-05-03T02:00:19.924797 | 2019-03-29T07:33:20 | 2019-03-29T07:33:20 | 178,355,044 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,242 | py | """
Wrap the internal caffe C++ module (_caffe.so) with a clean, Pythonic
interface.
"""
from collections import OrderedDict
try:
from itertools import izip_longest
except:
from itertools import zip_longest as izip_longest
import numpy as np
from ._caffe import Net, SGDSolver, NesterovSolver, AdaGradSolver, \
RMSPropSolver, AdaDeltaSolver, AdamSolver
import caffe.io
import six
# We directly update methods from Net here (rather than using composition or
# inheritance) so that nets created by caffe (e.g., by SGDSolver) will
# automatically have the improved interface.
@property
def _Net_blobs(self):
"""
An OrderedDict (bottom to top, i.e., input to output) of network
blobs indexed by name
"""
if not hasattr(self, '_blobs_dict'):
self._blobs_dict = OrderedDict(zip(self._blob_names, self._blobs))
return self._blobs_dict
@property
def _Net_blob_loss_weights(self):
"""
An OrderedDict (bottom to top, i.e., input to output) of network
blob loss weights indexed by name
"""
if not hasattr(self, '_blobs_loss_weights_dict'):
self._blob_loss_weights_dict = OrderedDict(zip(self._blob_names,
self._blob_loss_weights))
return self._blob_loss_weights_dict
@property
def _Net_params(self):
"""
An OrderedDict (bottom to top, i.e., input to output) of network
parameters indexed by name; each is a list of multiple blobs (e.g.,
weights and biases)
"""
if not hasattr(self, '_params_dict'):
self._params_dict = OrderedDict([(name, lr.blobs)
for name, lr in zip(
self._layer_names, self.layers)
if len(lr.blobs) > 0])
return self._params_dict
@property
def _Net_inputs(self):
if not hasattr(self, '_input_list'):
keys = list(self.blobs.keys())
self._input_list = [keys[i] for i in self._inputs]
return self._input_list
@property
def _Net_outputs(self):
if not hasattr(self, '_output_list'):
keys = list(self.blobs.keys())
self._output_list = [keys[i] for i in self._outputs]
return self._output_list
def _Net_forward(self, blobs=None, start=None, end=None, **kwargs):
"""
Forward pass: prepare inputs and run the net forward.
Parameters
----------
blobs : list of blobs to return in addition to output blobs.
kwargs : Keys are input blob names and values are blob ndarrays.
For formatting inputs for Caffe, see Net.preprocess().
If None, input is taken from data layers.
start : optional name of layer at which to begin the forward pass
end : optional name of layer at which to finish the forward pass
(inclusive)
Returns
-------
outs : {blob name: blob ndarray} dict.
"""
if blobs is None:
blobs = []
if start is not None:
start_ind = list(self._layer_names).index(start)
else:
start_ind = 0
if end is not None:
end_ind = list(self._layer_names).index(end)
outputs = set([end] + blobs)
else:
end_ind = len(self.layers) - 1
outputs = set(self.outputs + blobs)
if kwargs:
if set(kwargs.keys()) != set(self.inputs):
raise Exception('Input blob arguments do not match net inputs.')
# Set input according to defined shapes and make arrays single and
# C-contiguous as Caffe expects.
for in_, blob in six.iteritems(kwargs):
if blob.shape[0] != self.blobs[in_].shape[0]:
raise Exception('Input is not batch sized')
self.blobs[in_].data[...] = blob
self._forward(start_ind, end_ind)
# Unpack blobs to extract
return {out: self.blobs[out].data for out in outputs}
def _Net_backward(self, diffs=None, start=None, end=None, **kwargs):
"""
Backward pass: prepare diffs and run the net backward.
Parameters
----------
diffs : list of diffs to return in addition to bottom diffs.
kwargs : Keys are output blob names and values are diff ndarrays.
If None, top diffs are taken from forward loss.
start : optional name of layer at which to begin the backward pass
end : optional name of layer at which to finish the backward pass
(inclusive)
Returns
-------
outs: {blob name: diff ndarray} dict.
"""
if diffs is None:
diffs = []
if start is not None:
start_ind = list(self._layer_names).index(start)
else:
start_ind = len(self.layers) - 1
if end is not None:
end_ind = list(self._layer_names).index(end)
outputs = set([end] + diffs)
else:
end_ind = 0
outputs = set(self.inputs + diffs)
if kwargs:
if set(kwargs.keys()) != set(self.outputs):
raise Exception('Top diff arguments do not match net outputs.')
# Set top diffs according to defined shapes and make arrays single and
# C-contiguous as Caffe expects.
for top, diff in six.iteritems(kwargs):
if diff.shape[0] != self.blobs[top].shape[0]:
raise Exception('Diff is not batch sized')
self.blobs[top].diff[...] = diff
self._backward(start_ind, end_ind)
# Unpack diffs to extract
return {out: self.blobs[out].diff for out in outputs}
def _Net_forward_all(self, blobs=None, **kwargs):
"""
Run net forward in batches.
Parameters
----------
blobs : list of blobs to extract as in forward()
kwargs : Keys are input blob names and values are blob ndarrays.
Refer to forward().
Returns
-------
all_outs : {blob name: list of blobs} dict.
"""
# Collect outputs from batches
all_outs = {out: [] for out in set(self.outputs + (blobs or []))}
for batch in self._batch(kwargs):
outs = self.forward(blobs=blobs, **batch)
for out, out_blob in six.iteritems(outs):
all_outs[out].extend(out_blob.copy())
# Package in ndarray.
for out in all_outs:
all_outs[out] = np.asarray(all_outs[out])
# Discard padding.
pad = len(six.next(six.itervalues(all_outs))) - len(six.next(six.itervalues(kwargs)))
if pad:
for out in all_outs:
all_outs[out] = all_outs[out][:-pad]
return all_outs
def _Net_forward_backward_all(self, blobs=None, diffs=None, **kwargs):
"""
Run net forward + backward in batches.
Parameters
----------
blobs: list of blobs to extract as in forward()
diffs: list of diffs to extract as in backward()
kwargs: Keys are input (for forward) and output (for backward) blob names
and values are ndarrays. Refer to forward() and backward().
Prefilled variants are called for lack of input or output blobs.
Returns
-------
all_blobs: {blob name: blob ndarray} dict.
all_diffs: {blob name: diff ndarray} dict.
"""
# Batch blobs and diffs.
all_outs = {out: [] for out in set(self.outputs + (blobs or []))}
all_diffs = {diff: [] for diff in set(self.inputs + (diffs or []))}
forward_batches = self._batch({in_: kwargs[in_]
for in_ in self.inputs if in_ in kwargs})
backward_batches = self._batch({out: kwargs[out]
for out in self.outputs if out in kwargs})
# Collect outputs from batches (and heed lack of forward/backward batches).
for fb, bb in izip_longest(forward_batches, backward_batches, fillvalue={}):
batch_blobs = self.forward(blobs=blobs, **fb)
batch_diffs = self.backward(diffs=diffs, **bb)
for out, out_blobs in six.iteritems(batch_blobs):
all_outs[out].extend(out_blobs.copy())
for diff, out_diffs in six.iteritems(batch_diffs):
all_diffs[diff].extend(out_diffs.copy())
# Package in ndarray.
for out, diff in zip(all_outs, all_diffs):
all_outs[out] = np.asarray(all_outs[out])
all_diffs[diff] = np.asarray(all_diffs[diff])
# Discard padding at the end and package in ndarray.
pad = len(six.next(six.itervalues(all_outs))) - len(six.next(six.itervalues(kwargs)))
if pad:
for out, diff in zip(all_outs, all_diffs):
all_outs[out] = all_outs[out][:-pad]
all_diffs[diff] = all_diffs[diff][:-pad]
return all_outs, all_diffs
def _Net_set_input_arrays(self, data, labels):
"""
Set input arrays of the in-memory MemoryDataLayer.
(Note: this is only for networks declared with the memory data layer.)
"""
if labels.ndim == 1:
labels = np.ascontiguousarray(labels[:, np.newaxis, np.newaxis,
np.newaxis])
return self._set_input_arrays(data, labels)
def _Net_batch(self, blobs):
"""
Batch blob lists according to net's batch size.
Parameters
----------
blobs: Keys blob names and values are lists of blobs (of any length).
Naturally, all the lists should have the same length.
Yields
------
batch: {blob name: list of blobs} dict for a single batch.
"""
num = len(six.next(six.itervalues(blobs)))
batch_size = six.next(six.itervalues(self.blobs)).shape[0]
remainder = num % batch_size
num_batches = num // batch_size
# Yield full batches.
for b in range(num_batches):
i = b * batch_size
yield {name: blobs[name][i:i + batch_size] for name in blobs}
# Yield last padded batch, if any.
if remainder > 0:
padded_batch = {}
for name in blobs:
padding = np.zeros((batch_size - remainder,)
+ blobs[name].shape[1:])
padded_batch[name] = np.concatenate([blobs[name][-remainder:],
padding])
yield padded_batch
def _Net_get_id_name(func, field):
"""
Generic property that maps func to the layer names into an OrderedDict.
Used for top_names and bottom_names.
Parameters
----------
func: function id -> [id]
field: implementation field name (cache)
Returns
------
A one-parameter function that can be set as a property.
"""
@property
def get_id_name(self):
if not hasattr(self, field):
id_to_name = list(self.blobs)
res = OrderedDict([(self._layer_names[i],
[id_to_name[j] for j in func(self, i)])
for i in range(len(self.layers))])
setattr(self, field, res)
return getattr(self, field)
return get_id_name
# Attach methods to Net.
Net.blobs = _Net_blobs
Net.blob_loss_weights = _Net_blob_loss_weights
Net.params = _Net_params
Net.forward = _Net_forward
Net.backward = _Net_backward
Net.forward_all = _Net_forward_all
Net.forward_backward_all = _Net_forward_backward_all
Net.set_input_arrays = _Net_set_input_arrays
Net._batch = _Net_batch
Net.inputs = _Net_inputs
Net.outputs = _Net_outputs
Net.top_names = _Net_get_id_name(Net._top_ids, "_top_names")
Net.bottom_names = _Net_get_id_name(Net._bottom_ids, "_bottom_names")
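# Usage sketch (added for illustration, not part of the official module):
# "model_def" and "model_weights" are placeholder paths the caller must
# supply, e.g. a deploy.prototxt and a .caffemodel file.
def _example_forward(model_def, model_weights):
    import caffe
    net = caffe.Net(model_def, model_weights, caffe.TEST)
    data = np.zeros(net.blobs[net.inputs[0]].shape, dtype=np.float32)
    return net.forward(**{net.inputs[0]: data})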
| [
"[email protected]"
] | |
849c3dffad42f275ad0897a284d8e4c8f34ee510 | f0e1f0d7acba08868a929889232a5e5179519302 | /tests/util/test_async_utils.py | 17fd86d02de77523238c0149ca0cc203dccaf20c | [
"Apache-2.0"
] | permissive | Awesome-Technologies/synapse | 8c5169f73908a72d49ab1ece2281bacd5a81d5c8 | 56ee2a947886fd05e505e71284abadccd5991166 | refs/heads/amp.chat | 2021-06-16T22:03:48.032508 | 2020-06-10T10:49:08 | 2020-06-10T11:19:42 | 198,403,226 | 2 | 1 | Apache-2.0 | 2020-02-10T10:03:32 | 2019-07-23T09:58:24 | Python | UTF-8 | Python | false | false | 3,745 | py | # -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.internet import defer
from twisted.internet.defer import CancelledError, Deferred
from twisted.internet.task import Clock
from synapse.logging.context import (
SENTINEL_CONTEXT,
LoggingContext,
PreserveLoggingContext,
current_context,
)
from synapse.util.async_helpers import timeout_deferred
from tests.unittest import TestCase
class TimeoutDeferredTest(TestCase):
def setUp(self):
self.clock = Clock()
def test_times_out(self):
"""Basic test case that checks that the original deferred is cancelled and that
the timing-out deferred is errbacked
"""
cancelled = [False]
def canceller(_d):
cancelled[0] = True
non_completing_d = Deferred(canceller)
timing_out_d = timeout_deferred(non_completing_d, 1.0, self.clock)
self.assertNoResult(timing_out_d)
self.assertFalse(cancelled[0], "deferred was cancelled prematurely")
self.clock.pump((1.0,))
self.assertTrue(cancelled[0], "deferred was not cancelled by timeout")
self.failureResultOf(timing_out_d, defer.TimeoutError)
def test_times_out_when_canceller_throws(self):
"""Test that we have successfully worked around
https://twistedmatrix.com/trac/ticket/9534"""
def canceller(_d):
raise Exception("can't cancel this deferred")
non_completing_d = Deferred(canceller)
timing_out_d = timeout_deferred(non_completing_d, 1.0, self.clock)
self.assertNoResult(timing_out_d)
self.clock.pump((1.0,))
self.failureResultOf(timing_out_d, defer.TimeoutError)
def test_logcontext_is_preserved_on_cancellation(self):
blocking_was_cancelled = [False]
@defer.inlineCallbacks
def blocking():
non_completing_d = Deferred()
with PreserveLoggingContext():
try:
yield non_completing_d
except CancelledError:
blocking_was_cancelled[0] = True
raise
with LoggingContext("one") as context_one:
# the errbacks should be run in the test logcontext
def errback(res, deferred_name):
self.assertIs(
current_context(),
context_one,
"errback %s run in unexpected logcontext %s"
% (deferred_name, current_context()),
)
return res
original_deferred = blocking()
original_deferred.addErrback(errback, "orig")
timing_out_d = timeout_deferred(original_deferred, 1.0, self.clock)
self.assertNoResult(timing_out_d)
self.assertIs(current_context(), SENTINEL_CONTEXT)
timing_out_d.addErrback(errback, "timingout")
self.clock.pump((1.0,))
self.assertTrue(
blocking_was_cancelled[0], "non-completing deferred was not cancelled"
)
self.failureResultOf(timing_out_d, defer.TimeoutError)
self.assertIs(current_context(), context_one)
| [
"[email protected]"
] | |
bf93d5834744eb1dfb38edcfc3b7806731b324b8 | bcecadd21228da228a54e9d11e7dcb3462fff9f5 | /Share/an.py | 83b8ac6a6aa79f82322da8f606ab159c543535b7 | [] | no_license | NotReallySynthPlls/NotReallySynthPlls | b2c5cd558e2ff489ed501a41557737e1095091a9 | b861d333e0e3e0933efececb9a428b254291083e | refs/heads/master | 2021-07-24T12:06:36.391287 | 2021-02-10T01:21:34 | 2021-02-10T01:21:34 | 243,114,543 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 541 | py | import numpy as np
from typing import Tuple
from ruamel import yaml
from dataclasses import dataclass
@dataclass
class Stats:
mean: float
std: float
min: float
max: float
def period_stats(fname: str) -> Tuple[np.ndarray, np.ndarray, Stats]:
y = yaml.safe_load(open(fname, "r"))
ts = y['timescale']
edges = np.array(y["edges"]) * ts
periods = np.diff(edges)
stats = Stats(
mean=np.mean(periods),
std=np.std(periods),
min=np.min(periods),
max=np.max(periods),
)
return (edges, periods, stats)
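if __name__ == "__main__":
    # Usage sketch (added for illustration): "edges.yaml" is a placeholder
    # for a YAML file with "timescale" and "edges" keys, as loaded above.
    _, periods, stats = period_stats("edges.yaml")
    print(f"{len(periods)} periods, mean={stats.mean:.3e}, std={stats.std:.3e}")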
| [
"[email protected]"
] | |
03604ba84c2c5d0bebc993dcfe4c9ac30d9582d2 | e23a4f57ce5474d468258e5e63b9e23fb6011188 | /125_algorithms/_exercises/templates/LeetCode In Python/Section 13 INTERVIEW QUESTIONS Backtracking_Recursion/subsets/index.py | 4e46e6557bce57042cabb2c1408c1c12629276e2 | [] | no_license | syurskyi/Python_Topics | 52851ecce000cb751a3b986408efe32f0b4c0835 | be331826b490b73f0a176e6abed86ef68ff2dd2b | refs/heads/master | 2023-06-08T19:29:16.214395 | 2023-05-29T17:09:11 | 2023-05-29T17:09:11 | 220,583,118 | 3 | 2 | null | 2023-02-16T03:08:10 | 2019-11-09T02:58:47 | Python | UTF-8 | Python | false | false | 425 | py | # c_ Solution
from typing import List


class Solution:
    def solution(self, nums, ans, cur, index):
        if index > len(nums):
            return
        ans.append(cur[:])
        for i in range(index, len(nums)):
            if nums[i] not in cur:
                cur.append(nums[i])
                self.solution(nums, ans, cur, i + 1)
                cur.pop()
        return

    def subsets(self, nums: List[int]) -> List[List[int]]:
        ans = []
        cur = []
        self.solution(nums, ans, cur, 0)
        return ans | [
"[email protected]"
] | |
3bbe40df9556148fda9dda790a7dd7b10c6bea7f | fbc45074168eee8acd2ce77826799d61a4ef60a9 | /Scrapy_WDZJ/tools/net.py | 68573aef2920ed0341a937def08f27c47e608205 | [] | no_license | chendongyu0125/p2p | e8329839c2eeb5bfcb252317af83ed0fc3767048 | 4f9d85c03cac6b38fe5f39956cb5ecd0311e070f | refs/heads/master | 2020-04-11T16:28:24.833475 | 2018-12-23T23:47:16 | 2018-12-23T23:47:16 | 161,925,436 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,943 | py | # coding=utf-8
import urllib.request
from scrapy.http import Request, FormRequest
import time
from Scrapy_WDZJ import settings
import logging
import requests
def valid_proxyip(proxy):
"""
proxy['ip'] = '182.18.13.149'
proxy['port'] = '53281'
proxy['protocal'] = 'http'
:param proxy:
:return: True if it is a valid proxy ip
"""
# telnetlib.Telnet(ip, port=port, timeout=3)
ip = proxy['ip']
port = proxy['port']
protocal = proxy['protocal']
headers = {
'User-Agent': "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36"
}
proxies = {protocal: "{0}://{1}:{2}".format(protocal, ip, port)}
logging.debug(proxies)
try:
res=requests.get('http://www.suda.edu.cn', proxies=proxies, headers=headers, timeout=1)
if res.status_code==200:
proxy['valid']=1
else:
proxy['valid']=0
except Exception as err:
proxy['valid']=0
logging.debug(err)
return proxy
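def demo_valid_proxyip():
    # Usage sketch (added for illustration): the address below is made up,
    # not a real proxy endpoint.
    candidate = {'ip': '127.0.0.1', 'port': '8080', 'protocal': 'http'}
    return valid_proxyip(candidate)['valid']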
def reconnect_Request(response, callback):
"""
如果访问被拒,返回的状态码是555,含义为疑似遭受黑客攻击
休息时间 settings.get("SLEEP_UNIT") * tries
如果尝试次数小于100,则继续尝试
:param response: 放回状态
:param callback: 再次尝试的请求
:return: Request | None
"""
url = response.url
meta = response.meta
tries = int(meta['tries'])
meta['tries'] =str(tries + 1)
if tries <= 100:
sleep_unit= int(settings.SLEEP_UNIT)
logging.debug("尝试通过Request方式连接:{0}, meta参数:{1},尝试次数:{2},休息一下".format(url, meta, tries))
time.sleep(sleep_unit*tries)
return Request(url, meta=meta, callback=callback)
else:
logging.error("尝试通过Request方式连接:{0}, meta参数:{1}, 尝试次数大于:100, 放弃。。。".format(url, meta))
def reconnect_FormRequest(response, callback, formdata):
"""
如果访问被拒,返回的状态码是555
休息时间 settings.get("SLEEP_UNIT") * tries
如果尝试次数小于100,则继续尝试
:param response: 放回状态
:param callback: 再次尝试的请求
:return: FormRequest | None
"""
url = response.url
meta = response.meta
tries = int(meta['tries'])
meta['tries'] =str(tries + 1)
if tries <= 100:
sleep_unit= int(settings.SLEEP_UNIT)
logging.debug("尝试通过FormRequest方式连接:{0}, formdata参数:{1}, meta参数:{2},尝试次数:{3},休息一下".format(url, formdata, meta, tries))
time.sleep(sleep_unit*tries)
return FormRequest(url, formdata=formdata, meta=meta, callback=callback)
else:
logging.error("尝试通过FormRequest方式连接:{0}, formdata参数:{1}, meta参数:{2}, 尝试次数:大于100, 放弃。。。".format(url, meta)) | [
"[email protected]"
] | |
76baaefae0f4d04174b42a0ccce9760680dae7c4 | 69bb4f776a1f622c120934408eed2c62db506085 | /iotkitclient/client.py | 3e1684895df2f6d057820b1407e94a1286120b2a | [] | no_license | arkocal/oisp-sdk-python | fbad355d01def8b316f9ba2ef469b9c01718009f | 3f2a77083c6375a49a55a6692febd994b49f201f | refs/heads/master | 2021-04-03T07:08:53.919948 | 2018-03-06T11:37:51 | 2018-03-06T11:40:21 | 124,365,362 | 0 | 0 | null | 2018-03-08T09:03:41 | 2018-03-08T09:03:41 | null | UTF-8 | Python | false | false | 15,974 | py | # Copyright (c) 2015-2018, Intel Corporation
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Intel Corporation nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Methods for IoT Analytics Cloud connections."""
import json
import requests
from iotkitclient.account import Account
from iotkitclient.device import Device
from iotkitclient.oic_token import UserToken
from iotkitclient.oic_user import User
class AuthenticationError(Exception):
"""Authentication Error class for Open IOT Connector.
This Error is thrown if an error occurs before server is even
contacted, otherwise an OIC Exception will be thrown, even in the
case of an authentication related exception
"""
pass
class OICException(Exception):
"""Exception for cases when an error code is returned from the server."""
INVALID_REQUEST = 400
NOT_AUTHORIZED = 401
NOT_FOUND = 404
TOO_MANY_REQUESTS = 429
INTERNAL_SERVER_ERROR = 500
ANALYTICS_ERROR = 999
DEVICE_INVALID_DATA = 1400
DEVICE_NOT_FOUND = 1404
DEVICE_ALREADY_EXISTS = 1409
INVALID_ACTIVATION_CODE = 1410
DEVICE_SAVING_ERROR = 1500
DEVICE_ACTIVATION_ERROR = 1510
DEVICE_DELETION_ERROR = 1512
DEVICE_REGISTRATION_ERROR = 1513
USER_INVALID_DATA = 2300
WEAK_PASSWORD = 2401
EMAIL_NOT_VERIFIED = 2402
ACCOUNT_LOCKED = 2403
TERMS_AND_CONDITIONS_ERROR = 2405
INVALID_INTERACTION_TOKEN = 2406
USER_ALREADY_EXISTS = 2409
USER_ALREADY_INVITED = 2420
SOCIAL_LOGIN_NOT_CONFIGURED = 2422
USER_SAVING_ERROR = 2500
CANNOT_SEND_ACTIVATION_EMAIL = 2501
USER_SAVING_ERROR_AA = 2502
USER_DELETION_ERROR_AA = 2502
CANNOT_REDUCE_ADMIN_PRIVILEGES = 2503
ACCOUNT_INVALID_DATA = 3400
CANNOT_CHANGE_TRACK_SENSOR = 3401
ACCOUNT_NOT_FOUND = 3404
ACCOUNT_ALREADY_EXISTS = 3409
ACCOUNT_SAVING_ERROR = 3500
# pylint: disable=invalid-name
ACCOUNT_SAVING_ERROR_ADD_OR_UPDATE = 3510
ACCOUNT_DELETION_ERROR = 3511
ACCOUNT_DELETION_ERROR_AA = 3512
COMPONENT_INVALID_DATA = 5400
COMPONENT_NOT_FOUND = 5404
COMPONENT_ALREADY_EXISTS = 5409
SEARCH_PROCESSING_ERROR = 5410
INVALID_PARAMETER_NAME = 5411
INVALID_PARAMETER_VALUES = 5412
DATA_INVALID_DATA = 6400
FORMAT_ERROR = 6500
# pylint: disable=invalid-name
OFFSET_AND_LIMIT_BOTH_OR_NONE_REQUIRED = 6504
SUBMISSION_ERROR = 6505
WRONG_RESPONSE_CODE_FROM_AA = 6506
RULE_INVALID_DATA = 7400
PROPERTY_MISSING = 7401
INVALID_SYNCHRONIZATION_STATUS = 7402
RULE_NOT_FOUND = 7404
RULE_ALREADY_EXISTS = 7409
RULE_NOT_FOUND_FROM_PROXY = 7444
RULE_DELETION_ERROR = 7557
ACTIVATED_RULE_DELETION_ERROR = 7558
CANNOT_USE_API = 7600
ALERT_RULE_NOT_FOUND = 8401
ALERT_ACCOUNT_NOT_FOUND = 8402
ALERT_DEVICE_NOT_FOUND = 8403
ALERT_NOT_FOUND = 8404
WRONG_ALERT_STATUS = 8405
ALERT_ALREADY_EXISTS = 8409
ALERT_SAVING_ERROR_AA = 8500
ALERT_SAVING_ERROR = 8501
ALERT_SAVING_ERROR_COMMENTS = 8502
INVITATION_NOT_FOUND = 10404
INVITATION_DELETION_ERROR = 10500
ACTUATION_SEARCH_ERROR = 12500
ACTUATION_SAVING_ERROR = 12501
def __init__(self, expect, resp):
"""Create OICException.
Args
----------
expect: Expected HTTP Response code
resp: Received response object from requests.
"""
message = ("Exception during API call\n"
"HTTP code: {}, {} was expected".format(resp.status_code,
expect))
try:
resp_json = resp.json()
if resp_json:
pretty = json.dumps(resp_json, indent=4,
separators=(',', ': '))
message += "\nError message: {}".format(pretty)
self.code = resp_json.get("code")
except json.JSONDecodeError:
message += "\nResponse: {}".format(resp.content)
super(OICException, self).__init__(message)
class Client(object):
"""IoT Analytics Cloud client class.
    Attributes: proxies (dict): proxy server addresses used for connection
user_token (str): access token from IoT Analytics site connection
user_id (str): user ID for authenticated user
"""
def __init__(self, api_root, proxies=None, verify_certs=True):
"""Set up IOT Analytics Cloud connection.
Args:
----------
api_root (str): IoT Analytics server address (defaults
to https://streammyiot.com/v1/api)
proxies (dict, optional): dictionary of proxy server addresses
(e.g., {"https": "http://proxy-us.mycorp.com:8080"}
The API will respect system proxy settings if none specified.
verify_certs (bool, optional): Whether the certificates should
be verified on each request.
"""
self.base_url = api_root
self.proxies = proxies
self.verify_certs = verify_certs
self.user_token = None
self.user_id = None
# Contains last reponse
self.response = None
# Test connection
self.get_server_info()
def get_headers(self, authorize_as=None, authorize=True):
"""Return a JSON dictionary containing request headers.
Args:
---------
authorize (bool, optional): Whether auth token is to be included
authorize_as (optional): When using device authorization, a device
object with a valid device_token has to be given.
If this is None (default), client will attempt user authorization.
"""
headers = {"content-type": "application/json"}
if not authorize:
return headers
if authorize_as is None:
if not self.user_token:
raise AuthenticationError("You need to authenticate using "
"the auth method first, or authorize"
"as a device")
if self.user_token.is_expired():
raise AuthenticationError("UserToken expired, you need to use "
"the auth method again.")
token = self.user_token.value
else:
assert isinstance(authorize_as, Device), """You can only authorize as
Device, leave authorize_as empty for user authorization."""
token = authorize_as.device_token
headers["Authorization"] = "Bearer " + token
return headers
def auth(self, username, password):
"""Submit IoT Analytics user credentials to obtain the access token.
Sets user_id and user_token attributes for connection instance
Args:
----------
username (str): username for IoT Analytics site
password (str): password for IoT Analytics site
"""
payload = {"username": username, "password": password}
resp = self.post("/auth/token", data=payload, authorize=False,
expect=200)
token_str = resp.json()["token"]
self.user_token = self.get_user_token(token_str)
self.user_id = self.user_token.user_id
def get_user_token(self, token_str=None):
"""Return a UserToken object containing user token information.
Args:
----------
token_str (str): If token string is not specified, the
last acquired token will be used.
"""
if not token_str and self.user_token:
return self.user_token
if not token_str:
raise ValueError("token_str must be specified for first token"
"acquisation")
if token_str:
headers = self.get_headers(authorize=False)
headers["Authorization"] = "Bearer " + token_str
else:
headers = self.get_headers()
# authorize=False because it is done manually, as token object NA yet
resp = self.get("/auth/tokenInfo", headers=headers, authorize=False,
expect=200)
return UserToken.from_json(token_str, resp.json(), client=self)
def get_user(self, user_id=None):
"""Get the user with given user_id.
If None specified, the token holder will be returned.
"""
if not user_id:
user_id = self.user_token.user_id
resp = self.get("/users/" + user_id, expect=200)
return User.from_json(client=self, json_dict=resp.json())
def reset_password_request_mail(self, email):
"""Send a password reset mail to given email adress."""
self.post("/users/forgot_password", data={"email": email},
authorize=False, expect=200)
def reset_password_submit_new(self, token, password):
"""Reset password using the token obtained via email."""
payload = {"token": token, "password": password}
self.put("/users/forgot_password", data=payload,
authorize=False, expect=200)
def change_user_password(self, email, current_password, new_password):
"""Change password for user identified by email."""
url = "/users/{}/change_password".format(email)
payload = {"currentpwd": current_password, "password": new_password}
self.put(url, data=payload, authorize=False, expect=200)
def request_user_activation(self, email):
"""Send user with given email adress an activation mail."""
self.post("/users/request_user_activation", data={"email": email},
authorize=False, expect=200)
def get_server_info(self):
"""Get cloud version and health information.
Returns: a JSON dictionary
"""
resp = self.get("/health", authorize=False, expect=200)
return resp.json()
def get_accounts(self):
"""Get a list of accounts connected to current authentication token."""
return self.user_token.accounts
def get_device(self, device_token, device_id, domain_id=None,
fetch_info=True):
"""Get a device using a device token.
Args:
----------
device_token (str): as received while activating device.
device_id (str): device id on the service.
domain_id (str): as received while activating the device,
this is the same as the account_id of the account the device
is bound to.
fetch_info (boolean): whether to fetch device information.
"""
fetch_info = fetch_info
headers = self.get_headers(authorize=False)
headers["Authorization"] = "Bearer " + device_token
url = "/devices/{}".format(device_id)
if fetch_info:
response = self.get(url, headers=headers, authorize=False,
expect=200)
json_dict = response.json()
else:
json_dict = {"deviceId": device_id,
"domainId": domain_id}
return Device.from_json(json_dict, client=self,
device_token=device_token)
def create_account(self, name):
"""Create an account with given name and return an Account instance.
A new token needs to be acquired using the auth method to access
the account.
"""
payload = {"name": name}
resp = self.post("/accounts", data=payload, expect=201)
resp_json = resp.json()
return Account(self, resp_json["name"], resp_json["id"],
Account.ROLE_ADMIN)
# pylint: disable=too-many-arguments
# All arguments are necessary and this method is not exposed
def _make_request(self, request_func, endpoint, authorize, authorize_as,
expect=None, *args, **kwargs):
"""Make a request using global settings.
Raises an OICException if a status code other than expect is
returned.
"""
headers = kwargs.pop("headers",
self.get_headers(authorize=authorize,
authorize_as=authorize_as))
proxies = kwargs.pop("proxies", self.proxies)
verify = kwargs.pop("verify", self.verify_certs)
if "data" in kwargs and isinstance(kwargs.get("data"), dict):
kwargs["data"] = json.dumps(kwargs["data"])
url = self.base_url + endpoint
resp = request_func(url, headers=headers, proxies=proxies,
verify=verify, *args, **kwargs)
self.response = resp
if expect and (resp.status_code != expect):
raise OICException(expect, resp)
return resp
def get(self, endpoint, authorize=True, authorize_as=None,
*args, **kwargs):
"""Make a GET request.
Args:
----------
endpoint: Endpoint without the API root.
authorize: Whether authorization token should be included.
Other arguments are passed to requests module.
"""
return self._make_request(requests.get, endpoint, authorize,
authorize_as, *args, **kwargs)
def post(self, endpoint, authorize=True, authorize_as=None,
*args, **kwargs):
"""Make a POST request.
Args:
----------
endpoint: Endpoint without the API root.
authorize: Whether authorization token should be included.
Other arguments are passed to requests module.
"""
return self._make_request(requests.post, endpoint, authorize,
authorize_as, *args, **kwargs)
def put(self, endpoint, authorize=True, authorize_as=None,
*args, **kwargs):
"""Make a PUT request.
Args:
----------
endpoint: Endpoint without the API root.
authorize: Whether authorization token should be included.
Other arguments are passed to requests module.
"""
return self._make_request(requests.put, endpoint, authorize,
authorize_as, *args, **kwargs)
def delete(self, endpoint, authorize=True, authorize_as=None,
*args, **kwargs):
"""Make a DELETE request.
Args:
----------
endpoint: Endpoint without the API root.
authorize: Whether authorization token should be included.
Other arguments are passed to requests module.
"""
return self._make_request(requests.delete, endpoint, authorize,
authorize_as, *args, **kwargs)
| [
"[email protected]"
] | |
92ba01d4aeb7fcf513cc96fff444f84e81b55da5 | 7854d2729379e423d14428e69d8fdfbfedf7b98b | /src/heuristic_search.py | 8e4adf9619ca9ca2a2471e559b88a2c31f7c7e11 | [] | no_license | patrick-vieira/INF01048_T1_8-Puzzle | 06f3dddcbca0c5f71ec8a2bc7877f034a49ce5a8 | 29eb5ae83d84ad46c9023e92417aaccc150c0fef | refs/heads/main | 2023-03-08T15:10:29.834390 | 2021-02-20T01:50:50 | 2021-02-20T01:50:50 | 337,906,989 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 936 | py | import heapq
from typing import Callable
import parameters
from expand import expand_node
from monitor import Monitor
from node import Node
def heuristic_search(root_node: Node, heuristic: Callable[[Node], int]) -> (Node, Monitor):
X = set()
F = []
heapq.heapify(F)
heapq.heappush(F, root_node)
monitor = Monitor(X, F)
monitor.start()
while True:
if not F:
monitor.finish()
return False, monitor
# with MonitorPerformance():
v = heapq.heappop(F)
if v.state == parameters.objective_state:
monitor.finish()
return v, monitor
elif v.state not in X: #rever isso, o estado pode estar aqui já, mas ter chego por outro caminho
X.add(v.state)
monitor.count()
for node in expand_node(v):
node.set_heuristic_cost(heuristic(node))
heapq.heappush(F, node)
| [
"[email protected]"
] | |
3734609f2260ddfab328e8845c29bb0bb3d8622e | 04c8759bbedd318ce20f091c9ac09f35eedf7c0c | /class_3.py | 8e4a350150fddd173976f5b344f9764dc62b5dc4 | [] | no_license | chernenkiyyyy/hw_python | 33f4325cd4ebb72c51bbc74a86287811c40165fd | a3b3440c062cb5807655b67de3b14c760dea6ab6 | refs/heads/master | 2020-03-25T12:20:50.059937 | 2018-09-03T22:18:23 | 2018-09-03T22:18:23 | 143,771,466 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,366 | py | class LogIn:
__slots__ = ["__dict__"]
def __init__(self):
self.unit_name = "user001"
self.mac_address = "123.153.256.96"
self.ip_address = "12.34.54.25"
self.login = "[email protected]"
self.password = "qwerty001"
@property
def get_unit_name(self):
return self.unit_name
@get_unit_name.setter
def get_unit_name(self, new_unit_name):
self.unit_name = new_unit_name
@property
def get_mac_address(self):
return self.mac_address
@get_mac_address.setter
def get_mac_address(self, new_mac_address):
self.mac_address = new_mac_address\
@property
def get_ip_address(self):
return self.ip_address
@get_ip_address.setter
def get_ip_address(self, new_ip_address):
self.ip_address = new_ip_address
@property
def get_login(self):
return self.login
@get_login.setter
def get_login(self, new_login):
self.login = new_login
@property
def get_password(self):
return self.password
@get_password.setter
def get_password(self, new_password):
self.password = new_password
log = LogIn()
print(log.__dict__)
log.get_unit_name = "00000"
log.get_mac_address = "11111"
log.get_ip_address = "22222"
log.get_login = "33333"
log.get_password = "44444"
print(log.__dict__) | [
"[email protected]"
] | |
30b534d409fdddb82db90e74490f1ffe75c06b86 | 337e04c14a975868229cd2cdda3bcacfdf016869 | /src/exp/dataset.py | c66040f5457d7c8309df38bd0087f1f5c646c178 | [] | no_license | esddse/IEN | 966ea827991b898900c5dc4cea9accf971d7e52c | 175d6be32e11135287c59c66c4d743b5c2782d52 | refs/heads/master | 2023-01-20T09:03:04.030694 | 2020-11-16T09:10:23 | 2020-11-16T09:10:23 | 300,647,427 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,306 | py | import os
import sys
import time
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), "..")))
import math
import random
import torch
import torch.autograd as autograd
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.utils.data import Dataset
from util.path import *
from util.data import *
from config.propara import NCETConfig
# ====================== main task =============================
class ProParaDataset(Dataset):
''' dataset for ProPara main task '''
def __init__(self, path_data_dir, word2index, label2index, max_word_length, padding=False, sent_location=False, max_data_size=None):
''' '''
datafile_name = "preprocess.pkl"
self.datas = load_pkl(os.path.join(path_data_dir, datafile_name))
self.max_data_size = max_data_size
self.word2index = word2index
self.label2index = label2index
self.pad_idx = 0
self.unk_idx = 1
self.max_word_length = max_word_length
self.padding = padding
if self.max_data_size:
self.datas = self.datas[:max_data_size]
def __getitem__(self, index):
data = self.datas[index]
doc_idx = data["doc_idx"]
# sentence
words, words_idxs, sent_lens = [], [], []
for sent in data["sentence"]:
words += sent
words_idxs += [self.word2index.get(word, self.unk_idx) for word in sent]
sent_lens.append(len(sent))
words_length = len(words)
sents_length = len(data["sentence"]) + 2 # add <SOS> & <EOS>
# verb
verbs = [0] * len(words)
verbs_idxs_sents = []
base_idx = 0
for i, verb_idxs in enumerate(data["verb_idxs"]):
verbs_idxs_sent = []
for idx in verb_idxs:
verbs[base_idx+idx] = 1
verbs_idxs_sent.append(base_idx+idx)
verbs_idxs_sents.append(verbs_idxs_sent)
base_idx += sent_lens[i]
# -------------------#
# entity #
# -------------------#
# entity
entity_to_idx, idx_to_entity = {}, {}
for i, entity in enumerate(data["entities"]):
entity_to_idx[entity] = i
idx_to_entity[i] = entity
# entity linking
entity_idxs_sents = {entity:[] for entity in data["entities"]}
for entity, entity_idxs in data["entity_idxs"].items():
base_idx = 0
for i, idxs in enumerate(entity_idxs):
entity_idxs_sent = []
for idx in idxs[0] if idxs else []:
entity_idxs_sent.append(base_idx+idx)
entity_idxs_sents[entity].append(entity_idxs_sent)
base_idx += sent_lens[i]
# stats gold label
entity_states_gold = {entity:[] for entity in data["entities"]}
for entity, gold in data["gold"].items():
for action in gold["action"]:
entity_states_gold[entity].append(self.label2index[action])
entity_states_gold[entity] = [1] + entity_states_gold[entity] + [2]
# -------------------------------#
# location candidates #
# -------------------------------#
# location candidates
location_candidate_to_idx, idx_to_location_candidate = {"-": 0, "?": 1}, {0: "-", 1: "?"}
for i, location_candidate in enumerate(data["location_candidates"]):
location_candidate_to_idx[location_candidate] = i+2
idx_to_location_candidate[i+2] = location_candidate
# location candidate linking
location_candidate_idxs_sents = {location_candidate:[] for location_candidate in data["location_candidates"]}
for location_candidate, location_candidate_idxs in data["location_candidate_idxs"].items():
base_idx = 0
for i, idxs in enumerate(location_candidate_idxs):
location_candidate_idxs_sent = []
for idx in idxs[0] if idxs else []:
location_candidate_idxs_sent.append(base_idx+idx)
location_candidate_idxs_sents[location_candidate].append(location_candidate_idxs_sent)
base_idx += sent_lens[i]
location_candidate_idxs_sents["-"] = [[] for _ in range(len(sent_lens))]
location_candidate_idxs_sents["?"] = [[] for _ in range(len(sent_lens))]
# location gold label
entity_locations_gold = {entity:[] for entity in data["entities"]}
for entity, gold in data["gold"].items():
for location in gold["location_after"]:
entity_locations_gold[entity].append(location_candidate_to_idx.get(location, 1))
entity_locations_gold[entity] = [location_candidate_to_idx.get(gold["location_before"][0], 0)] + entity_locations_gold[entity] + [0]
# padding
if self.padding:
words_idxs = padding_sequence(words_idxs, self.max_word_length, self.pad_idx)
verbs = padding_sequence(verbs, self.max_word_length, self.pad_idx)
data = {
"doc_idx": doc_idx,
"words": words,
"words_idxs": words_idxs,
"verbs": verbs,
"words_length": words_length,
"sents_length": sents_length,
"verbs_idxs_sents": verbs_idxs_sents,
"entity_idxs_sents": entity_idxs_sents,
"entity_states_gold": entity_states_gold,
"location_candidate_idxs_sents": location_candidate_idxs_sents,
"entity_locations_gold": entity_locations_gold,
"entity_to_idx": entity_to_idx,
"idx_to_entity": idx_to_entity,
"location_candidate_to_idx": location_candidate_to_idx,
"idx_to_location_candidate": idx_to_location_candidate
}
return data
def __len__(self):
return len(self.datas)
# ========= main ========
if __name__ == '__main__':
config = NCETConfig()
dataset = ProParaDataset(path_leaderboard_train_dir, config.word2index, config.label2index, config.max_word_length)
dataset[0] | [
"[email protected]"
] | |
bcf9d5165d2472f4481ea6adbbe98aeff68e9357 | 07cf86733b110a13224ef91e94ea5862a8f5d0d5 | /organizing_containers_of_balls/organizing_containers_of_balls.py | 2e9ae573293f4edce2c0ed2d9a2559bd066b30f0 | [] | no_license | karsevar/Code_Challenge_Practice | 2d96964ed2601b3beb324d08dd3692c3d566b223 | 88d4587041a76cfd539c0698771420974ffaf60b | refs/heads/master | 2023-01-23T17:20:33.967020 | 2020-12-14T18:29:49 | 2020-12-14T18:29:49 | 261,813,079 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,432 | py | def organizingContainers(container):
print(container)
# create an array that will carry the rows
# create an array that will carry the cols
# create a for loop that will iterate through the each row
# make sure to sum the total of all the values in the current
# row and append it the sum to the rows array
# create a col_sum variable initialize the value to zero
# create an additional for loop that will iterate through each of the
# columns in the matrix
# add the current column value to col_sum
# append col_sum to the cols array
# create an additional for loop that will iterate through the
# rows and columns
# check if the rows and columns are equal to each other
rows = []
columns = []
col_index = 0
while col_index != len(container[0]):
column_sum = 0
for row in range(len(container)):
# print('column value', container[row][col_index])
column_sum += container[row][col_index]
columns.append(column_sum)
col_index += 1
for row in container:
rows.append(sum(row))
# print('rows array', rows)
# print('columns array', columns)
rows.sort()
columns.sort()
for list_compare in range(len(rows)):
if rows[list_compare] != columns[list_compare]:
return 'Impossible'
return 'Possible' | [
"[email protected]"
] | |
2709eac3fe5e7b170c5ab00fd322d5bac728aaf8 | 7f7210d73d2623ca374d181aba3bc9cad3b400ee | /posts/migrations/0001_initial.py | b4fbf8fc2b00aa2d401bddbc1343e7e9b78db270 | [] | no_license | Vinstol/hw05_final | 7d6ff8e451a4b3aa2fa8324f6fff9b8e59df4d96 | f8db603fa5c31c67020960e3127a207b6ea44f69 | refs/heads/master | 2023-08-10T16:20:08.072624 | 2021-09-28T16:38:32 | 2021-09-28T16:38:32 | 346,751,756 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 846 | py | # Generated by Django 2.2 on 2020-12-22 14:07
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Post',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('text', models.TextField()),
('pub_date', models.DateTimeField(auto_now_add=True, verbose_name='date published')),
('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='posts', to=settings.AUTH_USER_MODEL)),
],
),
]
| [
"[email protected]"
] | |
371f4cf760cf94f2350a63877f0cf1e1a112944f | 56335b17fe5fdea951bc947a510309b41fba4c1e | /Lecture9/Photoshop.py | f8ca68ea7567c2c649da22a8ce3da080870e10d3 | [] | no_license | FangFeng-077/easy-python | 6bed4541686afe0abbe5d53fb836e11ac0c7fab8 | a3237a9e98acabe2cfa11634f7444369e551f2c1 | refs/heads/master | 2020-06-16T15:24:24.433679 | 2019-07-13T00:25:40 | 2019-07-13T00:25:40 | 195,621,902 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,062 | py | from simpleimage import SimpleImage
def red_channel(filename):
"""
Creates an image for the given filename.
Changes the image as follows:
For every pixel, set green and blue values to 0
yielding the red channel.
Return the changed image.
"""
image = SimpleImage(filename)
for pixel in image:
pixel.green = 0
pixel.blue = 0
return image
def darker(filename):
"""
Makes the image darker by halving red,green,blue values.
Returns the changed image.
"""
# Demonstrate looping over all the pixels of an image,
# using pixel.xxx in the loop to change each pixel,
# int division, relative var updates.
image = SimpleImage(filename)
for pixel in image:
pixel.red = pixel.red // 2
pixel.green = pixel.green // 2
pixel.blue = pixel.blue // 2
# Could use += shorthand:
# pixel.blue //= 2
return image
def right_half(filename):
"""
Change and return the image:
make right half of the image
to be 50% as bright. Use int division
to compute where right half begins.
Properties reminder:
pixel.x pixel.y image.width image.height
Also try bottom half:
pixel.y >= image.height // 2
"""
image = SimpleImage(filename)
for pixel in image:
# if pixel is in right half of image
# (e.g. width is 100, right half begins at x=50)
if pixel.x >= image.width // 2:
pixel.red *= 0.5
pixel.green *= 0.5
pixel.blue *= 0.5
return image
def right_quarter(filename):
"""
As above, but do the lower right quarter.
Use "and" to combine 2 <= tests.
"""
image = SimpleImage(filename)
for pixel in image:
if (pixel.x >= image.width // 2 and
pixel.y >= image.height // 2):
pixel.red *= 0.5
pixel.green *= 0.5
pixel.blue *= 0.5
return image
def grayscale(filename):
"""
Change the image to be grayscale
using the "average" technique
and return it.
"""
image = SimpleImage(filename)
for pixel in image:
average = (pixel.red + pixel.green + pixel.blue) // 3
pixel.red = average
pixel.green = average
pixel.blue = average
return image
def curb_repair1(filename):
"""
Detect the red curb pixels, change them
to 180/180/180 gray.
This code does the gray setting,
but the hurdle factor needs to be adjusted.
Looks ok but not great.
"""
image = SimpleImage(filename)
for pixel in image:
average = (pixel.red + pixel.green + pixel.blue) // 3
if pixel.red >= average * 1.0:
pixel.red = 180
pixel.blue = 180
pixel.green = 180
return image
def curb_repair2(filename):
"""
Detect the red curb pixels, change them
to grayscale. The code here is complete:
factor is adjusted and grayscale is in.
This looks good!
"""
image = SimpleImage(filename)
for pixel in image:
average = (pixel.red + pixel.green + pixel.blue) // 3
if pixel.red >= average * 1.1:
pixel.red = average
pixel.blue = average
pixel.green = average
return image
def stop_leaves(front_filename, back_filename):
"""
Implement stop_leaves as described.
Detect red areas of stop sign.
Replace red pixels with pixels from corresponding x,y
from back image.
"""
image = SimpleImage(front_filename)
back = SimpleImage(back_filename)
for pixel in image:
average = (pixel.red + pixel.green + pixel.blue) // 3
if pixel.red >= average * 1.6:
# the key line:
pixel_back = back.get_pixel(pixel.x, pixel.y)
pixel.red = pixel_back.red
pixel.green = pixel_back.green
pixel.blue = pixel_back.blue
return image
def mirror(filename):
"""
Copy the original image
to the right half of "out", but as a horizontally reversed
mirror image. So the left half is a regular copy,
and the right half is a mirror image.
"""
image = SimpleImage(filename)
out = SimpleImage.blank(image.width * 2, image.height)
for y in range(image.height):
for x in range(image.width):
pixel = image.get_pixel(x, y)
# left copy
pixel_left = out.get_pixel(x, y)
pixel_left.red = pixel.red
pixel_left.green = pixel.green
pixel_left.blue = pixel.blue
# right copy
pixel_right = out.get_pixel(out.width - 1 - x, y)
pixel_right.red = pixel.red
pixel_right.green = pixel.green
pixel_right.blue = pixel.blue
return out
def shrink(filename):
"""
Create a new "out" image half the width and height
of the original.
Set pixels at x=0 1 2 3 in out, from x=0 2 4 6 in original,
and likewise in the y direction.
"""
image = SimpleImage(filename)
out = SimpleImage.blank(image.width // 2, image.height // 2)
# Here looping x,y over out, not original
for y in range(out.height):
for x in range(out.width):
pixel_out = out.get_pixel(x, y)
orig_pixel = image.get_pixel(x * 2, y * 2)
pixel_out.red = orig_pixel.red
pixel_out.green = orig_pixel.green
pixel_out.blue = orig_pixel.blue
return out
def flip_horizontal(filename):
"""
Create a new "out" image that has been flipped horizontally
from the original.
Reverses the pixels at opposite x values.
"""
image = SimpleImage(filename)
# Here looping x,y over out, not original
for y in range(image.height):
for x in range(image.width // 2):
pixel = image.get_pixel(x, y)
opposite_pixel = image.get_pixel(image.width - 1 - x, y)
# Temp variables to store old pixel RGB values
temp_red = pixel.red
temp_green = pixel.green
temp_blue = pixel.blue
# Update pixel
pixel.red = opposite_pixel.red
pixel.green = opposite_pixel.green
pixel.blue = opposite_pixel.blue
# Update opposite pixel
opposite_pixel.red = temp_red
opposite_pixel.green = temp_green
opposite_pixel.blue = temp_blue
return image
def main():
"""
Run your desired photoshop functions here.
You should save the return value of the image and then
call .show() to visualize the output of your program.
"""
original_poppy = SimpleImage('images/poppy.png')
original_poppy.show()
original_dandelion = SimpleImage('images/dandelion.png')
original_dandelion.show()
redder_poppy = red_channel('images/poppy.png')
redder_poppy.show()
darker_poppy = darker('images/poppy.png')
darker_poppy.show()
right_half_poppy = right_half('images/poppy.png')
right_half_poppy.show()
right_quarter_poppy = right_quarter('images/poppy.png')
right_quarter_poppy.show()
grayscale_poppy = grayscale('images/poppy.png')
grayscale_poppy.show()
grayscale_dandelion = grayscale('images/dandelion.png')
grayscale_dandelion.show()
original_curb = SimpleImage('images/curb.png')
original_curb.show()
curb_repair_first = curb_repair1('images/curb.png')
curb_repair_first.show()
curb_repair_second = curb_repair2('images/curb.png')
curb_repair_second.show()
original_stop = SimpleImage('images/stop.png')
original_stop.show()
original_leaves = SimpleImage('images/leaves.png')
original_leaves.show()
stop_leaves_replaced = stop_leaves('images/stop.png', 'images/leaves.png')
stop_leaves_replaced.show()
mirror_poppy = mirror('images/poppy.png')
mirror_poppy.show()
small_leaves = shrink('images/leaves.png')
small_leaves.show()
shrink_leaves = flip_horizontal('images/poppy.png')
shrink_leaves.show()
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
5be7d08b4036d8cfe107fc93a3466badd3f558e7 | eed258365d6ee69a051aab3403f203cb25118774 | /meain.py | 4fb24c3242f9074a397fcf8d02a6a424e9ef9f07 | [] | no_license | dearodriguezve/MRI | be69fba7fa1e897cfcb7556cd3982d8ef06b65d6 | cc61533838cbbff1dce3540827f39f2e902fbd44 | refs/heads/master | 2020-04-28T22:04:09.072580 | 2019-03-14T11:03:00 | 2019-03-14T11:03:00 | 175,604,608 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,129 | py | import os
from sklearn.utils import shuffle
import skimage
import h5py as h5
import numpy as np
import matplotlib.pyplot as plt
import pydicom as dicom
from skimage.color import rgb2hsv
from sklearn import metrics, model_selection
import tensorflow as tf
from tensorflow.contrib import learn
import scipy
from skimage import data, io, restoration,segmentation, filters
from skimage.color import rgb2gray
from scipy.signal import convolve2d
def eliminacionRuido(imagen , mostrar=False):
grayscale = rgb2gray(imagen)
rst_DNM =filters.gaussian(grayscale,sigma=1.5)
if mostrar:
fig, axes = plt.subplots(1, 3, figsize=(8, 4))
ax = axes.ravel()
ax[0].imshow(imagen)
ax[0].set_title("Original")
ax[1].imshow(rst_DNM, cmap=plt.cm.gray)
ax[1].set_title("Desnoise")
ax[2].imshow(grayscale, cmap=plt.cm.gray)
ax[2].set_title("GrayScale")
fig.tight_layout()
plt.show()
return rst_DNM
def segmentacion(imagen, mostrar = False):
"""labelArray = measure.label(pixel_array_numpy, return_num=True, neighbors=4)
print(labelArray)
imagenSegmentada = segmentation.quickshift(image,convert2lab=False)
io.imsave('segmetada1.jpg', imagenSegmentada)"""
"""imagenSegmentada=segmentation.random_walker(imagen, labelArray)"""
img =imagen
thresh = filters.threshold_otsu(img)
binary = img <= thresh
if(mostrar):
fig, axes = plt.subplots(1, 2, figsize=(8, 4))
ax = axes.ravel()
ax[0].imshow(imagen, cmap=plt.cm.gray)
ax[0].set_title("Original")
ax[1].imshow(binary, cmap=plt.cm.gray)
ax[1].set_title("Segmentation")
fig.tight_layout()
plt.show()
return binary
def leerMat(direccion, mostrar = False):
matA = h5.File(direccion,'r')
imagen =matA['/cjdata/image']
if np.array(imagen).shape[0] != 512 or np.array(imagen).shape[1] != 512:
return [],[]
label = int(matA['/cjdata/label'][0][0])
array = np.mat(imagen)
imagenfloat= skimage.img_as_float(array)
if mostrar:
fig, axes = plt.subplots(1, 1, figsize=(8, 4))
ax = axes.ravel()
ax[0].imshow(imagenfloat)
ax[0].set_title("Original")
ax[1].imshow(imagenfloat, cmap=plt.cm.magma)
ax[1].set_title("Magama")
ax[2].imshow(imagenfloat, cmap=plt.cm.gray)
ax[2].set_title("Gray")
fig.tight_layout()
plt.show()
return imagenfloat, label
def creacionDataset(num=3064):
image_train = []
label_train = []
for i in range (1, num):
imagen = leerMat("data/" + str(i) + ".mat")[0];
if imagen != []:
image_train.append(segmentacion(eliminacionRuido(imagen)))
label_train.append(int(leerMat("data/" + str(i) + ".mat")[1])- 1)
return image_train,label_train
def clasificacion():
data =creacionDataset()
print("soy una gueva")
#use scikit.learn.datasets in the future
print(len(data[0]),"gonorrea",len(data[1]))
image_train = np.array(data[0])
label_train = np.array(data[1])
image_train =image_train.reshape(image_train.shape[0], image_train.shape[1] * image_train.shape[2])
label_train = label_train.reshape(label_train.shape[0], )
image_train, label_train = shuffle(image_train, label_train, random_state=42)
x_train, x_test, y_train, y_test = model_selection.train_test_split(image_train, label_train, test_size = .3, random_state = 42)
#build 3 layer DNN with 10 20 10 units respectively
feature_columns = [tf.contrib.layers.real_valued_column("", dimension=1)]
classifier = learn.DNNClassifier(feature_columns=feature_columns, hidden_units=[10,20,10],n_classes=3)
# #fit and predict
classifier.fit(x_train, y_train, steps = 200)
x_predict = classifier.predict_classes(x_test)
x_predict = [x for x in x_predict ]
score = metrics.accuracy_score(y_test, x_predict)
print('Accuracy: {0:f}'.format(score))
if __name__ == "__main__":
clasificacion()
| [
"[email protected]"
] | |
eab0ae860ce7d4455552dd826541ec4705c7b60c | b9a69d7de64b96623d519394d59286f774e92c1f | /mnist-classification/src/model/net.py | 834f07afb7480521e83b753958f8fc64e4e204b6 | [
"MIT"
] | permissive | Ronalmoo/DataScience | abb20320c7fe5b68192e1ceeb956eb199bb1aecb | 175f610acaa50898a3899bfaddfbb5530ef640a9 | refs/heads/main | 2023-05-02T23:44:40.824085 | 2021-05-26T08:01:31 | 2021-05-26T08:01:31 | 370,643,477 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,829 | py | import copy
from typing import Dict, List, Optional
import torch
import torch.nn as nn
from omegaconf import DictConfig, OmegaConf
from torchsummary import summary as torch_summary
class LinearBlock(nn.Module):
def __init__(
self,
in_features: int,
out_features: int,
bias: bool = False,
activation: Optional[Dict] = None,
) -> None:
"""[summary]
Args:
in_features (int): [description]
out_features (int): [description]
bias (bool, optional): [description]. Defaults to False.
activation (Optional[Dict], optional): [description]. Defaults to None.
"""
super(LinearBlock, self).__init__()
self.linear = nn.Linear(
in_features=in_features, out_features=out_features, bias=bias
)
self.activation = activation
if self.activation:
self.activation = getattr(nn, activation["type"])(**activation["args"])
def forward(self, x):
x = self.linear(x)
if self.activation:
x = self.activation(x)
return x
class ConvBlock(nn.Module):
def __init__(
self,
in_channels: int,
out_channels: int,
kernel_size: int = 3,
stride: int = 1,
padding: int = 0,
dilation: int = 1,
groups: int = 1,
bias: bool = True,
padding_mode: str = "zeros",
activation: Optional[Dict] = None,
pool: Optional[Dict] = None,
) -> None:
"""[summary]
Args:
in_channels (int): [description]
out_channels (int): [description]
kernel_size (int, optional): [description]. Defaults to 3.
stride (int, optional): [description]. Defaults to 1.
padding (int, optional): [description]. Defaults to 0.
dilation (int, optional): [description]. Defaults to 1.
groups (int, optional): [description]. Defaults to 1.
bias (bool, optional): [description]. Defaults to True.
padding_mode (str, optional): [description]. Defaults to "zeros".
batch_norm (bool, optional): [description]. Defaults to False.
activation (Optional[Dict], optional): [description]. Defaults to None.
pool (Optional[Dict], optional): [description]. Defaults to None.
"""
super(ConvBlock, self).__init__()
self.conv = nn.Conv2d(
in_channels=in_channels,
out_channels=out_channels,
kernel_size=kernel_size,
stride=stride,
padding=padding,
dilation=dilation,
groups=groups,
bias=bias,
padding_mode=padding_mode,
)
self.activation = activation
if self.activation:
self.activation = getattr(nn, activation["type"])(**activation["args"])
self.pool = pool
if self.pool:
# yaml not supported tuple. omegaconf too
pool_dict = dict(pool)
kernel_size = tuple(list(pool.args.kernel_size))
old_args = pool_dict.pop("args", None)
new_args = {}
for key in old_args.keys():
if key == "kernel_size":
continue
new_args.update({key: old_args[key]})
new_args.update({"kernel_size": kernel_size})
pool_dict.update({"args": new_args})
self.pool = getattr(nn, pool_dict["type"])(**pool_dict["args"])
def forward(self, x) -> torch.Tensor:
x = self.conv(x)
if self.activation:
x = self.activation(x)
if self.pool:
x = self.pool(x)
return x
def _build_linear_layers(linear_layers_config: DictConfig) -> torch.nn.ModuleList:
return nn.ModuleList([LinearBlock(**params) for params in linear_layers_config])
def _build_conv_layers(conv_layers_config: DictConfig) -> torch.nn.ModuleList:
return nn.ModuleList([ConvBlock(**params) for params in conv_layers_config])
def _build_output_layer(output_layer_config) -> torch.nn.Module:
return getattr(nn, output_layer_config["type"])(**output_layer_config["args"])
class LeNet(nn.Module):
CLASS_MAP = {
0: "0",
1: "1",
2: "2",
3: "3",
4: "4",
5: "5",
6: "6",
7: "7",
8: "8",
9: "9",
}
def __init__(self, model_config: DictConfig) -> None:
"""[summary]
Args:
model_config (DictConfig): [description]
"""
super(LeNet, self).__init__()
self._width: int = model_config.params.width
self._height: int = model_config.params.height
self._channels: int = model_config.params.channels
self.input_shape: tuple = (self._channels, self._height, self._width)
self.in_channels: int = self._channels
self.conv_layers: nn.ModuleList = _build_conv_layers(
conv_layers_config=model_config.params.feature_layers.conv
)
self.linear_layers: nn.ModuleList = _build_linear_layers(
linear_layers_config=model_config.params.feature_layers.linear
)
self.output_layer = _build_output_layer(
output_layer_config=model_config.params.output_layer
)
self.loss_fn = nn.CrossEntropyLoss()
def forward(self, x):
for conv_layer in self.conv_layers:
x = conv_layer(x)
x = x.view(x.size()[0], -1)
for linear_layer in self.linear_layers:
x = linear_layer(x)
return x
def loss(self, x, y):
return self.loss_fn(x, y)
def inference(self, x: torch.Tensor):
outputs = self.forward(x)
outputs = self.output_layer(outputs)
outputs = outputs.to("cpu")
output_shape = outputs.shape
predictions = []
for i in range(output_shape[0]):
indices = int(torch.topk(outputs[i], 1).indices.squeeze().numpy())
predictions.append(self.CLASS_MAP[indices])
return predictions
def summary(self):
# torchsummary only supported [cuda, cpu]. not cuda:0
device = str(self.device).split(":")[0]
torch_summary(
self,
input_size=(self._channels, self._height, self._width),
device=device,
)
@property
def device(self):
devices = {param.device for param in self.parameters()} | {
buf.device for buf in self.buffers()
}
if len(devices) != 1:
raise RuntimeError(
"Cannot determine device: {} different devices found".format(
len(devices)
)
)
return next(iter(devices))
| [
"[email protected]"
] | |
cea6cd25d2468ada9807bec6d45b481c79331e03 | 98895f2c4a11195495e4e94d7b1821d3ec229f17 | /blockchain-env/Lib/site-packages/pubnub/models/consumer/pubsub.py | 8c8e3eac97b429df2b278505652433e00885d04c | [
"MIT"
] | permissive | zarif007/Block-Chain-Web-App | 130a0e44fb84c622a6fbc496ead1a45b0caf6065 | 40bd4d8d8ce1f6de2840792290bf022d7dfacbb4 | refs/heads/main | 2023-02-20T01:32:41.974440 | 2021-01-23T20:04:01 | 2021-01-23T20:04:01 | 317,946,069 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,655 | py | import six
from pubnub.models.consumer.message_actions import PNMessageAction
class PNMessageResult(object):
def __init__(self, message, subscription, channel, timetoken, user_metadata=None, publisher=None):
assert message is not None
if subscription is not None:
assert isinstance(subscription, six.string_types)
if channel is not None:
assert isinstance(channel, six.string_types)
if publisher is not None:
assert isinstance(publisher, six.string_types)
assert isinstance(timetoken, six.integer_types)
if user_metadata is not None:
assert isinstance(user_metadata, object)
self.message = message
# DEPRECATED: subscribed_channel and actual_channel properties are deprecated
# self.subscribed_channel = subscribed_channel <= now known as subscription
# self.actual_channel = actual_channel <= now known as channel
self.channel = channel
self.subscription = subscription
self.timetoken = timetoken
self.user_metadata = user_metadata
self.publisher = publisher
class PNSignalMessageResult(PNMessageResult):
pass
class PNFileMessageResult(PNMessageResult):
def __init__(
self, message, subscription,
channel, timetoken, publisher,
file_url, file_id, file_name
):
super(PNFileMessageResult, self).__init__(message, subscription, channel, timetoken, publisher=publisher)
self.file_url = file_url
self.file_id = file_id
self.file_name = file_name
class PNPresenceEventResult(object):
def __init__(self, event, uuid, timestamp, occupancy, subscription, channel,
timetoken, state, join, leave, timeout, user_metadata=None):
assert isinstance(event, six.string_types)
assert isinstance(timestamp, six.integer_types)
assert isinstance(occupancy, six.integer_types)
assert isinstance(channel, six.string_types)
assert isinstance(timetoken, six.integer_types)
if user_metadata is not None:
assert isinstance(user_metadata, object)
if state is not None:
assert isinstance(state, dict)
self.event = event
self.uuid = uuid
self.timestamp = timestamp
self.occupancy = occupancy
self.state = state
self.join = join
self.leave = leave
self.timeout = timeout
# DEPRECATED: subscribed_channel and actual_channel properties are deprecated
# self.subscribed_channel = subscribed_channel <= now known as subscription
# self.actual_channel = actual_channel <= now known as channel
self.subscription = subscription
self.channel = channel
self.timetoken = timetoken
self.user_metadata = user_metadata
class PNMessageActionResult(PNMessageAction):
def __init__(self, result):
super(PNMessageActionResult, self).__init__(result)
class PNPublishResult(object):
def __init__(self, envelope, timetoken):
"""
Representation of publish server response
:param timetoken: of publish operation
"""
self.timetoken = timetoken
def __str__(self):
return "Publish success with timetoken %s" % self.timetoken
class PNFireResult(object):
def __init__(self, envelope, timetoken):
"""
Representation of fire server response
:param timetoken: of fire operation
"""
self.timetoken = timetoken
def __str__(self):
return "Fire success with timetoken %s" % self.timetoken
| [
"[email protected]"
] | |
c195715efd133bb7dc0c497d1178122f5a11b55f | 5ae45b99eb9c590f64f3a7d3f1b26915bef87831 | /solutions/040/040.py | 14a25ca0a1cb4b5aab7c9e665966f2f5922edfd8 | [
"LicenseRef-scancode-warranty-disclaimer"
] | no_license | jwmcgettigan/project-euler-solutions | 5c4cb7fb32e9ba4f3d9b0edc89853275c302ee03 | f06b6551e713619d5fd1359ee2f96fcff61c425b | refs/heads/master | 2023-07-13T21:24:03.961946 | 2021-08-19T21:03:12 | 2021-08-19T21:03:12 | 289,103,259 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,121 | py | """
Project Euler - Problem Solution 040
Problem Title - Champernowne's constant
Copyright (c) Justin McGettigan. All rights reserved.
https://github.com/jwmcgettigan/project-euler-solutions
"""
#TODO: Come back and make this more readable.
def fractional_part(nth):
''' Finds the nth digit of Champernowne's constant. '''
summation, num_sum = 0, 0
multiplier, series_num = 0, 0
while summation < nth:
multiplier += 1
num_sum += series_num
series_num = 9*(10**(multiplier-1))
summand = series_num*multiplier
summation += summand
summation -= summand
digits_in = nth-summation
numbers_in = digits_in // multiplier
chars_extra = digits_in % multiplier
number = num_sum + numbers_in + (chars_extra != 0)
return int(str(number)[chars_extra - (chars_extra != 0)])
def champernownes_constant():
''' Finds the product of the nth digits of Champernowne's constant. '''
product = 1
nth_digits = [1, 10, 100, 1000, 10000, 100000, 1000000]
for nth in nth_digits:
product *= fractional_part(nth)
return product
if __name__ == "__main__":
print(champernownes_constant())
| [
"[email protected]"
] | |
23583dd8d89367e9278342fad2e570d58355e578 | e236c4e0b74261230b4a5419d077afb9fca14a93 | /italy/aggregate_italy_data.py | 03bf9481a31c6a2f5858a3612ab33a99cf3612bc | [] | no_license | necsi/database | 6617ad57d5efa9bb979b89befe578bf89900aa66 | 07d94742b26daf1628cfe25429b0b32bb79bfbf1 | refs/heads/master | 2021-02-13T17:19:28.725607 | 2020-04-29T01:28:42 | 2020-04-29T01:28:42 | 244,716,306 | 6 | 4 | null | 2020-03-10T20:05:01 | 2020-03-03T18:52:41 | Python | UTF-8 | Python | false | false | 3,576 | py | import requests
import codecs
import csv
from contextlib import closing
import numpy as np
import pandas as pd
import json
from datetime import datetime, timedelta
def download_csv_to_dataframe(url):
"""
Download a CSV file and return a Pandas DataFrame.
:param url: str
:return: pandas.DataFrame
"""
with closing(requests.get(url, stream=True)) as r:
reader = csv.reader(codecs.iterdecode(r.iter_lines(), 'utf-8'), delimiter=',', quotechar='"')
data = [row for row in reader]
header_row = data[0]
data = data[1:]
df = pd.DataFrame(data = data, index=np.arange(1, len(data)+1), columns=header_row)
return df
def clean_italy_data(df):
"""
Clean italy data
:param df: pandas.DataFrame
:return: pandas.DataFrame
"""
df = df[['data', 'stato', 'denominazione_regione', 'denominazione_provincia', 'lat', 'long', 'totale_casi']]
df.columns = ['Last Updated', 'Country/Region', 'Region', 'Province/State', 'Latitude', 'Longitude', 'Confirmed']
df['Confirmed'] = df.Confirmed.apply(lambda x: int(x))
# Add expected columns
df_rows = df.shape[0]
df['City'] = np.repeat(np.nan, df_rows)
df['Deaths'] = np.repeat(np.nan, df_rows)
df['Recovered'] = np.repeat(np.nan, df_rows)
df['Source'] = np.repeat('https://raw.githubusercontent.com/pcm-dpc/COVID-19/master/dati-province/dpc-covid19-ita-province.csv', df_rows)
# Reorder columns
df = df[['Country/Region', 'Region', 'Province/State', 'City', 'Latitude', 'Longitude', 'Confirmed', 'Deaths', 'Recovered', 'Last Updated', 'Source']]
df['Country/Region'] = df['Country/Region'].apply(lambda x: 'Italy')
return df
def create_json_for_mapping_software(df):
"""
Clean italy data
:param df: pandas.DataFrame
:return: None
"""
# Convert Last Updated to datetime object
df['Last Updated'] = df['Last Updated'].apply(lambda x: datetime.strptime(x, '%Y-%m-%d %H:%M:%S'))
# Group by province
confirmed_by_region = df.groupby(['Region']).sum()[['Confirmed']].apply(lambda g: g.values.tolist()).to_dict()['Confirmed']
# Get daily confirmed case deltas
yesterday = datetime.now() - timedelta(days=1)
day_before_yesterday = datetime.now() - timedelta(days=2)
yesterday_confirmed_count_by_region = df[df['Last Updated'] >= yesterday].sort_values(by=['Last Updated']).groupby(['Region']).sum()[['Confirmed']].apply(lambda g: g.values.tolist()).to_dict()['Confirmed']
day_before_yesterday_confirmed_count_by_region = df[(df['Last Updated'] >= day_before_yesterday) & (df['Last Updated'] <= yesterday)].sort_values(by=['Last Updated']).groupby(['Region']).sum()[['Confirmed']].apply(lambda g: g.values.tolist()).to_dict()['Confirmed']
# Create required dictionary structure
format_for_map = {}
for key, value in yesterday_confirmed_count_by_region.items():
delta = value - day_before_yesterday_confirmed_count_by_region[key]
format_for_map[key] = {'scalerank': confirmed_by_region[key], 'one_day': delta}
# Save dictionary as json file
with open('italy/italy-confirmed-by-region.json', 'w') as json_file:
json.dump(format_for_map, json_file)
return None
# Download CSV to pandas Dataframe
df = download_csv_to_dataframe('https://raw.githubusercontent.com/pcm-dpc/COVID-19/master/dati-province/dpc-covid19-ita-province.csv')
# Clean data
df = clean_italy_data(df)
# Save CSV for later aggregation
df.to_csv('italy/italy-data.csv', index=False)
create_json_for_mapping_software(df)
| [
"[email protected]"
] | |
5b10666ae15a82142faedc726b836d3690ee74ac | c8e351c024f2df5f20c6166322290cd97554f529 | /MemSys.py | dcf1fdc1d018b4cdadd7b83fa268a9bb194be666 | [] | no_license | acadev/LipidBilayerAnalysis | a8c19b3058afc6677219a274aa54e776683a4bbe | f7ba9c73e45130cf044ec81829642960617e94f0 | refs/heads/master | 2020-12-26T00:46:48.947215 | 2016-08-23T22:43:31 | 2016-08-23T22:43:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 84,710 | py | """
This MemSys module defines various classes and functions used to process and analyze a lipid
bilayer trajectory. This module assumes the structure and trajectory are initially stored in MDAnalysis
objects and therefore processes MDAnalysis objects. The lipids constituting the bilayer are read in from
the MDAnalysis objects and are converted to center of mass (COM) representations. Lipids are partitioned
into an 'upper' and a 'lower' leaflet based on the z-position of the COM. The built-in analysis functions
then operate on the COM representations to compute quantities such as the lateral mean squared displacement.
Many analysis functions allow specification of the leaflet and lipid type on which to perform the analysis.
The primary (parent) class is the MemSys class. The analysis functions are members of the MemSys class.
Example initialization:
import MemSys as ms
mem_sys = ms.MemSys(mda_universe.trajectory,mda_selection_of_bilayer_lipids)
"""
#imports
import numpy as np
import matplotlib.cm as cm
import os
import sys
import shutil
import shelve
import multiprocessing as mp
from scipy.spatial import Voronoi
from scipy.spatial import Delaunay
#import copy
#import my running stats class
from RunningStats import *
# import the coordinate wrapping function--for unwrapping
from pUnwrap import mda_wrap_coordinates,mda_wrap_coordinates_parallel
# assumes that a 1d numpy array of floats is passed as input, but
# does not check this
def GenRunningAverage(onednparray):
"""
    Generates a running average array corresponding to
the data in a 1d numpy array.
Parameters
----------
    onednparray : a 1d numpy array, assumed to be an array of floats
Returns
-------
2d numpy array of dim len(onednparray)x2
2dnparray[i][0] = running mean at i
2dnparray[i][1] = running standard deviation at i
{i = 0; i < len(onednparray)}
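    Example (a minimal sketch; the data values are arbitrary):
        import numpy as np
        data = np.array([1.0, 2.0, 3.0, 4.0])
        avgs = GenRunningAverage(data)
        # avgs[-1,0] is the mean of all four values (2.5) and
        # avgs[-1,1] is the corresponding running standard deviation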
"""
averager = RunningStats()
nele = len(onednparray)
output = np.zeros((nele,2))
for i in xrange(nele):
averager.Push(onednparray[i])
run_avg = averager.Mean()
run_dev = averager.Deviation()
output[i,0] = run_avg
output[i,1] = run_dev
return output
# This function is incomplete!
def ColorizeStepVectorClusters(vectors):
nvecs = len(vectors)
    cluster_ids = np.zeros(nvecs, dtype=np.int)  # placeholder for the cluster assignments (unused for now)
colors_out = np.zeros(nvecs)
return "nothing yet!"
class LipidCOM:
"""
    A lipid center of mass (COM) object. This object stores the COM coordinates
of a lipid (or other molecule or group of atoms) computed from both the wrapped
and unwrapped atomic coordinates. This object also stores information about the
type of lipid as well as the total mass of the lipid.
"""
def __init__(self):
"""
        This is the initialization function of the LipidCOM object.
This function initializes all the LipidCOM instance attributes and assigns
some default values.
Parameters
----------
void
Returns
-------
void
"""
# lipid type/resname or other name
self.type="UNK"
# wrapped coordinates
self.com=np.zeros(3)
# unwrapped coordinates
self.com_unwrap=np.zeros(3)
# total mass
self.mass=1.0
return
    # The name of this function could be changed to be more descriptive, e.g.
# extract_com_mda_residue
def extract(self, mda_residue, unwrap=False):
"""
This function "extracts" the center of mass (COM) of an MDAnalysis residue.
This function calls the MDAnalysis member function center_of_mass() of the residue
to compute the center of mass of the atoms constituting the residue.
Parameters
----------
mda_residue : an MDAnalysis residue object
        unwrap : bool, Optional
            False (default) - The COM coordinates are stored in the
            container designated for the wrapped coordinate representation
            (and copied into the unwrapped container).
            True - The COM coordinates are stored in the container designated
            for the unwrapped coordinate representation.
Returns
-------
void
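        Example (illustrative; assumes an MDAnalysis universe u whose
        residues are lipids):
            lipid_com = LipidCOM()
            # wrapped COM - periodic boundary conditions applied
            lipid_com.extract(u.residues[0])
            # unwrapped COM - e.g. after the coordinates have been unwrapped
            lipid_com.extract(u.residues[0], unwrap=True)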
"""
if unwrap:
self.com_unwrap = mda_residue.center_of_mass()
else:
self.com = mda_residue.center_of_mass(pbc=True)
self.com_unwrap = self.com[:]
self.type=mda_residue.resname
return
# a frame object
class Frame:
"""
A molecular dynamics Frame object. This object stores all the LipidCOM objects
corresponding to a specific timestep, as well as other information about that
    timestep including the rectangular box dimensions and the simulation time.
"""
# does not check that nlipids is an int
def __init__(self, nlipids):
"""
        This is the initialization function of the Frame object.
This function initializes all the Frame instance attributes and assigns
some default values.
Parameters
----------
nlipids : int, The number of lipids (LipidCOM objects) that this frame contains
Returns
-------
void
"""
# list to store the nlipids LipidCOM objects
self.lipidcom = []
# box dimensions
self.box = np.zeros(3)
# simulation time
self.time = np.zeros(1)
# frame number
self.number = np.zeros(1,dtype=np.int)
# initialize all the LipidCOM objects
for i in xrange(nlipids):
self.lipidcom.append(LipidCOM())
return
def SetBox(self, box_lengths):
"""
This member function is used to set the box dimensions of a Frame.
Parameters
----------
box_lengths : numpy array - 1d, 3 element numpy array containing the x,y,z box sizes
Returns
-------
void
"""
self.box = box_lengths
return
def SetTime(self, time):
"""
This member function is used to set the simulation time of a Frame.
Parameters
----------
time : float, simulation time
Returns
-------
void
"""
self.time = time
return
def __len__(self):
return len(self.lipidcom)
# def COG(self,unwrapped=False):
# cog_out = np.zeros(3)
# for lipid in self.lipidcom:
# if not unwrapped:
# cog_out+=lipid.com
# else:
# cog_out+=lipid.com_unwrap
# cog_out/=len(self)
# return com_out
def COM(self, wrapped=True):
"""
This member function is used to compute the overall center of mass (COM) of a Frame.
This function uses the LipidCOM object coordinates and masses to compute the COM of
the frame.
Parameters
----------
        wrapped : bool, Optional
            True (default) - The wrapped LipidCOM coordinates are used to compute
            the COM of the frame
            False - The unwrapped LipidCOM coordinates are used to compute
            the COM of the frame
        Returns
        -------
        com_out : numpy array - 1d, 3 element array holding the mass-weighted center of mass of the Frame
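        Example (illustrative; assumes frame is a populated Frame instance):
            com_wrapped = frame.COM()
            com_unwrapped = frame.COM(wrapped=False)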
"""
com_out = np.zeros(3)
total_mass = 0.0
for lipid in self.lipidcom:
if wrapped:
com_out+=lipid.com*lipid.mass
total_mass+=lipid.mass
else:
com_out+=lipid.com_unwrap*lipid.mass
total_mass+=lipid.mass
com_out/=total_mass
return com_out
#frame wrapper - the name of this class may be changed. e.g. FrameShelve
class frames:
"""
This is a wrapper class for the Frame object that stores a set of Frame objects
corresponding to a molecular dynamics trajectory. This class saves the Frame objects
on disk using the shelve module and provides an interface to access instances of
those Frames. This class defines an append function and some built-ins to allow integer indexing
of the frames object (like an array) to add/get instances of Frame objects corresponding to that index.
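    Example (a minimal sketch; the lipid count of 10 is arbitrary):
        frs = frames(prefix='/tmp/', save=False)
        frs.append(Frame(10))   # shelve a new Frame to disk
        frs += Frame(10)        # equivalent append via '+='
        frame0 = frs[0]         # fetch a Frame back from the shelve database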
"""
_type_error ="instance of object MemSys.frames only excepts instances of MemSys.Frame"
def __init__(self,prefix='/tmp/',save=False):
"""
This is the initialization function of the frames object.
This function initializes all the frame instance attributes and assigns
some default values.
Parameters
----------
prefix : string, Optional; The location to store the "shelve"d Frame data.
'/tmp/' (default) - The data is stored in the unix/linux tmp directory.
        save : bool, Optional; determines whether to delete the shelved Frame data after object deletion
False (default) - the shelved Frame data is deleted upon calling __del__
True - the shelved Frame data is not deleted when __del__ is called
Returns
-------
void
"""
self.nframes = 0
self.pid = os.getpid()
if prefix == 'Default':
prefix = '/tmp/'
if prefix[-1] != '/':
prefix = prefix +'/'
path = prefix
if save:
path = path+'mem_sys_frames'
else:
path = path+'.mem_sys_frames_'+str(self.pid)
self.path = path
self.save = save
if os.path.isdir(self.path):
shutil.rmtree(self.path)
os.mkdir(self.path, 0755)
self.fs_name = self.path +'/shelf_frames.db'
self.frame_shelf = shelve.open(self.fs_name,flag="c", protocol=2)
return
def __del__(self):
"""
Non-standard implementation for the __del__ built-in.
Closes the Frame shelve database file and deletes the shelved Frame
data if the frames.save parameter is False
Parameters
----------
void
Returns
-------
void
"""
self.frame_shelf.close()
if not self.save:
if os.path.isdir(self.path):
shutil.rmtree(self.path)
return
def append(self,item):
"""
        This member function allows tail append/addition-like functionality for a Frame object. The new Frame
is added to the shelve database with a key n_frames and the number of Frames is incremented by 1.
Parameters
----------
item : The instance of a Frame object to be appended
Returns
-------
        void; raises a TypeError if the item passed for appending is not a Frame instance.
"""
if isinstance(item, Frame):
self.frame_shelf[str(self.nframes)] = item
self.nframes+=1
return
else:
            raise TypeError(self._type_error)
def __getitem__(self,key):
"""
Non-standard implementation for the __getitem__ built-in to allow integer
        indexing of the frames object. This allows access to the Frame objects by an
integer indexing key, which are stored in the shelve database files.
Parameters
----------
key : int - The index of the Frame object being called
Returns
-------
Frame_obj : This is an instance of the Frame object stored at index key (pulled from the shelve database)
"""
if key < 0:
key += self.nframes
        elif key >= self.nframes:
key = self.nframes-1
return self.frame_shelf[str(key)]
def __setitem__(self,key,item):
"""
Non-standard implementation for the __setitem__ built-in to allow integer
        indexing of the frames object. This allows the Frame stored at the index key to be set.
Parameters
----------
key : int - The index of where the input Frame should be stored.
item : Frame object - This is an instance of a Frame object to be stored at index key.
Returns
-------
        void; raises a TypeError if the input item is not an instance of a Frame object
"""
if not isinstance(item, Frame):
            raise TypeError(self._type_error)
if key < 0:
key+=self.nframes
elif key >= self.nframes:
key = self.nframes
self.nframes+=1
self.frame_shelf[str(key)]=item
return
def __len__(self):
return self.nframes
def __iadd__(self,item):
"""
Non-standard implementation for the __iadd__ built-in which allows a Frame object
to be appended using the '+=' operator.
Parameters
----------
item : Frame object - This is an instance of a Frame object to be appended.
Returns
        -------
        self : frames - the updated frames instance (returning self is required for the '+=' operation)
        """
self.append(item)
return self
# the multiprocessor parallelized functions that get copies of this object
    # still trigger the message:
# Exception OSError: OSError(2, 'No such file or directory') in ignored
# I'm not sure why, but it is marked as ignored and it doesn't seem to cause any problems with the Frame shelve
# database file.
class par_frames:
"""
This class is effectively used to generate read-only copies of the frames class, which can be passed
to functions that do parallelized computations over the number of frames.
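    Example (illustrative; assumes frs is an existing frames instance):
        read_only = par_frames(frs.nframes, frs.fs_name, frs.frame_shelf)
        frame0 = read_only[0]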
"""
# fs_name does not actually get used, so it is deprecated and should probably be removed at some point.
def __init__(self, nframes, fs_name, frame_shelve):
"""
This is the initialization function of the par_frames object.
        This function stores a copy of an existing Frame shelve file object.
Parameters
----------
nframes : int - the number of Frames stored in the shelve database
fs_name : string - the name (prefix) of the shelve database file
frame_shelve : the shelve file object storing the Frames to be accessible by this object.
Returns
-------
void
"""
self.nframes = nframes
self.fs_name = fs_name
#print "par_frames instance"
#print "self.nframes ",self.nframes
#print "self.fs_name ",self.fs_name
#self.frame_shelf = shelve.open(self.fs_name,flag="r", protocol=2)
self.frame_shelf = frame_shelve
return
# def __del__(self):
# self.frame_shelf.close()
# return
def __getitem__(self,key):
"""
Non-standard implementation for the __getitem__ built-in to allow integer
        indexing of the par_frames object. This allows access to the Frame objects stored in the shelve database using
an integer indexing key.
Parameters
----------
key : int - The index of the Frame object being called
Returns
-------
Frame_obj : This is an instance of the Frame object stored at index key (pulled from the shelve database)
"""
if key < 0:
key += self.nframes
        elif key >= self.nframes:
key = self.nframes-1
return self.frame_shelf[str(key)]
def __len__(self):
return self.nframes
# leaflet object
class Leaflet:
"""
This class object is used to group lipids together according to their bilayer leaflet. It is primarily meant to
store the indices of LipidCOMs as they are in a Frame.lipidcom list. This class also
creates sub-groups within the Leaflet based on the LipidCOM.type using LipidGroup objects.
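    Example (illustrative; the indices and resnames are arbitrary):
        upper = Leaflet('upper')
        upper.AddMember(0, 'POPC')
        upper.AddMember(1, 'DOPE')
        popc_indices = upper.GetGroupIndices('POPC')
        all_indices = upper.GetMemberIndices()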
"""
def __init__(self, name):
"""
        This is the initialization function of the Leaflet object.
        This function initializes the lists and dicts necessary to hold
the Leaflet data.
Parameters
----------
name : string - the name of the bilayer leaflet being initialized ('upper' and 'lower' are used by the MemSys class)
Returns
-------
void
"""
#the name of the leaflet - e.g. 'upper' or 'lower'
self.name = name
#initialize a list to store the indices of lipids assigned to this leaflet
self.members = []
#initialize a list to hold the LipidGroup objects
self.groups = []
#initialize a dictionary to store the self.groups index of LipidGroup objects
self.group_dict = {}
return
def __str__(self):
return '%s leaflet of a Membrane System with %s members and %s lipid groups' % (self.name, len(self.members), len(self.groups))
def __repr__(self):
return '%s leaflet of a Membrane System with %s members and %s lipid groups' % (self.name, len(self.members), len(self.groups))
def __len__(self):
return len(self.members)
#consider changing var name of input 'resname' to something that doesn't conflict with LipidCOM.type
def AddMember(self, index, resname):
"""
This member function allows new lipids (by Frame.lipidcom index) to be added to the Leaflet.
Parameters
----------
index : The index of the lipid being added to the Leaflet
resname : the resname (or LipidCOM.type) of the lipid being added.
Returns
-------
void
"""
if len(self.members) == 0:
self.members.append([index, resname])
self.groups.append(LipidGroup(resname))
self.groups[0].AddMember(index)
self.group_dict.update({resname:0})
else:
self.members.append([index, resname])
addgroup = True
group_ind = 0
for rn in self.groups:
if resname == rn.lg_name:
addgroup = False
break
group_ind+=1
if addgroup:
self.groups.append(LipidGroup(resname))
ng = len(self.groups)
self.groups[ng-1].AddMember(index)
self.group_dict.update({resname: ng-1})
else:
self.groups[group_ind].AddMember(index)
#self.members=sorted(self.members,key=lambda self.members:self.members[1])
return
def GetGroupIndices(self, group_name):
"""
This member function returns the list of indices grouped in the LipidGroup object
with LipidGroup.lg_name matching the input name. This allows for selections of LipidCOMs of a specific type.
Parameters
----------
        group_name : string - The name of the group (resname of the lipids) whose indices are to be returned.
                     Passing the string 'all' will return indices of all the lipids assigned to
                     the Leaflet instance. If the group_name is not recognized (i.e. is not in the group_dict),
                     the function defaults to 'all'.
        Returns
        -------
        indices : list - a list of integer indices of the lipids in the matching LipidGroup(s)
"""
indices = []
if group_name == "all":
for element in self.group_dict:
gindex = self.group_dict[element]
indices += self.groups[gindex].lg_members
elif group_name in self.group_dict:
gindex = self.group_dict[group_name]
indices = self.groups[gindex].lg_members
else:
            #unknown group name - print warning and use the default "all"
print "!! Warning - request for unknown Lipid Group \'",group_name,"\' from the ",self.name," leaflet"
print "!! using the default \"all\""
for element in self.group_dict:
gindex = self.group_dict[element]
indices += self.groups[gindex].lg_members
return list(indices)
def GetMemberIndices(self):
"""
This member function returns the list of indices for the lipids grouped in the Leaflet instance.
Parameters
----------
void
Returns
-------
indices : list - a list of integer indices of the lipids in the Leaflet instance
"""
indices = []
for element in self.members:
indices.append(element[0])
return list(indices)
def HasGroup(self, group_name):
"""
        This member function provides a way to check whether a LipidGroup with the input
        name exists in the Leaflet instance.
Parameters
----------
        group_name : string - The name to be checked against the existing LipidGroup names
Returns
-------
answer : bool - True if there is a LipidGroup with name group_name, False otherwise
"""
        return group_name in self.group_dict
def NumGroups(self):
"""
        This member function returns the number of unique LipidGroups that have been initialized within
        the Leaflet instance.
Parameters
----------
none
Returns
-------
number_of_groups : int - The number of unique LipidGroups
"""
return len(self.groups)
def GetGroupNames(self):
"""
        This member function returns the list of LipidGroup names that currently exist in
        the Leaflet instance.
Parameters
----------
void
Returns
-------
names : list - a list of string LipidGroup names
"""
return [group.lg_name for group in self.groups]
class LipidGroup:
"""
This class object is used to group lipids together according to their type/resname/name. It is primarily meant to
store the indices of the LipidCOMs as they are in a Frame.lipidcom list.
Lipid members are added dynamically using the AddMember function.
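    Example (illustrative; the index values are arbitrary):
        group = LipidGroup('POPC')
        group.AddMember(0)
        group.AddMember(5)
        group.name()  # returns 'POPC'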
"""
def __init__(self, name):
"""
        This is the initialization function of the LipidGroup object.
        This function initializes the list that stores its member indices.
This function also sets the name of the LipidGroup object instance.
Parameters
----------
name : string - the name/type/resname of the lipids being grouped in this object
Returns
-------
void
"""
#initialize a list to hold the member indices
self.lg_members = []
# the name of this lipid group
self.lg_name = name
return
def AddMember(self, new_mem):
"""
This member function allows dynamic addition (via appending to the member list) of
lipids via their index to the current LipidGroup instance.
Parameters
----------
new_mem : int - the index of the lipid being added to this lipid group
Returns
-------
void
"""
self.lg_members.append(new_mem)
return
def name(self):
"""
        This is a member function to return the name of the current LipidGroup instance.
Parameters
----------
void
Returns
-------
name : string - the name of the lipid group (i.e. lg_name)
"""
return self.lg_name
def MSD_frames(frames, fstart, fend, indices, refframe, plane):
"""
This function allows the mean squared displacement (MSD) to be computed
for a specified subset of the Frames in a frames (or par_frames) object.
This function was created to be called from the function MemSys.CalcMSD_parallel
as a function to be passed to the multiprocessor threads.
Parameters
----------
frames : frames or par_frames object - object containing all the Frames of the trajectory
fstart : int - the first frame to start the analysis on
fend : int - the last frame to analyze
indices : list - list of integer indices of the LipidCOMs to include in the computation
refframe : int - the index of the frame that is to be taken as the reference for the MSD computation
plane : list - list of the indices corresponding to the coordinate planes (x: 0,y 1,z :2) to be included in the MSD
Returns
-------
msd_results - numpy array (floats) - This is a num_framesx4 numpy array containing the
results of the MSD computation for the specified frames
msd_results[i,0] = simulation time for frame f = i + fstart
msd_results[i,1] = the configurational average MSD over the specified LipidCOMs for frame f = i + fstart
msd_results[i,2] = the standard deviation of the configurational average MSD over the specified LipidCOMs for frame f = i + fstart
    msd_results[i,3] = an estimate of the corresponding diffusion constant based on
the configurational average MSD over the specified LipidCOMs for frame f = i + fstart
{i = 0; i < num_frames}
"""
    #initialize an array to hold the output
nfc = fend - fstart + 1
output = np.zeros((nfc,4))
# number of lipids in the selection
n_com = len(indices)
#initialize a running stats object to do the configuration averaging
drs_stat = RunningStats()
# initialize an np array to hold coordinates for the selection
# at the reference frame
com_ref = np.zeros((n_com,2))
ref_frame = frames[refframe]
count=0
# get the coordinates
for i in indices:
com_i = ref_frame.lipidcom[i].com_unwrap[plane]
com_ref[count]=com_i[:]
count+=1
time_ref = ref_frame.time
#print "nframes ",len(frames)
#print "process; fstart ",fstart," fend ",fend
#print "process; loop range "
#print range(fstart,(fend+1))
# now begin loop over the frames for this process
for f in range(fstart, (fend+1)):
# get the current frame
curr_frame = frames[f]
# get the coordinates for the selection at this frame
com_curr = np.zeros((n_com,2))
count=0
for i in indices:
com_i = curr_frame.lipidcom[i].com_unwrap[plane]
com_curr[count]=com_i[:]
count+=1
#current time
tc = curr_frame.time
dr = com_curr - com_ref
drs = dr*dr
#loop over the selections for this frame
for val in drs:
drs_curr = val[:]
drs_mag = drs_curr.sum()
drs_stat.Push(drs_mag)
#get the msd for the current selection
msdcurr = drs_stat.Mean()
devcurr = drs_stat.Deviation()
drs_stat.Reset()
findex = f-fstart
output[findex,0]=tc
output[findex,1]=msdcurr
output[findex,2]=devcurr
dt = tc - time_ref
DiffCon = 0.0
        if dt != 0.0: # avoid division by zero at the reference frame
DiffCon = msdcurr/(4.0*dt)
output[findex,3]=DiffCon
# print "msdcurr ",msdcurr," DiffCon ",DiffCon
return output
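#--- Sketch (editor's addition): the Einstein relation used above is
#--- MSD(t) = 2*d*D*t with d = 2 lateral dimensions, hence D = MSD/(4*t).
#--- The helper below is hypothetical; it checks the relation on synthetic
#--- 2D Brownian steps using only numpy (imported at the top of this module).
def _example_msd_einstein(nsteps=1000, D=0.5, dt=1.0):
    # draw independent Gaussian steps with variance 2*D*dt per dimension
    steps = np.random.normal(0.0, np.sqrt(2.0*D*dt), size=(nsteps, 2))
    traj = np.cumsum(steps, axis=0)
    # single-trajectory estimate of D from the final squared displacement;
    # this is noisy, but scatters around the input D over many trajectories
    return (traj[-1]**2).sum()/(4.0*nsteps*dt)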
#function to compute the thickness of the membrane (in the normal direction). The algorithm is based on
# the GridMAT-MD bilayer thickness calculation (except without the gridding procedure)
def Thickness_frames(frames, fstart, fend, leaflets, nlipids, plane, norm):
"""
This function allows the bilayer "thickness" to be computed
for a specified subset of the Frames in a frames (or par_frames) object.
    This function was written to be used in the function MemSys.CalcMembraneThickness_parallel
    as a function to be passed to the multiprocessor threads.
Parameters
----------
frames : frames or par_frames object - object containing all the Frames of the trajectory
fstart : int - the first frame to start the analysis on
fend : int - the last frame to analyze
leaflets : dict - the MemSys.leaflets instance used to define the Leaflets for this calculation
This input should contain the two keys, 'upper' and 'lower', corresponding
to instances of the Leaflet class.
nlipids : int - the total number of LipidCOMs (or lipids) in the Leaflets
plane : list - list of the indices corresponding to the bilayer lateral coordinate planes (x: 0,y 1,z :2)
norm : int - index corresponding to the bilayer normal coordinate plane (x: 0,y 1,z :2)
    Returns
    -------
    out : list - a two element list [zavgs, zmaps] holding the results of the
        thickness computation for the specified frames
        zavgs : nfc x 3 numpy array (floats), where nfc = fend - fstart + 1
            zavgs[i,0] = simulation time for frame f = i + fstart
            zavgs[i,1] = the configurational average thickness over the lipids for frame f = i + fstart
            zavgs[i,2] = the standard deviation of the thickness over the lipids for frame f = i + fstart
        zmaps : nfc x nlipids x 6 numpy array (floats) holding the per-lipid map values
            (time, x average, y average, z lower, z upper, thickness)
"""
#upper_match = []
#lower_match = []
xi = plane[0]
yi = plane[1]
zi = norm
comcup = np.zeros(3)
comclo = np.zeros(3)
dcom = np.zeros(3)
nfc = fend - fstart + 1
nlc = nlipids
zdists = np.zeros((nfc, nlc, 1))
zmaps = np.zeros((nfc, nlc, 6))
#dcoms = np.zeros(3)
f=0
times = np.zeros(nfc)
for f in range(fstart,(fend+1)):
n=0
fr = frames[f]
boxc = fr.box
boxc_xh = boxc[xi]/2.0
boxc_yh = boxc[yi]/2.0
dt = fr.time
findex = f-fstart
times[findex]=dt
for memu in leaflets['upper'].members:
idu = memu[0]
comcup = fr.lipidcom[idu].com
distxy = 10000.0
distz = 0.0
mindex = 0
zlom = 0.0
zhim = 0.0
xavgm = 0.0
yavgm = 0.0
for meml in leaflets['lower'].members:
idl = meml[0]
comclo = fr.lipidcom[idl].com
dcom = comcup-comclo
dx = dcom[xi]
dy = dcom[yi]
dz = dcom[zi]
#Minimum image -- coordinates must be pre-wrapped
if np.absolute(dx) > boxc_xh:
dx = boxc[xi] - np.absolute(comcup[xi]-boxc_xh) - np.absolute(comclo[xi]-boxc_xh)
if np.absolute(dy) > boxc_yh:
dy = boxc[yi] - np.absolute(comcup[yi]-boxc_yh) - np.absolute(comclo[yi]-boxc_yh)
rxy = np.sqrt(dx**2+dy**2)
#get 4d map values
comavg = (comcup+comclo)/2.0
xavg = comavg[xi]
yavg = comavg[yi]
zlo = comclo[zi]
zhi = comcup[zi]
if rxy<distxy:
distxy=rxy
distz = np.absolute(dz)
mindex=meml
xavgm = xavg
yavgm = yavg
zlom = zlo
zhim = zhi
#upper_match.append([mindex,distz])
#print "n ",n," xvg ", xavgm," yvg ", yavgm
zdists[findex,n]=distz
#maps
zmaps[findex,n,0]=dt
zmaps[findex,n,1]=xavgm
zmaps[findex,n,2]=yavgm
zmaps[findex,n,3]=zlom
zmaps[findex,n,4]=zhim
zmaps[findex,n,5]=distz
n+=1
for meml in leaflets['lower'].members:
idl = meml[0]
comclo = fr.lipidcom[idl].com
distxy = 10000.0
distz = 0.0
mindex = 0
zlom = 0.0
zhim = 0.0
xavgm = 0.0
yavgm = 0.0
for memu in leaflets['upper'].members:
idu = memu[0]
comcup = fr.lipidcom[idu].com
dcom = comclo-comcup
dx = dcom[xi]
dy = dcom[yi]
dz = dcom[zi]
#Minimum image -- coordinates must be pre-wrapped
if np.absolute(dx) > boxc_xh:
dx = boxc[xi] - np.absolute(comclo[xi]-boxc_xh) - np.absolute(comcup[xi]-boxc_xh)
if np.absolute(dy) > boxc_yh:
dy = boxc[yi] - np.absolute(comclo[yi]-boxc_yh) - np.absolute(comcup[yi]-boxc_yh)
rxy = np.sqrt(dx**2+dy**2)
#get 4d map values
comavg = (comcup+comclo)/2.0
xavg = comavg[xi]
yavg = comavg[yi]
zlo = comclo[zi]
zhi = comcup[zi]
if rxy<distxy:
distxy=rxy
distz = np.absolute(dz)
mindex=meml
xavgm = xavg
yavgm = yavg
zlom = zlo
zhim = zhi
#upper_match.append([mindex,distz])
#print "n ",n," xvg ", xavgm," yvg ", yavgm
zdists[findex,n]=distz
#maps
zmaps[findex,n,0]=dt
zmaps[findex,n,1]=xavgm
zmaps[findex,n,2]=yavgm
zmaps[findex,n,3]=zlom
zmaps[findex,n,4]=zhim
zmaps[findex,n,5]=distz
n+=1
#break
zavgs = np.zeros((nfc, 3))
zdtstat = RunningStats()
for fr in xrange(nfc):
currtime = times[fr]
dt = currtime
curr = zdists[fr,:]
zavgcurr = curr.mean()
zdevcurr = curr.std()
# zdtstat.Push(zavgcurr)
# zdtcurr = zdtstat.Mean()
# zdtdcurr = zdtstat.Deviation()
zavgs[fr,0]=dt
zavgs[fr,1]=zavgcurr
zavgs[fr,2]=zdevcurr
# zavgs[fr,3]=zdtcurr
# zavgs[fr,4]=zdtdcurr
out = [zavgs,zmaps]
return out
#return zavgs
#return zmaps
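#--- Sketch (editor's addition): the minimum-image distance convention used
#--- repeatedly above for pre-wrapped coordinates in a rectangular box,
#--- restated as a standalone (hypothetical) helper for a single coordinate.
def _example_min_image_dx(x1, x2, box_x):
    dx = x1 - x2
    half = box_x/2.0
    if np.absolute(dx) > half:
        # same correction as applied in Thickness_frames above; note that it
        # yields an unsigned separation when the image correction is taken
        dx = box_x - np.absolute(x1 - half) - np.absolute(x2 - half)
    return dx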
## this is the main class - the Membrane System (MemSys) object
class MemSys:
    # pass the mda analysis trajectory object and a selection with the membrane (i.e. w/o water and ions)
# optional - specify the plane that the membrane is in - default is xy with normal in z
def __init__(self, mda_traj, mem_sel, plane="xy",fskip=1,frame_path='Default',frame_save=False,nprocs=1):
#defaults - xy plane with z normal
ii=0
jj=1
kk=2
if plane=="yz" or plane=="zy":
ii=1
jj=2
kk=0
if plane=="xz" or plane=="zx":
ii=0
jj=2
kk=1
#parallelize loading -- currently just applies to unwrapping
parallel=False
if nprocs>1:
parallel=True
#store the indices of the plane directions
self.plane = [ii, jj]
# store the index of the normal direction
self.norm = kk
#initialize leaflet objects
self.leaflets = {'upper':Leaflet('upper'),'lower':Leaflet('lower')}
self.com_leaflet = []
#get the number of lipids (residues)
self.nlipids=mem_sel.n_residues
#initialize an empty cluster list - used to store the clusters built in the last call of 'CheckClustering'
self.clusters = [] # after 'CheckClustering' is called, the outersize len(self.clusters) should equal self.nframes
#initialize empty frame list
#self.frame=[]
self.frame = frames(prefix=frame_path,save=frame_save)
#loop over the frames
f=0
for frame in mda_traj[::fskip]:
print "doing frame ",frame.frame
#add the frame object for this frame
cframe = Frame(self.nlipids)
# set the box dimensions and the time for this frame
cframe.SetBox(frame.dimensions[0:3])
cframe.SetTime(frame.time)
#print "time ",frame.time
cframe.number = f
# loop over the residues (lipids) and get the centers of mass
r=0
for res in mem_sel.residues:
cframe.lipidcom[r].extract(res)
cframe.lipidcom[r].mass = res.total_mass()
r+=1
#append the frame
self.frame.append(cframe)
f+=1
#get the number of frames from the trajectory
self.nframes = f
#now we need to unwrap the coordinates
natoms = len(mem_sel)
oldcoord = np.zeros((natoms,3))
currcoord = np.zeros((natoms,3))
wrapcoord = np.zeros((natoms,3))
index = mem_sel.indices
firstframe = True
# loop over the trajectory again to get unwrapped coordinates
# unwrap the raw residue coordinates - then get the COMs
f=0
for frame in mda_traj[::fskip]:
            #first we unwrap
print "unwrapping frame ",frame.frame
currcoord = frame._pos[index]
if firstframe:
oldcoord = np.copy(currcoord)
firstframe = False
else:
abc = frame.dimensions[0:3]
if parallel:
wrapcoord = mda_wrap_coordinates_parallel(abc, currcoord, oldcoord,nprocs=nprocs)
else:
wrapcoord = mda_wrap_coordinates(abc, currcoord, oldcoord)
frame._pos[index] = wrapcoord[:]
oldcoord = np.copy(wrapcoord)
#now we need to adjust for the center of mass motion of the membrane -- for simplicity set all frames to (0,0,0)
# to remove center of mass motion of the membrane
mem_com = mem_sel.center_of_mass()
frame._pos[index] -= mem_com
r=0
cframe = self.frame[f]
for res in mem_sel.residues:
cframe.lipidcom[r].extract(res, unwrap=True)
r+=1
self.frame[f]=cframe
f+=1
# now we can assign the lipids to the leaflets
# NOTE: Lipids are only assigned to leaflets once based on the
# first frame of the trajectory
#first- compute the average position along the normal direction
zstat = RunningStats()
for lipcom in self.frame[0].lipidcom:
zstat.Push(lipcom.com_unwrap[self.norm])
zavg = zstat.Mean()
# now loop over the lipids
l = 0
for lipcom in self.frame[0].lipidcom:
pos = ""
# decide which leaflet
if lipcom.com_unwrap[self.norm]>zavg:
pos = 'upper'
elif lipcom.com_unwrap[self.norm]<zavg:
pos = 'lower'
#add to the chosen leaflet
self.com_leaflet.append(pos)
self.leaflets[pos].AddMember(l, lipcom.type)
l+=1
#complete
return
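    # Illustrative construction sketch (editor's addition; the file names and
    # the selection string below are placeholders, not part of this module):
    #   import MDAnalysis as mda
    #   u = mda.Universe('membrane.psf', 'membrane.dcd')
    #   mem = u.select_atoms('not (resname TIP3 or resname SOD or resname CLA)')
    #   ms = MemSys(u.trajectory, mem, plane="xy", fskip=10, nprocs=1)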
def __str__(self):
return 'Membrane System with %s frames and %s lipids/components' % (self.nframes, self.nlipids)
def __repr__(self):
return 'Membrane System with %s frames and %s lipids/components' % (self.nframes, self.nlipids)
def NumberOfUniqueGroups(self):
resnames = []
for leaflet in self.leaflets:
for group in leaflet.groups:
gname = group.name()
if gname not in resnames:
resnames.append(gname)
return len(resnames)
#def LeafletCOM(leaflet_name,frame_num):
    # function to compute the mean squared displacement (MSD) along with the diffusion constant of a group
def CalcMSD(self, leaflet="both",group="all"):
indices = []
#diffusion dimension - assume lateral so, dim=2
dim=2
if leaflet == "both":
for leaflets in self.leaflets:
curr_leaf = self.leaflets[leaflets]
indices+=curr_leaf.GetGroupIndices(group)
elif leaflet == "upper":
curr_leaf = self.leaflets[leaflet]
indices=curr_leaf.GetGroupIndices(group)
elif leaflet == "lower":
curr_leaf = self.leaflets[leaflet]
indices=curr_leaf.GetGroupIndices(group)
else:
#unknown option--use default "both"
print "!! Warning - request for unknown leaflet name \'",leaflet,"\' from the ",self.name," leaflet"
print "!! the options are \"upper\", \"lower\", or \"both\"--using the default \"both\""
for leaflets in self.leaflets:
curr_leaf = self.leaflets[leaflets]
indices+=curr_leaf.GetGroupIndices(group)
n_com = len(indices)
#store the coordinates of the selected LipidCOMs in a single numpy array
selcoords = np.zeros((self.nframes,n_com,2))
for f in xrange(self.nframes):
count=0
for i in indices:
com_curr = self.frame[f].lipidcom[i].com_unwrap[self.plane]
selcoords[f,count]=com_curr[:]
count+=1
#initialize a numpy array to hold the msd for the selection
msd = np.zeros((self.nframes, 7))
#initialize a running stats object to do the averaging
drs_stat = RunningStats()
#initialize a running stats object for the diffusion constant (frame/time average)
diff_stat = RunningStats()
#running stats object for time averaging
msd_stat = RunningStats()
#loop over the frames starting at index 1
#print comlist
#print len(comlist)
coml0 = selcoords[0,:,:]
t0 = self.frame[0].time
#print coml0
for i in xrange(1, self.nframes):
# get the current com frame list
tc = self.frame[i].time
dt = tc
comlcurr = selcoords[i,:,:]
dr = comlcurr - coml0
drs = dr*dr
#loop over the selections for this frame
for val in drs:
drs_curr = val[:]
drs_mag = drs_curr.sum()
drs_stat.Push(drs_mag)
#get the msd for the current selection
msdcurr = drs_stat.Mean()
devcurr = drs_stat.Deviation()
drs_stat.Reset()
msd_stat.Push(msdcurr)
msd_tavg = msd_stat.Mean()
msd_dev = msd_stat.Deviation()
#dt = times[i]-times[0]
DiffCon = msd_tavg/(2.0*dim*dt)
diff_stat.Push(DiffCon)
#print "msdcurr ",msdcurr
#push to the msd array
msd[i,0]=dt
msd[i,1]=msdcurr
msd[i,2]=msd_tavg
msd[i,3]=msd_dev
msd[i,4]=DiffCon
msd[i,5]=diff_stat.Mean()
msd[i,6]=diff_stat.Deviation()
#return msd array
return msd
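    # Illustrative usage sketch (editor's addition; hypothetical call):
    #   msd = ms.CalcMSD(leaflet="upper", group="all")
    #   # msd[:,0] = time, msd[:,1] = instantaneous MSD,
    #   # msd[:,2:4] = running time average/deviation, msd[:,4] = D estimate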
#function to compute the thickness of the membrane (in the normal direction). The algorithm is based on
# the GridMAT-MD bilayer thickness calculation (except without the gridding procedure)
def CalcMembraneThickness(self):
#upper_match = []
#lower_match = []
xi = self.plane[0]
yi = self.plane[1]
zi = self.norm
comcup = np.zeros(3)
comclo = np.zeros(3)
dcom = np.zeros(3)
zdists = np.zeros((self.nframes, self.nlipids, 1))
zmaps = np.zeros((self.nframes, self.nlipids, 6))
#dcoms = np.zeros(3)
f=0
for f in xrange(self.nframes):
n=0
fr = self.frame[f]
boxc = fr.box
boxc_xh = boxc[xi]/2.0
boxc_yh = boxc[yi]/2.0
dt = fr.time
for memu in self.leaflets['upper'].members:
idu = memu[0]
comcup = fr.lipidcom[idu].com
distxy = 10000.0
distz = 0.0
mindex = 0
zlom = 0.0
zhim = 0.0
xavgm = 0.0
yavgm = 0.0
for meml in self.leaflets['lower'].members:
idl = meml[0]
comclo = fr.lipidcom[idl].com
dcom = comcup-comclo
dx = dcom[xi]
dy = dcom[yi]
dz = dcom[zi]
#Minimum image -- coordinates must be pre-wrapped
if np.absolute(dx) > boxc_xh:
dx = boxc[xi] - np.absolute(comcup[xi]-boxc_xh) - np.absolute(comclo[xi]-boxc_xh)
if np.absolute(dy) > boxc_yh:
dy = boxc[yi] - np.absolute(comcup[yi]-boxc_yh) - np.absolute(comclo[yi]-boxc_yh)
rxy = np.sqrt(dx**2+dy**2)
#get 4d map values
comavg = (comcup+comclo)/2.0
xavg = comavg[xi]
yavg = comavg[yi]
zlo = comclo[zi]
zhi = comcup[zi]
if rxy<distxy:
distxy=rxy
distz = np.absolute(dz)
mindex=meml
xavgm = xavg
yavgm = yavg
zlom = zlo
zhim = zhi
#upper_match.append([mindex,distz])
#print "n ",n," xvg ", xavgm," yvg ", yavgm
zdists[f,n]=distz
#maps
zmaps[f,n,0]=dt
zmaps[f,n,1]=xavgm
zmaps[f,n,2]=yavgm
zmaps[f,n,3]=zlom
zmaps[f,n,4]=zhim
zmaps[f,n,5]=distz
n+=1
for meml in self.leaflets['lower'].members:
idl = meml[0]
comclo = fr.lipidcom[idl].com
distxy = 10000.0
distz = 0.0
mindex = 0
zlom = 0.0
zhim = 0.0
xavgm = 0.0
yavgm = 0.0
for memu in self.leaflets['upper'].members:
idu = memu[0]
comcup = fr.lipidcom[idu].com
dcom = comclo-comcup
dx = dcom[xi]
dy = dcom[yi]
dz = dcom[zi]
#Minimum image -- coordinates must be pre-wrapped
if np.absolute(dx) > boxc_xh:
dx = boxc[xi] - np.absolute(comclo[xi]-boxc_xh) - np.absolute(comcup[xi]-boxc_xh)
if np.absolute(dy) > boxc_yh:
dy = boxc[yi] - np.absolute(comclo[yi]-boxc_yh) - np.absolute(comcup[yi]-boxc_yh)
rxy = np.sqrt(dx**2+dy**2)
#get 4d map values
comavg = (comcup+comclo)/2.0
xavg = comavg[xi]
yavg = comavg[yi]
zlo = comclo[zi]
zhi = comcup[zi]
if rxy<distxy:
distxy=rxy
distz = np.absolute(dz)
mindex=meml
xavgm = xavg
yavgm = yavg
zlom = zlo
zhim = zhi
#upper_match.append([mindex,distz])
#print "n ",n," xvg ", xavgm," yvg ", yavgm
zdists[f,n]=distz
#maps
zmaps[f,n,0]=dt
zmaps[f,n,1]=xavgm
zmaps[f,n,2]=yavgm
zmaps[f,n,3]=zlom
zmaps[f,n,4]=zhim
zmaps[f,n,5]=distz
n+=1
#break
zavgs = np.zeros((self.nframes, 5))
zdtstat = RunningStats()
for fr in xrange(self.nframes):
currtime = self.frame[fr].time
dt = currtime
curr = zdists[fr,:]
zavgcurr = curr.mean()
zdevcurr = curr.std()
zdtstat.Push(zavgcurr)
zdtcurr = zdtstat.Mean()
zdtdcurr = zdtstat.Deviation()
zavgs[fr,0]=dt
zavgs[fr,1]=zavgcurr
zavgs[fr,2]=zdevcurr
zavgs[fr,3]=zdtcurr
zavgs[fr,4]=zdtdcurr
return zavgs,zmaps
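    # Illustrative usage sketch (editor's addition; hypothetical call):
    #   zavgs, zmaps = ms.CalcMembraneThickness()
    #   # zavgs[:,1] is the per-frame average thickness; zmaps carries the
    #   # per-lipid (time, x, y, z_lower, z_upper, thickness) map values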
#return zmaps
# a simple cluster/chain analysis routine
def CheckClustering(self, leaflet="both",group="all", dist=10.0):
indices = []
#diffusion dimension - assume lateral so, dim=2
dim=2
if leaflet == "both":
for leaflets in self.leaflets:
curr_leaf = self.leaflets[leaflets]
indices+=curr_leaf.GetGroupIndices(group)
elif leaflet == "upper":
curr_leaf = self.leaflets[leaflet]
indices=curr_leaf.GetGroupIndices(group)
elif leaflet == "lower":
curr_leaf = self.leaflets[leaflet]
indices=curr_leaf.GetGroupIndices(group)
else:
#unknown option--use default "both"
print "!! Warning - request for unknown leaflet name \'",leaflet,"\' from the ",self.name," leaflet"
print "!! the options are \"upper\", \"lower\", or \"both\"--using the default \"both\""
for leaflets in self.leaflets:
curr_leaf = self.leaflets[leaflets]
indices+=curr_leaf.GetGroupIndices(group)
n_com = len(indices)
#print "there are ",len(indices)," members"
xi = self.plane[0]
yi = self.plane[1]
zi = self.norm
#reset the system cluster list
self.clusters = []
# numpy array to store output for return
outdata = np.zeros((self.nframes,13))
#stats objects - time averages
ncstat = RunningStats() #number of clusters
asstat = RunningStats() # average cluster size
misstat = RunningStats() # minimum cluster size
masstat = RunningStats() # maximum cluster size
#loop over frames
for f in xrange(self.nframes):
fr = self.frame[f]
ctime = fr.time
clusters = []
# masterlistf = []
# masterlistf += masterlist
#rebuild the master list each frame
masterlistf = list()
for i in indices:
masterlistf.append([i, False])
# print "master ",masterlistf
boxc=fr.box
boxc_xh = boxc[xi]/2.0
boxc_yh = boxc[yi]/2.0
#print boxc
clustind = 0
neighborlist = []
while len(masterlistf)>0:
#print "master ",masterlistf
start = masterlistf[0][0]
masterlistf[0][1]=True
# print
# reset the neighborlist
neighborlist = []
#seed the neighborlist with the start
neighborlist.append(start)
            #now loop over the neighborlist and build neighbors and neighbors of neighbors for this cluster
i=0
while i < len(neighborlist):
ele = neighborlist[i]
startn = ele
coms = fr.lipidcom[startn].com
#get neighbors of the start
#mindex=0
for j in xrange(len(masterlistf)):
#for elem in masterlistf:
elem = masterlistf[j]
incluster = elem[1]
# print "second incluster ",incluster
if not incluster:
ci = elem[0]
comc = fr.lipidcom[ci].com
#dcom = comc-coms
dx = comc[xi]-coms[xi]
dy = comc[yi]-coms[yi]
#rxy = np.sqrt(dx*dx+dy*dy)
#print dx," ",dy," ",rxy
#Minimum image -- coordinates must be pre-wrapped
if np.absolute(dx) > boxc_xh:
dx = boxc[xi] - np.absolute(comc[xi]-boxc_xh) - np.absolute(coms[xi]-boxc_xh)
if np.absolute(dy) > boxc_yh:
dy = boxc[yi] - np.absolute(comc[yi]-boxc_yh) - np.absolute(coms[yi]-boxc_yh)
rxy = np.sqrt(dx*dx+dy*dy)
#print "rxy ",rxy," dx ",dx," dy ",dy
if rxy <= dist:
#print "passed! adding ",masterlistf[mindex][0]," to the neighborlist"
neighborlist.append(masterlistf[j][0])
masterlistf[j][1]=True
#mindex+=1
i+=1
#filter the masterlistf
# print "neighlist", neighborlist
masterlistf=list([v for v in masterlistf if v[1] == False])
if len(neighborlist) > 1:
clusters.append([])
clusters[clustind]=list(neighborlist)
#print "clustind clusters[clustind]"
#print clustind, " ",clusters
clustind+=1
#print masterlistf
#filter out single points
#clusters = [v for v in clusters if len(v) > 1]
nclusters = len(clusters)
clsizestat = RunningStats()
mini = 100000000
maxi = -1000000
for cluster in clusters:
size = len(cluster)
clsizestat.Push(size)
if size>maxi:
maxi=size
if size < mini:
mini=size
avgsize = clsizestat.Mean()
#store instantaneous values
outdata[f,0] = ctime
outdata[f,1]= nclusters
outdata[f,2] = avgsize
outdata[f,3] = mini
outdata[f,4] = maxi
#push to the time averages
ncstat.Push(nclusters)
asstat.Push(avgsize)
misstat.Push(mini)
masstat.Push(maxi)
#store current time averages
outdata[f,5] = ncstat.Mean()
outdata[f,6] = ncstat.Deviation()
outdata[f,7] = asstat.Mean()
outdata[f,8] = asstat.Deviation()
outdata[f,9] = misstat.Mean()
outdata[f,10] = misstat.Deviation()
outdata[f,11] = masstat.Mean()
outdata[f,12] = masstat.Deviation()
# now add cluster list to the system storage
self.clusters.append(list(clusters))
#print clusters
print "Frame ",f
print "There are ",nclusters," clusters with an average size of ",avgsize
print "the largest cluster was ",maxi," and the smallest was ",mini
return outdata
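    # Illustrative usage sketch (editor's addition; hypothetical call):
    #   clust = ms.CheckClustering(leaflet="upper", group="all", dist=12.0)
    #   # clust[:,1] = number of clusters per frame, clust[:,2] = average size;
    #   # the per-frame cluster index lists are stored in ms.clusters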
#takes the cluster lists from self.clusters and gets the plane coordinates
# need to call the 'CheckClustering' function before calling this one
def ExportClustersForPlotting(self):
if len(self.clusters) == 0:
print "Warning!! - call to \'ExportClustersForPlotting\' of a MemSys object with no cluster lists"
print " ---------- the \'CheckClustering\' function needs to be called first!"
return
xi = self.plane[0]
yi = self.plane[1]
#get the maximum number of clusters from any of the frames
maxsize = 0
for f in xrange(len(self.clusters)):
nclust = len(self.clusters[f])
if nclust>maxsize:
maxsize=nclust
#generate a color array
colors = cm.rainbow(np.linspace(0, 1, maxsize))
output = []
for f in xrange(len(self.clusters)):
frame_clusters = self.clusters[f]
frame_data = []
nclust = len(frame_clusters)
#print len(frame_clusters)
#print len(colors)
c = 0
xcoord = []
#xm1 = []
#xp1 = []
ycoord = []
#ym1 = []
#yp1 =[]
coord_color = []
for cluster in frame_clusters:
for index in cluster:
xc = self.frame[f].lipidcom[index].com[xi]
#xcm1 = self.frame[f].lipidcom[index].com[xi]-self.frame[f].box[xi]
#xcp1 = self.frame[f].lipidcom[index].com[xi]+self.frame[f].box[xi]
yc = self.frame[f].lipidcom[index].com[yi]
#ycm1 = self.frame[f].lipidcom[index].com[yi]-self.frame[f].box[yi]
#ycp1 = self.frame[f].lipidcom[index].com[yi]+self.frame[f].box[yi]
xcoord.append(xc)
#xm1.append(xcm1)
#xp1.append(xcp1)
ycoord.append(yc)
#ym1.append(ycm1)
#yp1.append(ycp1)
#print c," ",colors[c]
coord_color.append(colors[c])
c+=1
#output.append([xm1,xcoord,xp1,ym1,ycoord,yp1,coord_color])
output.append([xcoord,ycoord,coord_color])
return output
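    # Illustrative plotting sketch (editor's addition; assumes matplotlib.pyplot
    # has been imported as plt by the caller):
    #   dat = ms.ExportClustersForPlotting()
    #   xcoord, ycoord, coord_color = dat[0]   # first frame
    #   plt.scatter(xcoord, ycoord, c=coord_color)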
# function to compute an approximation of the area per lipid of a group using
# closest neighbor circles
def CalcAreaPerLipid_ClosestNeighborCircle(self, leaflet="both",group="all"):
#diffusion dimension - assume lateral so, dim=2
dim=2
do_leaflet = []
nlip = 0
if leaflet == "both":
do_leaflet.append('upper')
do_leaflet.append('lower')
nlip=self.nlipids
elif leaflet == "upper" or leaflet == "lower":
do_leaflet.append(leaflet)
nlip = len(self.leaflets[leaflet])
        else:
            #unknown option--fall back to the default "both"
            print "!! Warning - request for unknown leaflet name \'",leaflet,"\'"
            print "!! the options are \"upper\", \"lower\", or \"both\"--using the default \"both\""
            leaflet = "both"
            do_leaflet.append('upper')
            do_leaflet.append('lower')
            nlip = self.nlipids
xi = self.plane[0]
yi = self.plane[1]
zi = self.norm
sub_fact = (2.0*np.pi/3.0 - np.sqrt(3.0)/2.0)
#initialize a numpy array to hold the msd for the selection
areas = np.zeros((self.nframes, 5))
#initialize a running stats object to do the averaging
area_stat = RunningStats()
n_leaflet = len(do_leaflet)
#build the index lists
indices_leaflet = {}
all_mem_leaflet = {}
for leaflets in do_leaflet:
indices = list()
curr_leaf = self.leaflets[leaflets]
indices+=curr_leaf.GetGroupIndices(group)
n_com = len(indices)
all_mem = list(self.leaflets[leaflets].GetMemberIndices())
all_mem_leaflet[leaflets] = list(all_mem)
indices_leaflet[leaflets]=list(indices)
#loop over the frames
for f in xrange(self.nframes):
fr = self.frame[f]
dt = fr.time
boxc=fr.box
boxc_xh = boxc[xi]/2.0
boxc_yh = boxc[yi]/2.0
lat_area = boxc_xh*boxc_yh*4.0
if leaflet == 'both':
lat_area*=2.0
area_stat_config = RunningStats()
#loop over the leaflets
for leaflets in do_leaflet:
indices = indices_leaflet[leaflets]
all_mem = all_mem_leaflet[leaflets]
#loop over the group indices in this leaflet
for index in indices:
comc = fr.lipidcom[index].com[:]
rdist_min = 10000.0
#loop over the COMs of non group
#get all the leaflet members
for a in all_mem:
#print "a ",a
if a != index:
comn = fr.lipidcom[a].com[:]
dx = comc[xi]-comn[xi]
dy = comc[yi]-comn[yi]
#Minimum image -- coordinates must be pre-wrapped
if np.absolute(dx) > boxc_xh:
dx = boxc[xi] - np.absolute(comc[xi]-boxc_xh) - np.absolute(comn[xi]-boxc_xh)
if np.absolute(dy) > boxc_yh:
dy = boxc[yi] - np.absolute(comc[yi]-boxc_yh) - np.absolute(comn[yi]-boxc_yh)
rxy = np.sqrt(dx*dx+dy*dy)
#print "rxy ",rxy," dx ",dx," dy ",dy
if rxy < rdist_min:
rdist_min = rxy
#got the min dist, now compute area
#print "rdist_min ",rdist_min
area = np.pi*rdist_min*rdist_min - (rdist_min*rdist_min)*sub_fact
area_stat_config.Push(area)
area_conf_avg = area_stat_config.Mean()
area_stat.Push(area_conf_avg)
area_time_run = area_stat.Mean()
area_time_run_dev = area_stat.Deviation()
#print "time ",dt
areas[f][0]=dt
areas[f][1]=area_conf_avg
areas[f][2]=area_time_run
areas[f][3]=area_time_run_dev
areas[f][4]=lat_area/nlip
return areas
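    # Note (editor's addition): with sub_fact = 2*pi/3 - sqrt(3)/2, the area
    # assigned per lipid is r^2*(pi - sub_fact) ~= 1.913*r^2 for nearest-neighbor
    # distance r. Quick arithmetic check (illustrative):
    #   r = 1.0
    #   np.pi*r*r - (r*r)*(2.0*np.pi/3.0 - np.sqrt(3.0)/2.0)   # -> ~1.9132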
# function to compute the area per lipid using the lateral box sizes and numbers of lipids:
def CalcAreaPerLipid_Box(self, leaflet="both"):
#diffusion dimension - assume lateral so, dim=2
dim=2
do_leaflet = []
nlip = 0
if leaflet == "both":
do_leaflet.append('upper')
do_leaflet.append('lower')
nlip = []
for leaflets in do_leaflet:
nlip.append(float(len(self.leaflets[leaflets])))
elif leaflet == "upper":
do_leaflet.append(leaflet)
nlip = len(self.leaflets[leaflet])
elif leaflet == "lower":
do_leaflet.append(leaflet)
nlip = len(self.leaflets[leaflet])
        else:
            #unknown option--fall back to the default "both"
            print "!! Warning - request for unknown leaflet name \'",leaflet,"\'"
            print "!! the options are \"upper\", \"lower\", or \"both\"--using the default \"both\""
            leaflet = "both"
            do_leaflet.append('upper')
            do_leaflet.append('lower')
            nlip = []
            for leaflets in do_leaflet:
                nlip.append(float(len(self.leaflets[leaflets])))
xi = self.plane[0]
yi = self.plane[1]
zi = self.norm
#initialize a numpy array to hold the msd for the selection
areas = np.zeros((self.nframes, 4))
#initialize a running stats object to do the averaging
area_stat = RunningStats()
n_leaflet = len(do_leaflet)
#loop over the frames
for f in xrange(self.nframes):
fr = self.frame[f]
dt = fr.time
boxc=fr.box
boxc_xh = boxc[xi]/2.0
boxc_yh = boxc[yi]/2.0
lat_area = boxc_xh*boxc_yh*4.0
area_per_lip = lat_area/nlip
if leaflet == 'both':
area_per_lip = (lat_area/2.0)*( (nlip[0]+nlip[1])/(nlip[0]*nlip[1]))
area_stat.Push(area_per_lip)
area_time_run = area_stat.Mean()
area_time_run_dev = area_stat.Deviation()
areas[f][0]=dt
areas[f][1]=area_per_lip
areas[f][2]=area_time_run
areas[f][3]=area_time_run_dev
return areas
# do Voronoi tesselation using the COMs as generators
def VoronoiTesselate(self, leaflet="both",group="all"):
indices = []
#diffusion dimension - assume lateral so, dim=2
dim=2
if leaflet == "both":
for leaflets in self.leaflets:
curr_leaf = self.leaflets[leaflets]
indices+=curr_leaf.GetGroupIndices(group)
elif leaflet == "upper":
curr_leaf = self.leaflets[leaflet]
indices=curr_leaf.GetGroupIndices(group)
elif leaflet == "lower":
curr_leaf = self.leaflets[leaflet]
indices=curr_leaf.GetGroupIndices(group)
else:
#unknown option--use default "both"
print "!! Warning - request for unknown leaflet name \'",leaflet,"\' from the ",self.name," leaflet"
print "!! the options are \"upper\", \"lower\", or \"both\"--using the default \"both\""
for leaflets in self.leaflets:
curr_leaf = self.leaflets[leaflets]
indices+=curr_leaf.GetGroupIndices(group)
n_com = len(indices)
#print "there are ",len(indices)," members"
xi = self.plane[0]
yi = self.plane[1]
zi = self.norm
out_tess = []
for f in xrange(self.nframes):
# get the current frame
curr_frame = self.frame[f]
# get the coordinates for the selection at this frame
com_curr = np.zeros((n_com,2))
count=0
for i in indices:
com_i = curr_frame.lipidcom[i].com_unwrap[self.plane]
com_curr[count]=com_i[:]
count+=1
vor = Voronoi(com_curr)
#out_tess.append([com_curr[:,0],com_curr[:,1],vor])
out_tess.append(vor)
return out_tess
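    # Illustrative plotting sketch (editor's addition; voronoi_plot_2d is part
    # of scipy.spatial, and matplotlib must be available):
    #   from scipy.spatial import voronoi_plot_2d
    #   vor_list = ms.VoronoiTesselate(leaflet="upper")
    #   voronoi_plot_2d(vor_list[0])   # diagram for the first frame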
# do Delauny tesselation using the COMs as generators
def DelaunayTesselate(self, leaflet="both",group="all"):
indices = []
#diffusion dimension - assume lateral so, dim=2
dim=2
if leaflet == "both":
for leaflets in self.leaflets:
curr_leaf = self.leaflets[leaflets]
indices+=curr_leaf.GetGroupIndices(group)
elif leaflet == "upper":
curr_leaf = self.leaflets[leaflet]
indices=curr_leaf.GetGroupIndices(group)
elif leaflet == "lower":
curr_leaf = self.leaflets[leaflet]
indices=curr_leaf.GetGroupIndices(group)
else:
#unknown option--use default "both"
print "!! Warning - request for unknown leaflet name \'",leaflet,"\' from the ",self.name," leaflet"
print "!! the options are \"upper\", \"lower\", or \"both\"--using the default \"both\""
for leaflets in self.leaflets:
curr_leaf = self.leaflets[leaflets]
indices+=curr_leaf.GetGroupIndices(group)
n_com = len(indices)
#print "there are ",len(indices)," members"
xi = self.plane[0]
yi = self.plane[1]
zi = self.norm
out_tess = []
for f in xrange(self.nframes):
# get the current frame
curr_frame = self.frame[f]
# get the coordinates for the selection at this frame
com_curr = np.zeros((n_com,2))
count=0
for i in indices:
com_i = curr_frame.lipidcom[i].com_unwrap[self.plane]
com_curr[count]=com_i[:]
count+=1
tri = Delaunay(com_curr)
out_tess.append([com_curr[:,0],com_curr[:,1],tri])
return out_tess
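    # Illustrative plotting sketch (editor's addition; assumes matplotlib.pyplot
    # as plt and a scipy version where Delaunay exposes .simplices):
    #   x, y, tri = ms.DelaunayTesselate(leaflet="upper")[0]
    #   plt.triplot(x, y, tri.simplices)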
# generate the step vectors of the center of mass--in the lateral dimensions
def StepVector(self, leaflet="both",group="all",fstart=0,fend=-1,fstep=1000,wrapped=False):
indices = []
if fstart<0:
fstart+=self.nframes
if fend < 0:
fend+=self.nframes
#diffusion dimension - assume lateral so, dim=2
dim=2
if leaflet == "both":
for leaflets in self.leaflets:
curr_leaf = self.leaflets[leaflets]
indices+=curr_leaf.GetGroupIndices(group)
elif leaflet == "upper":
curr_leaf = self.leaflets[leaflet]
indices=curr_leaf.GetGroupIndices(group)
elif leaflet == "lower":
curr_leaf = self.leaflets[leaflet]
indices=curr_leaf.GetGroupIndices(group)
else:
#unknown option--use default "both"
print "!! Warning - request for unknown leaflet name \'",leaflet,"\' from the ",self.name," leaflet"
print "!! the options are \"upper\", \"lower\", or \"both\"--using the default \"both\""
for leaflets in self.leaflets:
curr_leaf = self.leaflets[leaflets]
indices+=curr_leaf.GetGroupIndices(group)
n_com = len(indices)
#print "there are ",len(indices)," members"
xi = self.plane[0]
yi = self.plane[1]
zi = self.norm
vec_ends_out = []
for f in xrange(fstart,fend+1,fstep):
fprev = f-fstep
# get the current frame
curr_frame = self.frame[f]
prev_frame = self.frame[fprev]
# get the coordinates for the selection at this frame
vec_ends = np.zeros((n_com,4))
#vec_ends = []
count=0
for i in indices:
com_i = curr_frame.lipidcom[i].com_unwrap[self.plane]
com_j = prev_frame.lipidcom[i].com_unwrap[self.plane]
com_j_w = prev_frame.lipidcom[i].com[self.plane]
if wrapped:
vec_ends[count,0]=com_j_w[0]
vec_ends[count,1]=com_j_w[1]
else:
vec_ends[count,0]=com_j[0]
vec_ends[count,1]=com_j[1]
vec_ends[count,2]=com_i[0] - com_j[0]
vec_ends[count,3]=com_i[1] - com_j[1]
# vec_ends.append([com_j[0],com_j[0],com_i[0]-com_j[0],com_i[1]-com_j[1]])
count+=1
vec_ends_out.append(vec_ends)
return vec_ends_out
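    # Illustrative plotting sketch (editor's addition; assumes matplotlib.pyplot
    # as plt). Each returned array holds (x, y, dx, dy) rows for a quiver plot:
    #   vecs = ms.StepVector(leaflet="upper", fstep=100)[0]
    #   plt.quiver(vecs[:,0], vecs[:,1], vecs[:,2], vecs[:,3])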
# generate the step vectors of the center of mass
def StepVectorColors(self, leaflet="both",group="all"):
indices = []
ngroups = 1
group_names = []
#diffusion dimension - assume lateral so, dim=2
dim=2
if leaflet == "both":
for leaflets in self.leaflets:
curr_leaf = self.leaflets[leaflets]
indices+=curr_leaf.GetGroupIndices(group)
curr_group_names = curr_leaf.GetGroupNames()
if group == 'all':
for gname in curr_group_names:
if gname not in group_names:
group_names.append(gname)
else:
group_names.append(group)
elif leaflet == "upper":
curr_leaf = self.leaflets[leaflet]
indices=curr_leaf.GetGroupIndices(group)
curr_group_names = curr_leaf.GetGroupNames()
if group == 'all':
for gname in curr_group_names:
if gname not in group_names:
group_names.append(gname)
else:
group_names.append(group)
elif leaflet == "lower":
curr_leaf = self.leaflets[leaflet]
indices=curr_leaf.GetGroupIndices(group)
curr_group_names = curr_leaf.GetGroupNames()
if group == 'all':
for gname in curr_group_names:
if gname not in group_names:
group_names.append(gname)
else:
group_names.append(group)
else:
#unknown option--use default "both"
print "!! Warning - request for unknown leaflet name \'",leaflet,"\' from the ",self.name," leaflet"
print "!! the options are \"upper\", \"lower\", or \"both\"--using the default \"both\""
for leaflets in self.leaflets:
curr_leaf = self.leaflets[leaflets]
indices+=curr_leaf.GetGroupIndices(group)
curr_group_names = curr_leaf.GetGroupNames()
if group == 'all':
for gname in curr_group_names:
if gname not in group_names:
group_names.append(gname)
else:
group_names.append(group)
n_com = len(indices)
ngroups = len(group_names)
colors = cm.rainbow(np.linspace(0, 1, ngroups))
#build color map
cmap = {}
n = 0
for name in group_names:
cmap[name] = colors[n]
n+=1
#pick a frame-just use first frame
curr_frame = self.frame[0]
colors_out = np.zeros( (n_com,4))
count=0
for i in indices:
name_i = curr_frame.lipidcom[i].type
colors_out[count] = cmap[name_i]
count+=1
return colors_out,cmap
def RemoveLeafletCOMmotion(self,leaflet="both"):
do_leaflet = []
nlip = 0
if leaflet == "both":
do_leaflet.append('upper')
do_leaflet.append('lower')
nlip = []
for leaflets in do_leaflet:
nlip.append(float(len(self.leaflets[leaflets])))
elif leaflet == "upper":
do_leaflet.append(leaflet)
nlip = len(self.leaflets[leaflet])
elif leaflet == "lower":
do_leaflet.append(leaflet)
nlip = len(self.leaflets[leaflet])
else:
#unknown option--use default "both"
print "!! Warning - request for unknown leaflet name \'",leaflet,"\' from the ",self.name," leaflet"
print "!! the options are \"upper\", \"lower\", or \"both\"--using the default \"both\""
do_leaflet.append('upper')
do_leaflet.append('lower')
leaf_indices = {}
for leaf in do_leaflet:
leaf_indices[leaf]=list(self.leaflets[leaf].GetMemberIndices())
for f in xrange(self.nframes):
fr = self.frame[f]
for leaf in do_leaflet:
indices=leaf_indices[leaf]
#get the leaflet COM
lcom = np.zeros(3)
masst = 0.0
for i in indices:
lcom+=(fr.lipidcom[i].com_unwrap*fr.lipidcom[i].mass)
masst+=fr.lipidcom[i].mass
lcom/=masst
for i in indices:
fr.lipidcom[i].com_unwrap-=lcom
self.frame[f]=fr
return
############### multiprocessor parallelized versions of calculation member functions
# parallelized version of CalcMSD- using the multiprocessing module
def CalcMSD_parallel(self, leaflet="both",group="all",nprocs=2,timeaverage=False):
indices = []
#diffusion dimension - assume lateral so, dim=2
dim=2
if leaflet == "both":
for leaflets in self.leaflets:
curr_leaf = self.leaflets[leaflets]
indices+=curr_leaf.GetGroupIndices(group)
elif leaflet == "upper":
curr_leaf = self.leaflets[leaflet]
indices=curr_leaf.GetGroupIndices(group)
elif leaflet == "lower":
curr_leaf = self.leaflets[leaflet]
indices=curr_leaf.GetGroupIndices(group)
else:
#unknown option--use default "both"
print "!! Warning - request for unknown leaflet name \'",leaflet,"\' from the ",self.name," leaflet"
print "!! the options are \"upper\", \"lower\", or \"both\"--using the default \"both\""
for leaflets in self.leaflets:
curr_leaf = self.leaflets[leaflets]
indices+=curr_leaf.GetGroupIndices(group)
n_com = len(indices)
frame_ranges = []
total_frames = self.nframes
frames_per_proc_base = total_frames/nprocs
left_over = total_frames % (frames_per_proc_base * nprocs)
print "total frames ",total_frames
print "frames per proc ",frames_per_proc_base
print "left over ",left_over
#assign base ranges
for i in xrange(nprocs):
fs = i*frames_per_proc_base
fe = fs + frames_per_proc_base - 1
frame_ranges.append([fs,fe])
print "frame_ranges (pre-adjust):"
print frame_ranges
#now adjust for leftovers - divide them "equally" over the processes
lo = left_over
while lo > 0:
for i in xrange(nprocs):
frame_ranges[i][1]+=1
for j in xrange(i+1,nprocs):
frame_ranges[j][0]+=1
frame_ranges[j][1]+=1
lo-=1
if lo == 0:
break
print "nprocs ",nprocs
print "frame_ranges (post adjust): "
print frame_ranges
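        # Worked example (editor's addition): with total_frames = 10 and
        # nprocs = 3, frames_per_proc_base = 3 and left_over = 1; the ranges
        # go from [[0,2],[3,5],[6,8]] to [[0,3],[4,6],[7,9]], i.e. 4+3+3 frames.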
#initialize a numpy array to hold the msd for the selection
msd = np.zeros((self.nframes, 4))
#
msd_frames = MSD_frames
#frames_local = getattr(self, 'frame')
#shelf_local = shelve.open(self.frame.fs_name,flag="r", protocol=2)
frames_local = par_frames(self.frame.nframes,self.frame.fs_name,self.frame.frame_shelf)
#frames_local = par_frames(self.frame.nframes,self.frame.fs_name)
#frames_local = par_frames(self.frame.nframes,self.frame.fs_name,shelf_local)
plane_local = self.plane
#create process pool
pool = mp.Pool(processes=nprocs)
results = [pool.apply_async(msd_frames,args=(frames_local,frame_ranges[i][0],frame_ranges[i][1],indices,0,plane_local)) for i in range(0,nprocs)]
# print "results:"
# print results
results_ordered = [p.get() for p in results]
# print "results ordered: "
# print results_ordered
# #collect results into single array for return
i = 0
# print "len(results_ordered) ",len(results_ordered)
for p in results_ordered:
fs = frame_ranges[i][0]
fe = frame_ranges[i][1]
#print fs, fe
#print msd[fs:(fe+1)].shape
#print p[:].shape
msd[fs:(fe+1)] = p[:]
i+=1
pool.close()
pool.join()
#initialize a numpy array to hold the msd for the selection
msd_tavg = msd[:]
if timeaverage:
#regenerate the container
msd_tavg = np.zeros((self.nframes, 6))
# get the running time average
tavg_msd = GenRunningAverage(msd[:,1])
#slice together the values
msd_tavg[:,0:4]=msd[:,:]
msd_tavg[:,4:6]=tavg_msd[:,:]
#shelf_local.close()
return msd_tavg
#function to compute the thickness of the membrane (in the normal direction). The algorithm is based on
# the GridMAT-MD bilayer thickness calculation (except without the gridding procedure)
def CalcMembraneThickness_parallel(self,nprocs=2,timeaverage=True):
nlip = self.nlipids
comcup = np.zeros(3)
comclo = np.zeros(3)
dcom = np.zeros(3)
zdists = np.zeros((self.nframes, 3))
zmaps = np.zeros((self.nframes, self.nlipids, 6))
frame_ranges = []
total_frames = self.nframes
frames_per_proc_base = total_frames/nprocs
left_over = total_frames % (frames_per_proc_base * nprocs)
print "total frames ",total_frames
print "frames per proc ",frames_per_proc_base
print "left over ",left_over
#assign base ranges
for i in xrange(nprocs):
fs = i*frames_per_proc_base
fe = fs + frames_per_proc_base - 1
frame_ranges.append([fs,fe])
print "frame_ranges (pre-adjust):"
print frame_ranges
#now adjust for leftovers - divide them "equally" over the processes
lo = left_over
while lo > 0:
for i in xrange(nprocs):
frame_ranges[i][1]+=1
for j in xrange(i+1,nprocs):
frame_ranges[j][0]+=1
frame_ranges[j][1]+=1
lo-=1
if lo == 0:
break
print "nprocs ",nprocs
print "frame_ranges (post adjust): "
print frame_ranges
thick_frames = Thickness_frames
frames_local = par_frames(self.frame.nframes,self.frame.fs_name,self.frame.frame_shelf)
plane_local = self.plane
norm_local = self.norm
#create process pool
pool = mp.Pool(processes=nprocs)
results = [pool.apply_async(thick_frames,args=(frames_local,frame_ranges[i][0],frame_ranges[i][1],self.leaflets,nlip,plane_local,norm_local)) for i in range(0,nprocs)]
print "results:"
# print results
print "len(results) ",len(results)
results_ordered = [p.get() for p in results]
print "results ordered: "
# print results_ordered
# #collect results into single array for return
i = 0
#print "len(results_ordered) ",len(results_ordered)
for p in results_ordered:
fs = frame_ranges[i][0]
fe = frame_ranges[i][1]
print fs, fe
#print msd[fs:(fe+1)].shape
#print p[:].shape
zdistf = p[0]
zmapf = p[1]
#print zdistf.shape," ",zmapf.shape
zdists[fs:(fe+1)] = zdistf[:]
zmaps[fs:(fe+1)] = zmapf[:]
#zdists[fs:(fe+1)] = pg[:]
i+=1
pool.close()
pool.join()
#initialize a numpy array to hold the msd for the selection
zdist_tavg = zdists
if timeaverage:
#regenerate the container
zdist_tavg = np.zeros((self.nframes, 5))
# get the running time average
tavg_dz = GenRunningAverage(zdists[:,1])
#slice together the values
zdist_tavg[:,0:3]=zdists[:,:]
zdist_tavg[:,3:5]=tavg_dz[:,:]
#shelf_local.close()
        return zdist_tavg,zmaps
#return zdist_tavg
| [
"[email protected]"
] | |
5b1bfed3e7c9fb66a7429d873ee4764334aca828 | 05a4c12938935c1ea01bc1f43ab829f1e35c7168 | /tests/qflags/test_QtCore_UserInputResolutionOptions_UserInputResolutionOption.py | 41d9a397d82484468f77cce905c44209183de94c | [
"GPL-3.0-only"
] | permissive | bluebird75/PyQt5-stubs | e4eae29c49f15012649290fe7098c53e216254ed | e9adedcb817fa88ba854763d7b403d817ea83a00 | refs/heads/master | 2023-08-09T02:05:10.082636 | 2023-07-23T09:14:59 | 2023-07-23T09:14:59 | 208,842,053 | 0 | 0 | MIT | 2019-09-16T16:04:47 | 2019-09-16T16:04:46 | null | UTF-8 | Python | false | false | 10,179 | py | # mypy: no-warn-unreachable
import sys
from typing import Union, TypeVar, Type
if sys.version_info[:2] >= (3,8):
from typing import Literal
else:
from typing_extensions import Literal
import pytest
### Specific part
# file generated from qflags_test_template.py for QFlags class "QUrl.UserInputResolutionOptions" and flag class "QUrl.UserInputResolutionOption"
from PyQt5 import QtCore
OneFlagClass = QtCore.QUrl.UserInputResolutionOption
MultiFlagClass = QtCore.QUrl.UserInputResolutionOptions
oneFlagRefValue1 = QtCore.QUrl.UserInputResolutionOption.DefaultResolution
oneFlagRefValue2 = QtCore.QUrl.UserInputResolutionOption.AssumeLocalFile
OR_CONVERTS_TO_MULTI: Literal[False] = False
OR_INT_CONVERTS_TO_MULTI: Literal[False] = False
INT_OR_CONVERTS_TO_MULTI: Literal[False] = False
### End of specific part
def assert_type_of_value_int(value: int) -> None:
'''Raise an exception if the value is not of type expected_type'''
assert isinstance(value, int)
def assert_type_of_value_oneFlag(value: OneFlagClass) -> None:
'''Raise an exception if the value is not of type expected_type'''
assert type(value) == OneFlagClass
def assert_type_of_value_multiFlag(value: MultiFlagClass) -> None:
'''Raise an exception if the value is not of type expected_type'''
assert type(value) == MultiFlagClass
def test_on_one_flag_class() -> None:
oneFlagValue1 = oneFlagRefValue1
oneFlagValue2 = oneFlagRefValue2
oneFlagValueTest = oneFlagValue1 # type: OneFlagClass
intValue = 0 # type: int
oneOrMultiFlagValueTest = oneFlagValue1 # type: Union[OneFlagClass, MultiFlagClass]
oneFlagOrIntValue = oneFlagValue1 # type: Union[int, OneFlagClass]
# upcast from OneFlagClass to int
intValue = oneFlagValue1
# conversion also accepted
intValue = int(oneFlagValue1)
# this is not supported type-safely for a good reason
oneFlagValueTest = 1 # type: ignore
# correct way to do it
oneFlagValueTest = OneFlagClass(1)
oneFlagValueTest = OneFlagClass(oneFlagValue1)
# The rules of OneFlagClass conversion defined in PyQt5 are:
# 1. | ~= with OneFlagClass return a MultiFlagClass (which is not compatible to int)
# Note that this breaks Liskov principle
# 2. everything else returns int: & ^ &= ^=
# 3. operations with int return int.
if OR_CONVERTS_TO_MULTI:
assert_type_of_value_multiFlag(oneFlagValue1 | oneFlagValue2)
else:
assert_type_of_value_int(oneFlagValue1 | oneFlagValue2)
assert_type_of_value_int(~oneFlagValue1)
assert_type_of_value_int(oneFlagValue1 & oneFlagValue2)
assert_type_of_value_int(oneFlagValue1 ^ oneFlagValue2)
# right operand
if OR_INT_CONVERTS_TO_MULTI:
assert_type_of_value_multiFlag(oneFlagValue1 | 1)
else:
assert_type_of_value_int(oneFlagValue1 | 1)
assert_type_of_value_int(oneFlagValue1 & 1)
assert_type_of_value_int(oneFlagValue1 ^ 1)
assert_type_of_value_int(oneFlagValue1 + 1)
assert_type_of_value_int(oneFlagValue1 - 1)
# left operand
if INT_OR_CONVERTS_TO_MULTI:
assert_type_of_value_multiFlag(1 | oneFlagValue1)
else:
assert_type_of_value_int(1 | oneFlagValue1)
assert_type_of_value_int(1 & oneFlagValue1)
assert_type_of_value_int(1 ^ oneFlagValue1)
assert_type_of_value_int(1 + oneFlagValue1)
assert_type_of_value_int(1 - oneFlagValue1)
if OR_CONVERTS_TO_MULTI:
oneOrMultiFlagValueTest = oneFlagValue1 # reset type and value
assert_type_of_value_oneFlag(oneOrMultiFlagValueTest)
oneOrMultiFlagValueTest |= oneFlagValue2
assert_type_of_value_multiFlag(oneOrMultiFlagValueTest) # nice violation of Liskov principle here
else:
oneFlagOrIntValue = oneFlagValue1 # reset type and value
assert_type_of_value_oneFlag(oneFlagOrIntValue)
oneFlagOrIntValue |= oneFlagValue2
assert_type_of_value_int(oneFlagOrIntValue)
if OR_INT_CONVERTS_TO_MULTI:
oneOrMultiFlagValueTest = oneFlagValue1 # reset type and value
assert_type_of_value_oneFlag(oneOrMultiFlagValueTest)
oneOrMultiFlagValueTest |= 1
assert_type_of_value_multiFlag(oneOrMultiFlagValueTest)
else:
oneFlagOrIntValue = oneFlagValue1 # reset type and value
assert_type_of_value_oneFlag(oneFlagOrIntValue)
oneFlagOrIntValue |= 1
assert_type_of_value_int(oneFlagOrIntValue)
oneFlagOrIntValue = oneFlagValue1 # reset type and value
assert_type_of_value_oneFlag(oneFlagOrIntValue)
oneFlagOrIntValue &= 1
assert_type_of_value_int(oneFlagOrIntValue)
oneFlagOrIntValue = oneFlagValue1 # reset type and value
assert_type_of_value_oneFlag(oneFlagOrIntValue)
oneFlagOrIntValue &= oneFlagValue2
assert_type_of_value_int(oneFlagOrIntValue)
oneFlagOrIntValue = oneFlagValue1 # reset type and value
assert_type_of_value_oneFlag(oneFlagOrIntValue)
oneFlagOrIntValue ^= 1
assert_type_of_value_int(oneFlagOrIntValue)
oneFlagOrIntValue = oneFlagValue1 # reset type and value
assert_type_of_value_oneFlag(oneFlagOrIntValue)
oneFlagOrIntValue ^= oneFlagValue2
assert_type_of_value_int(oneFlagOrIntValue)
def test_on_multi_flag_class() -> None:
oneFlagValue1 = oneFlagRefValue1
multiFlagValue1 = MultiFlagClass()
multiFlagValue2 = MultiFlagClass()
multiFlagValueTest = multiFlagValue1 # type: MultiFlagClass
intValue = 0
assert_type_of_value_oneFlag(oneFlagValue1)
assert_type_of_value_multiFlag(multiFlagValue1)
assert_type_of_value_multiFlag(multiFlagValue2)
assert_type_of_value_multiFlag(multiFlagValueTest)
assert_type_of_value_int(intValue)
# MultiFlagClass may be created by combining MultiFlagClass together
assert_type_of_value_multiFlag( ~multiFlagValue1 )
assert_type_of_value_multiFlag( multiFlagValue1 | multiFlagValue2 )
assert_type_of_value_multiFlag( multiFlagValue1 & multiFlagValue2 )
assert_type_of_value_multiFlag( multiFlagValue1 ^ multiFlagValue2 )
# MultiFlagClass may be created by combining MultiFlagClass and OneFlagClass, left or right
assert_type_of_value_multiFlag( multiFlagValue1 | oneFlagValue1 )
assert_type_of_value_multiFlag( multiFlagValue1 & oneFlagValue1 )
assert_type_of_value_multiFlag( multiFlagValue1 ^ oneFlagValue1 )
assert_type_of_value_multiFlag( oneFlagValue1 | multiFlagValue1 )
assert_type_of_value_multiFlag( oneFlagValue1 & multiFlagValue1 )
assert_type_of_value_multiFlag( oneFlagValue1 ^ multiFlagValue1 )
# MultClassFlag may be created by combining MultiFlagClass and int, right only
assert_type_of_value_multiFlag(multiFlagValue1 | 1)
assert_type_of_value_multiFlag(multiFlagValue1 & 1)
assert_type_of_value_multiFlag(multiFlagValue1 ^ 1)
# this is rejected by mypy and is slightly annoying: you can not pass a OneFlagClass variable to a method expecting a MultiFlagClass
# explicit typing must be used on those methods to accept both OneFlagClass and MultiFlagClass
multiFlagValueTest = oneFlagValue1 # type: ignore
# correct way to do it
multiFlagValueTest = MultiFlagClass(oneFlagValue1)
assert_type_of_value_multiFlag(multiFlagValueTest)
# this is rejected for the same reason as for OneFlagClass.
intValue = multiFlagValueTest # type: ignore
# correct way to do it
intValue = int(multiFlagValueTest)
assert_type_of_value_int(intValue)
# rejected by mypy rightfully
multiFlagValueTest = 1 # type: ignore
# correct way to do it
multiFlagValueTest = MultiFlagClass(1)
    # assignment operations with OneFlagClass
assert_type_of_value_multiFlag(multiFlagValueTest)
multiFlagValueTest |= oneFlagValue1
assert_type_of_value_multiFlag(multiFlagValueTest)
assert_type_of_value_multiFlag(multiFlagValueTest)
multiFlagValueTest &= oneFlagValue1
assert_type_of_value_multiFlag(multiFlagValueTest)
assert_type_of_value_multiFlag(multiFlagValueTest)
multiFlagValueTest ^= oneFlagValue1
assert_type_of_value_multiFlag(multiFlagValueTest)
    # assignment operations with int
assert_type_of_value_multiFlag(multiFlagValueTest)
multiFlagValueTest |= 1
assert_type_of_value_multiFlag(multiFlagValueTest)
assert_type_of_value_multiFlag(multiFlagValueTest)
multiFlagValueTest &= 1
assert_type_of_value_multiFlag(multiFlagValueTest)
assert_type_of_value_multiFlag(multiFlagValueTest)
multiFlagValueTest ^= 1
assert_type_of_value_multiFlag(multiFlagValueTest)
#########################################################1
#
# Exploring errors
#
#########################################################1
# This checks the following:
# + and - operations are not supported on MultiFlagClass
# combining int with MultiFlagClass does not work
pytest.raises(TypeError, lambda: 1 | multiFlagValue1 ) # type: ignore[operator]
pytest.raises(TypeError, lambda: 1 & multiFlagValue1 ) # type: ignore[operator]
pytest.raises(TypeError, lambda: 1 ^ multiFlagValue1 ) # type: ignore[operator]
pytest.raises(TypeError, lambda: multiFlagValue1 + multiFlagValue2 ) # type: ignore[operator]
pytest.raises(TypeError, lambda: multiFlagValue1 - multiFlagValue2 ) # type: ignore[operator]
pytest.raises(TypeError, lambda: multiFlagValue1 + oneFlagValue1) # type: ignore[operator]
pytest.raises(TypeError, lambda: multiFlagValue1 - oneFlagValue1) # type: ignore[operator]
pytest.raises(TypeError, lambda: multiFlagValue1 + 1) # type: ignore[operator]
pytest.raises(TypeError, lambda: multiFlagValue1 - 1) # type: ignore[operator]
pytest.raises(TypeError, lambda: oneFlagValue1 + multiFlagValue1) # type: ignore[operator]
pytest.raises(TypeError, lambda: oneFlagValue1 - multiFlagValue1) # type: ignore[operator]
pytest.raises(TypeError, lambda: 1 + multiFlagValue1) # type: ignore[operator]
pytest.raises(TypeError, lambda: 1 - multiFlagValue1) # type: ignore[operator]
def f1() -> None:
multiFlagValueTest = MultiFlagClass()
multiFlagValueTest += oneFlagValue1 # type: ignore[assignment, operator]
def f2() -> None:
multiFlagValueTest = MultiFlagClass()
multiFlagValueTest += 1 # type: ignore[assignment, operator]
def f3() -> None:
multiFlagValueTest = MultiFlagClass()
multiFlagValueTest -= oneFlagValue1 # type: ignore[assignment, operator]
def f4() -> None:
multiFlagValueTest = MultiFlagClass()
multiFlagValueTest -= 1 # type: ignore[assignment, operator]
pytest.raises(TypeError, f1)
pytest.raises(TypeError, f2)
pytest.raises(TypeError, f3)
pytest.raises(TypeError, f4)
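# Typical invocation (editor's addition; illustrative): this module is meant to
# be both type-checked and executed, e.g.
#   mypy tests/qflags/test_QtCore_UserInputResolutionOptions_UserInputResolutionOption.py
#   pytest tests/qflags/test_QtCore_UserInputResolutionOptions_UserInputResolutionOption.py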
| [
"[email protected]"
] | |
814b8b9b61015a2c4b7bc2887f99e6ddf0cb5d62 | 4bd555bc662b8182a2e7644976bfdb00ed5e1ebe | /PythonistaAppTemplate/PythonistaKit.framework/pylib/site-packages/xhtml2pdf/default.py | 2900140509bcb3b85a652499731d715219f06229 | [] | no_license | fhelmli/homeNOWG2 | a103df1ef97194dec9501dbda87ec1f7c111fb4a | e794fd87b296544542fd9dc7ac94c981c6312419 | refs/heads/master | 2020-04-04T13:40:20.417769 | 2019-01-30T21:41:04 | 2019-01-30T21:41:04 | 155,970,686 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,929 | py | #import pythonista
# -*- coding: utf-8 -*-
from reportlab.lib.pagesizes import (A0, A1, A2, A3, A4, A5, A6, B0, B1, B2, B3,
B4, B5, B6, LETTER, LEGAL, ELEVENSEVENTEEN)
# Copyright 2010 Dirk Holtwick, holtwick.it
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
PML_WARNING = "warning"
PML_ERROR = "error"
PML_EXCEPTION = "PML Exception"
PML_PREFIX = "pdf:"
#CLASS = 1
BOOL = 2
FONT = 3
COLOR = 4
FILE = 5
SIZE = 6
INT = 7
STRING = 8
BOX = 9
POS = 10
#STYLE = 11
MUST = 23
"""
Definition of all known tags. Also used for building the reference
"""
TAGS = {
# FORMAT
#"document": (1, {
# "format": (["a0", "a1", "a2", "a3", "a4", "a5", "a6",
# "b0", "b1", "b2", "b3", "b4", "b5", "b6",
# "letter", "legal", "elevenseventeen"], "a4"),
# "orientation": ["portrait", "landscape"],
# "fullscreen": (BOOL, "0"),
# "author": (STRING, ""),
# "subject": (STRING, ""),
# "title": (STRING, ""),
# "duration": INT,
# "showoutline": (BOOL, "0"),
# "outline": INT,
# }),
"pdftemplate": (1, {
"name": (STRING, "body"),
"format": (["a0", "a1", "a2", "a3", "a4", "a5", "a6",
"b0", "b1", "b2", "b3", "b4", "b5", "b6",
"letter", "legal", "elevenseventeen"], "a4"),
"orientation": ["portrait", "landscape"],
"background": FILE,
}),
"pdfframe": (0, {
"name": (STRING, ""),
"box": (BOX, MUST),
"border": (BOOL, "0"),
"static": (BOOL, "0"),
}),
#"static": (1, {
# "name": STRING,
# "box": (BOX, MUST),
# "border": (BOOL, "0"),
# }),
"pdfnexttemplate": (0, {
"name": (STRING, "body"),
}),
"pdfnextpage": (0, {
"name": (STRING, ""),
# "background": FILE,
}),
"pdfnextframe": (0, {}),
"pdffont": (0, {
"src": (FILE, MUST),
"name": (STRING, MUST),
# "print": (BOOL, "0"),
"encoding": (STRING, "WinAnsiEncoding"),
}),
"pdfdrawline": (0, {
"from": (POS, MUST),
"to": (POS, MUST),
"color": (COLOR, "#000000"),
"width": (SIZE, 1),
}),
"drawpoint": (0, {
"pos": (POS, MUST),
"color": (COLOR, "#000000"),
"width": (SIZE, 1),
}),
"pdfdrawlines": (0, {
"coords": (STRING, MUST),
"color": (COLOR, "#000000"),
"width": (SIZE, 1),
}),
"pdfdrawstring": (0, {
"pos": (POS, MUST),
"text": (STRING, MUST),
"color": (COLOR, "#000000"),
"align": (["left", "center", "right"], "right"),
"valign": (["top", "middle", "bottom"], "bottom"),
# "class": CLASS,
"rotate": (INT, "0"),
}),
"pdfdrawimg": (0, {
"pos": (POS, MUST),
"src": (FILE, MUST),
"width": SIZE,
"height": SIZE,
"align": (["left", "center", "right"], "right"),
"valign": (["top", "middle", "bottom"], "bottom"),
}),
"pdfspacer": (0, {
"height": (SIZE, MUST),
}),
"pdfpagenumber": (0, {
"example": (STRING, "0"),
}),
"pdfpagecount": (0, {
}),
"pdftoc": (0, {
}),
"pdfversion": (0, {
}),
"pdfkeeptogether": (1, {
}),
"pdfkeepinframe": (1, {
"maxwidth": SIZE,
"maxheight": SIZE,
"mergespace": (INT, 1),
"mode": (["error", "overflow", "shrink", "truncate"], "shrink"),
"name": (STRING, "")
}),
# The chart example, see pml_charts
"pdfchart": (1, {
"type": (["spider", "bar"], "bar"),
"strokecolor": (COLOR, "#000000"),
"width": (SIZE, MUST),
"height": (SIZE, MUST),
}),
"pdfchartdata": (0, {
"set": (STRING, MUST),
"value": (STRING),
# "label": (STRING),
"strokecolor": (COLOR),
"fillcolor": (COLOR),
"strokewidth": (SIZE),
}),
"pdfchartlabel": (0, {
"value": (STRING, MUST),
}),
"pdfbarcode": (0, {
"value": (STRING, MUST),
"type": (["i2of5", "itf",
"code39", "extendedcode39",
"code93", "extendedcode93",
"msi",
"codabar", "nw7",
"code11",
"fim",
"postnet",
"usps4s",
"code128",
"ean13", "ean8",
"qr",
], "code128"),
"humanreadable": (STRING, "0"),
"vertical": (STRING, "0"),
"checksum": (STRING, "1"),
"barwidth": SIZE,
"barheight": SIZE,
"fontsize": SIZE,
"align": (["baseline", "top", "middle", "bottom"], "baseline"),
}),
# ========================================================
"link": (0, {
"href": (STRING, MUST),
"rel": (STRING, ""),
"type": (STRING, ""),
"media": (STRING, "all"),
"charset": (STRING, "latin1"), # XXX Must be something else...
}),
"meta": (0, {
"name": (STRING, ""),
"content": (STRING, ""),
}),
"style": (0, {
"type": (STRING, ""),
"media": (STRING, "all"),
}),
"img": (0, {
"src": (FILE, MUST),
"width": SIZE,
"height": SIZE,
"align": ["top", "middle", "bottom", "left", "right",
"texttop", "absmiddle", "absbottom", "baseline"],
}),
"table": (1, {
"align": (["left", "center", "right"], "left"),
"valign": (["top", "bottom", "middle"], "middle"),
"border": (SIZE, "0"),
"bordercolor": (COLOR, "#000000"),
"bgcolor": COLOR,
"cellpadding": (SIZE, "0"),
"cellspacing": (SIZE, "0"),
"repeat": (INT, "0"), # XXX Remove this! Set to 0
"width": STRING,
#"keepmaxwidth": SIZE,
#"keepmaxheight": SIZE,
#"keepmergespace": (INT, 1),
#"keepmode": (["error", "overflow", "shrink", "truncate"], "shrink"),
}),
"tr": (1, {
"bgcolor": COLOR,
"valign": ["top", "bottom", "middle"],
"border": SIZE,
"bordercolor": (COLOR, "#000000"),
}),
"td": (1, {
"align": ["left", "center", "right", "justify"],
"valign": ["top", "bottom", "middle"],
"width": STRING,
"bgcolor": COLOR,
"border": SIZE,
"bordercolor": (COLOR, "#000000"),
"colspan": INT,
"rowspan": INT,
}),
"th": (1, {
"align": ["left", "center", "right", "justify"],
"valign": ["top", "bottom", "middle"],
"width": STRING,
"bgcolor": COLOR,
"border": SIZE,
"bordercolor": (COLOR, "#000000"),
"colspan": INT,
"rowspan": INT,
}),
"dl": (1, {
}),
"dd": (1, {
}),
"dt": (1, {
}),
"ol": (1, {
"type": (["1", "a", "A", "i", "I"], "1"),
}),
"ul": (1, {
"type": (["circle", "disk", "square"], "disk"),
}),
"li": (1, {
}),
"hr": (0, {
"color": (COLOR, "#000000"),
"size": (SIZE, "1"),
"width": STRING,
"align": ["left", "center", "right", "justify"],
}),
"div": (1, {
"align": ["left", "center", "right", "justify"],
}),
"p": (1, {
"align": ["left", "center", "right", "justify"],
}),
"br": (0, {
}),
"h1": (1, {
"outline": STRING,
"closed": (INT, 0),
"align": ["left", "center", "right", "justify"],
}),
"h2": (1, {
"outline": STRING,
"closed": (INT, 0),
"align": ["left", "center", "right", "justify"],
}),
"h3": (1, {
"outline": STRING,
"closed": (INT, 0),
"align": ["left", "center", "right", "justify"],
}),
"h4": (1, {
"outline": STRING,
"closed": (INT, 0),
"align": ["left", "center", "right", "justify"],
}),
"h5": (1, {
"outline": STRING,
"closed": (INT, 0),
"align": ["left", "center", "right", "justify"],
}),
"h6": (1, {
"outline": STRING,
"closed": (INT, 0),
"align": ["left", "center", "right", "justify"],
}),
"font": (1, {
"face": FONT,
"color": COLOR,
"size": STRING,
}),
"a": (1, {
"href": STRING,
"name": STRING,
}),
"input": (0, {
"name": STRING,
"value": STRING,
"type": (["text", "hidden", "checkbox"], "text"),
}),
"textarea": (1, {
"name": STRING,
}),
"select": (1, {
"name": STRING,
"value": STRING,
}),
"option": (0, {
"value": STRING,
}),
}
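# (Added note, inferred from the entries above rather than stated in the
# original source:) each tag appears to map to a (has_content, attr_spec)
# pair: 1 for container tags, 0 for empty/void tags. Inside attr_spec, a
# bare type or choice list marks an optional attribute with no default, a
# (type, default) tuple supplies a default value, and MUST flags the
# attribute as required.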
# XXX use "html" not "*" as default!
DEFAULT_CSS = """
html {
font-family: Helvetica;
font-size: 10px;
font-weight: normal;
color: #000000;
background-color: transparent;
margin: 0;
padding: 0;
line-height: 150%;
border: 1px none;
display: inline;
width: auto;
height: auto;
white-space: normal;
}
b,
strong {
font-weight: bold;
}
i,
em {
font-style: italic;
}
u {
text-decoration: underline;
}
s,
strike {
text-decoration: line-through;
}
a {
text-decoration: underline;
color: blue;
}
ins {
color: green;
text-decoration: underline;
}
del {
color: red;
text-decoration: line-through;
}
pre,
code,
kbd,
samp,
tt {
font-family: "Courier New";
}
h1,
h2,
h3,
h4,
h5,
h6 {
font-weight:bold;
-pdf-outline: true;
-pdf-outline-open: false;
}
h1 {
/*18px via YUI Fonts CSS foundation*/
font-size:138.5%;
-pdf-outline-level: 0;
}
h2 {
/*16px via YUI Fonts CSS foundation*/
font-size:123.1%;
-pdf-outline-level: 1;
}
h3 {
/*14px via YUI Fonts CSS foundation*/
font-size:108%;
-pdf-outline-level: 2;
}
h4 {
-pdf-outline-level: 3;
}
h5 {
-pdf-outline-level: 4;
}
h6 {
-pdf-outline-level: 5;
}
h1,
h2,
h3,
h4,
h5,
h6,
p,
pre,
hr {
margin:1em 0;
}
address,
blockquote,
body,
center,
dl,
dir,
div,
fieldset,
form,
h1,
h2,
h3,
h4,
h5,
h6,
hr,
isindex,
menu,
noframes,
noscript,
ol,
p,
pre,
table,
th,
tr,
td,
ul,
li,
dd,
dt,
pdftoc {
display: block;
}
table {
}
tr,
th,
td {
vertical-align: middle;
width: auto;
}
th {
text-align: center;
font-weight: bold;
}
center {
text-align: center;
}
big {
font-size: 125%;
}
small {
font-size: 75%;
}
ul {
margin-left: 1.5em;
list-style-type: disc;
}
ul ul {
list-style-type: circle;
}
ul ul ul {
list-style-type: square;
}
ol {
list-style-type: decimal;
margin-left: 1.5em;
}
pre {
white-space: pre;
}
blockquote {
margin-left: 1.5em;
margin-right: 1.5em;
}
noscript {
display: none;
}
"""
DEFAULT_FONT = {
"courier": "Courier",
"courier-bold": "Courier-Bold",
"courier-boldoblique": "Courier-BoldOblique",
"courier-oblique": "Courier-Oblique",
"helvetica": "Helvetica",
"helvetica-bold": "Helvetica-Bold",
"helvetica-boldoblique": "Helvetica-BoldOblique",
"helvetica-oblique": "Helvetica-Oblique",
"times": "Times-Roman",
"times-roman": "Times-Roman",
"times-bold": "Times-Bold",
"times-boldoblique": "Times-BoldOblique",
"times-oblique": "Times-Oblique",
"symbol": "Symbol",
"zapfdingbats": "ZapfDingbats",
"zapf-dingbats": "ZapfDingbats",
# Alias
"arial": "Helvetica",
"times new roman": "Times-Roman",
"georgia": "Times-Roman",
'serif': 'Times-Roman',
'sansserif': 'Helvetica',
'sans': 'Helvetica',
'monospaced': 'Courier',
'monospace': 'Courier',
'mono': 'Courier',
'courier new': 'Courier',
'verdana': 'Helvetica',
'geneva': 'Helvetica',
}
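# A minimal sketch (added; not part of the original module) of how the
# DEFAULT_FONT table might be consulted to normalize a CSS font-family name
# to a ReportLab base font. The helper name resolve_font_name is
# hypothetical; only the DEFAULT_FONT mapping above comes from this file.
#
# def resolve_font_name(name):
#     # Case-insensitive lookup against the alias table; fall back to the
#     # original name so separately registered fonts still resolve.
#     return DEFAULT_FONT.get(name.strip().lower(), name)
#
# resolve_font_name("Arial")        # -> "Helvetica"
# resolve_font_name("Courier New")  # -> "Courier"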
PML_PAGESIZES = {
"a0": A0,
"a1": A1,
"a2": A2,
"a3": A3,
"a4": A4,
"a5": A5,
"a6": A6,
"b0": B0,
"b1": B1,
"b2": B2,
"b3": B3,
"b4": B4,
"b5": B5,
"b6": B6,
"letter": LETTER,
"legal": LEGAL,
"ledger": ELEVENSEVENTEEN,
"elevenseventeen": ELEVENSEVENTEEN,
}
# ===========================================================================
# File: /env/lib/python3.6/site-packages/pandas/tests/tseries/test_frequencies.py
# Repo: Raniac/NEURO-LEARN | License: Apache-2.0 (permissive) | Python, UTF-8
# ===========================================================================
from datetime import datetime, timedelta
import numpy as np
import pytest
from pandas._libs.tslibs import frequencies as libfrequencies, resolution
from pandas._libs.tslibs.ccalendar import MONTHS
from pandas._libs.tslibs.frequencies import (
INVALID_FREQ_ERR_MSG, FreqGroup, _period_code_map, get_freq, get_freq_code)
import pandas.compat as compat
from pandas.compat import is_platform_windows, range
from pandas import (
DatetimeIndex, Index, Series, Timedelta, Timestamp, date_range,
period_range)
from pandas.core.tools.datetimes import to_datetime
import pandas.util.testing as tm
import pandas.tseries.frequencies as frequencies
import pandas.tseries.offsets as offsets
class TestToOffset(object):
def test_to_offset_multiple(self):
freqstr = '2h30min'
freqstr2 = '2h 30min'
result = frequencies.to_offset(freqstr)
assert (result == frequencies.to_offset(freqstr2))
expected = offsets.Minute(150)
assert (result == expected)
freqstr = '2h30min15s'
result = frequencies.to_offset(freqstr)
expected = offsets.Second(150 * 60 + 15)
assert (result == expected)
freqstr = '2h 60min'
result = frequencies.to_offset(freqstr)
expected = offsets.Hour(3)
assert (result == expected)
freqstr = '2h 20.5min'
result = frequencies.to_offset(freqstr)
expected = offsets.Second(8430)
assert (result == expected)
freqstr = '1.5min'
result = frequencies.to_offset(freqstr)
expected = offsets.Second(90)
assert (result == expected)
freqstr = '0.5S'
result = frequencies.to_offset(freqstr)
expected = offsets.Milli(500)
assert (result == expected)
freqstr = '15l500u'
result = frequencies.to_offset(freqstr)
expected = offsets.Micro(15500)
assert (result == expected)
freqstr = '10s75L'
result = frequencies.to_offset(freqstr)
expected = offsets.Milli(10075)
assert (result == expected)
freqstr = '1s0.25ms'
result = frequencies.to_offset(freqstr)
expected = offsets.Micro(1000250)
assert (result == expected)
freqstr = '1s0.25L'
result = frequencies.to_offset(freqstr)
expected = offsets.Micro(1000250)
assert (result == expected)
freqstr = '2800N'
result = frequencies.to_offset(freqstr)
expected = offsets.Nano(2800)
assert (result == expected)
freqstr = '2SM'
result = frequencies.to_offset(freqstr)
expected = offsets.SemiMonthEnd(2)
assert (result == expected)
freqstr = '2SM-16'
result = frequencies.to_offset(freqstr)
expected = offsets.SemiMonthEnd(2, day_of_month=16)
assert (result == expected)
freqstr = '2SMS-14'
result = frequencies.to_offset(freqstr)
expected = offsets.SemiMonthBegin(2, day_of_month=14)
assert (result == expected)
freqstr = '2SMS-15'
result = frequencies.to_offset(freqstr)
expected = offsets.SemiMonthBegin(2)
assert (result == expected)
# malformed
with pytest.raises(ValueError, match='Invalid frequency: 2h20m'):
frequencies.to_offset('2h20m')
def test_to_offset_negative(self):
freqstr = '-1S'
result = frequencies.to_offset(freqstr)
assert (result.n == -1)
freqstr = '-5min10s'
result = frequencies.to_offset(freqstr)
assert (result.n == -310)
freqstr = '-2SM'
result = frequencies.to_offset(freqstr)
assert (result.n == -2)
freqstr = '-1SMS'
result = frequencies.to_offset(freqstr)
assert (result.n == -1)
def test_to_offset_invalid(self):
# GH 13930
with pytest.raises(ValueError, match='Invalid frequency: U1'):
frequencies.to_offset('U1')
with pytest.raises(ValueError, match='Invalid frequency: -U'):
frequencies.to_offset('-U')
with pytest.raises(ValueError, match='Invalid frequency: 3U1'):
frequencies.to_offset('3U1')
with pytest.raises(ValueError, match='Invalid frequency: -2-3U'):
frequencies.to_offset('-2-3U')
with pytest.raises(ValueError, match='Invalid frequency: -2D:3H'):
frequencies.to_offset('-2D:3H')
with pytest.raises(ValueError, match='Invalid frequency: 1.5.0S'):
frequencies.to_offset('1.5.0S')
# split offsets with spaces are valid
assert frequencies.to_offset('2D 3H') == offsets.Hour(51)
assert frequencies.to_offset('2 D3 H') == offsets.Hour(51)
assert frequencies.to_offset('2 D 3 H') == offsets.Hour(51)
assert frequencies.to_offset(' 2 D 3 H ') == offsets.Hour(51)
assert frequencies.to_offset(' H ') == offsets.Hour()
assert frequencies.to_offset(' 3 H ') == offsets.Hour(3)
# special cases
assert frequencies.to_offset('2SMS-15') == offsets.SemiMonthBegin(2)
with pytest.raises(ValueError, match='Invalid frequency: 2SMS-15-15'):
frequencies.to_offset('2SMS-15-15')
with pytest.raises(ValueError, match='Invalid frequency: 2SMS-15D'):
frequencies.to_offset('2SMS-15D')
def test_to_offset_leading_zero(self):
freqstr = '00H 00T 01S'
result = frequencies.to_offset(freqstr)
assert (result.n == 1)
freqstr = '-00H 03T 14S'
result = frequencies.to_offset(freqstr)
assert (result.n == -194)
def test_to_offset_leading_plus(self):
freqstr = '+1d'
result = frequencies.to_offset(freqstr)
assert (result.n == 1)
freqstr = '+2h30min'
result = frequencies.to_offset(freqstr)
assert (result.n == 150)
for bad_freq in ['+-1d', '-+1h', '+1', '-7', '+d', '-m']:
with pytest.raises(ValueError, match='Invalid frequency:'):
frequencies.to_offset(bad_freq)
def test_to_offset_pd_timedelta(self):
# Tests for #9064
td = Timedelta(days=1, seconds=1)
result = frequencies.to_offset(td)
expected = offsets.Second(86401)
assert (expected == result)
td = Timedelta(days=-1, seconds=1)
result = frequencies.to_offset(td)
expected = offsets.Second(-86399)
assert (expected == result)
td = Timedelta(hours=1, minutes=10)
result = frequencies.to_offset(td)
expected = offsets.Minute(70)
assert (expected == result)
td = Timedelta(hours=1, minutes=-10)
result = frequencies.to_offset(td)
expected = offsets.Minute(50)
assert (expected == result)
td = Timedelta(weeks=1)
result = frequencies.to_offset(td)
expected = offsets.Day(7)
assert (expected == result)
td1 = Timedelta(hours=1)
result1 = frequencies.to_offset(td1)
result2 = frequencies.to_offset('60min')
assert (result1 == result2)
td = Timedelta(microseconds=1)
result = frequencies.to_offset(td)
expected = offsets.Micro(1)
assert (expected == result)
td = Timedelta(microseconds=0)
pytest.raises(ValueError, lambda: frequencies.to_offset(td))
def test_anchored_shortcuts(self):
result = frequencies.to_offset('W')
expected = frequencies.to_offset('W-SUN')
assert (result == expected)
result1 = frequencies.to_offset('Q')
result2 = frequencies.to_offset('Q-DEC')
expected = offsets.QuarterEnd(startingMonth=12)
assert (result1 == expected)
assert (result2 == expected)
result1 = frequencies.to_offset('Q-MAY')
expected = offsets.QuarterEnd(startingMonth=5)
assert (result1 == expected)
result1 = frequencies.to_offset('SM')
result2 = frequencies.to_offset('SM-15')
expected = offsets.SemiMonthEnd(day_of_month=15)
assert (result1 == expected)
assert (result2 == expected)
result = frequencies.to_offset('SM-1')
expected = offsets.SemiMonthEnd(day_of_month=1)
assert (result == expected)
result = frequencies.to_offset('SM-27')
expected = offsets.SemiMonthEnd(day_of_month=27)
assert (result == expected)
result = frequencies.to_offset('SMS-2')
expected = offsets.SemiMonthBegin(day_of_month=2)
assert (result == expected)
result = frequencies.to_offset('SMS-27')
expected = offsets.SemiMonthBegin(day_of_month=27)
assert (result == expected)
# ensure invalid cases fail as expected
invalid_anchors = ['SM-0', 'SM-28', 'SM-29',
'SM-FOO', 'BSM', 'SM--1',
'SMS-1', 'SMS-28', 'SMS-30',
                           'SMS-BAR', 'SMS-BYR', 'BSMS',
'SMS--2']
for invalid_anchor in invalid_anchors:
with pytest.raises(ValueError, match='Invalid frequency: '):
frequencies.to_offset(invalid_anchor)
def test_ms_vs_MS():
left = frequencies.get_offset('ms')
right = frequencies.get_offset('MS')
assert left == offsets.Milli()
assert right == offsets.MonthBegin()
def test_rule_aliases():
rule = frequencies.to_offset('10us')
assert rule == offsets.Micro(10)
class TestFrequencyCode(object):
def test_freq_code(self):
assert get_freq('A') == 1000
assert get_freq('3A') == 1000
assert get_freq('-1A') == 1000
assert get_freq('Y') == 1000
assert get_freq('3Y') == 1000
assert get_freq('-1Y') == 1000
assert get_freq('W') == 4000
assert get_freq('W-MON') == 4001
assert get_freq('W-FRI') == 4005
for freqstr, code in compat.iteritems(_period_code_map):
result = get_freq(freqstr)
assert result == code
result = resolution.get_freq_group(freqstr)
assert result == code // 1000 * 1000
result = resolution.get_freq_group(code)
assert result == code // 1000 * 1000
def test_freq_group(self):
assert resolution.get_freq_group('A') == 1000
assert resolution.get_freq_group('3A') == 1000
assert resolution.get_freq_group('-1A') == 1000
assert resolution.get_freq_group('A-JAN') == 1000
assert resolution.get_freq_group('A-MAY') == 1000
assert resolution.get_freq_group('Y') == 1000
assert resolution.get_freq_group('3Y') == 1000
assert resolution.get_freq_group('-1Y') == 1000
assert resolution.get_freq_group('Y-JAN') == 1000
assert resolution.get_freq_group('Y-MAY') == 1000
assert resolution.get_freq_group(offsets.YearEnd()) == 1000
assert resolution.get_freq_group(offsets.YearEnd(month=1)) == 1000
assert resolution.get_freq_group(offsets.YearEnd(month=5)) == 1000
assert resolution.get_freq_group('W') == 4000
assert resolution.get_freq_group('W-MON') == 4000
assert resolution.get_freq_group('W-FRI') == 4000
assert resolution.get_freq_group(offsets.Week()) == 4000
assert resolution.get_freq_group(offsets.Week(weekday=1)) == 4000
assert resolution.get_freq_group(offsets.Week(weekday=5)) == 4000
def test_get_to_timestamp_base(self):
tsb = libfrequencies.get_to_timestamp_base
assert (tsb(get_freq_code('D')[0]) ==
get_freq_code('D')[0])
assert (tsb(get_freq_code('W')[0]) ==
get_freq_code('D')[0])
assert (tsb(get_freq_code('M')[0]) ==
get_freq_code('D')[0])
assert (tsb(get_freq_code('S')[0]) ==
get_freq_code('S')[0])
assert (tsb(get_freq_code('T')[0]) ==
get_freq_code('S')[0])
assert (tsb(get_freq_code('H')[0]) ==
get_freq_code('S')[0])
def test_freq_to_reso(self):
Reso = resolution.Resolution
assert Reso.get_str_from_freq('A') == 'year'
assert Reso.get_str_from_freq('Q') == 'quarter'
assert Reso.get_str_from_freq('M') == 'month'
assert Reso.get_str_from_freq('D') == 'day'
assert Reso.get_str_from_freq('H') == 'hour'
assert Reso.get_str_from_freq('T') == 'minute'
assert Reso.get_str_from_freq('S') == 'second'
assert Reso.get_str_from_freq('L') == 'millisecond'
assert Reso.get_str_from_freq('U') == 'microsecond'
assert Reso.get_str_from_freq('N') == 'nanosecond'
for freq in ['A', 'Q', 'M', 'D', 'H', 'T', 'S', 'L', 'U', 'N']:
# check roundtrip
result = Reso.get_freq(Reso.get_str_from_freq(freq))
assert freq == result
for freq in ['D', 'H', 'T', 'S', 'L', 'U']:
result = Reso.get_freq(Reso.get_str(Reso.get_reso_from_freq(freq)))
assert freq == result
def test_resolution_bumping(self):
# see gh-14378
Reso = resolution.Resolution
assert Reso.get_stride_from_decimal(1.5, 'T') == (90, 'S')
assert Reso.get_stride_from_decimal(62.4, 'T') == (3744, 'S')
assert Reso.get_stride_from_decimal(1.04, 'H') == (3744, 'S')
assert Reso.get_stride_from_decimal(1, 'D') == (1, 'D')
assert (Reso.get_stride_from_decimal(0.342931, 'H') ==
(1234551600, 'U'))
assert Reso.get_stride_from_decimal(1.2345, 'D') == (106660800, 'L')
with pytest.raises(ValueError):
Reso.get_stride_from_decimal(0.5, 'N')
        # too much precision in the input can prevent conversion to an
        # integer stride at any supported resolution
with pytest.raises(ValueError):
Reso.get_stride_from_decimal(0.3429324798798269273987982, 'H')
def test_get_freq_code(self):
# frequency str
assert (get_freq_code('A') ==
(get_freq('A'), 1))
assert (get_freq_code('3D') ==
(get_freq('D'), 3))
assert (get_freq_code('-2M') ==
(get_freq('M'), -2))
# tuple
assert (get_freq_code(('D', 1)) ==
(get_freq('D'), 1))
assert (get_freq_code(('A', 3)) ==
(get_freq('A'), 3))
assert (get_freq_code(('M', -2)) ==
(get_freq('M'), -2))
# numeric tuple
assert get_freq_code((1000, 1)) == (1000, 1)
# offsets
assert (get_freq_code(offsets.Day()) ==
(get_freq('D'), 1))
assert (get_freq_code(offsets.Day(3)) ==
(get_freq('D'), 3))
assert (get_freq_code(offsets.Day(-2)) ==
(get_freq('D'), -2))
assert (get_freq_code(offsets.MonthEnd()) ==
(get_freq('M'), 1))
assert (get_freq_code(offsets.MonthEnd(3)) ==
(get_freq('M'), 3))
assert (get_freq_code(offsets.MonthEnd(-2)) ==
(get_freq('M'), -2))
assert (get_freq_code(offsets.Week()) ==
(get_freq('W'), 1))
assert (get_freq_code(offsets.Week(3)) ==
(get_freq('W'), 3))
assert (get_freq_code(offsets.Week(-2)) ==
(get_freq('W'), -2))
# Monday is weekday=0
assert (get_freq_code(offsets.Week(weekday=1)) ==
(get_freq('W-TUE'), 1))
assert (get_freq_code(offsets.Week(3, weekday=0)) ==
(get_freq('W-MON'), 3))
assert (get_freq_code(offsets.Week(-2, weekday=4)) ==
(get_freq('W-FRI'), -2))
def test_frequency_misc(self):
assert (resolution.get_freq_group('T') ==
FreqGroup.FR_MIN)
code, stride = get_freq_code(offsets.Hour())
assert code == FreqGroup.FR_HR
code, stride = get_freq_code((5, 'T'))
assert code == FreqGroup.FR_MIN
assert stride == 5
offset = offsets.Hour()
result = frequencies.to_offset(offset)
assert result == offset
result = frequencies.to_offset((5, 'T'))
expected = offsets.Minute(5)
assert result == expected
with pytest.raises(ValueError, match='Invalid frequency'):
get_freq_code((5, 'baz'))
with pytest.raises(ValueError, match='Invalid frequency'):
frequencies.to_offset('100foo')
with pytest.raises(ValueError, match='Could not evaluate'):
frequencies.to_offset(('', ''))
_dti = DatetimeIndex
class TestFrequencyInference(object):
def test_raise_if_period_index(self):
index = period_range(start="1/1/1990", periods=20, freq="M")
pytest.raises(TypeError, frequencies.infer_freq, index)
def test_raise_if_too_few(self):
index = _dti(['12/31/1998', '1/3/1999'])
pytest.raises(ValueError, frequencies.infer_freq, index)
def test_business_daily(self):
index = _dti(['01/01/1999', '1/4/1999', '1/5/1999'])
assert frequencies.infer_freq(index) == 'B'
def test_business_daily_look_alike(self):
# GH 16624, do not infer 'B' when 'weekend' (2-day gap) in wrong place
index = _dti(['12/31/1998', '1/3/1999', '1/4/1999'])
assert frequencies.infer_freq(index) is None
def test_day(self):
self._check_tick(timedelta(1), 'D')
def test_day_corner(self):
index = _dti(['1/1/2000', '1/2/2000', '1/3/2000'])
assert frequencies.infer_freq(index) == 'D'
def test_non_datetimeindex(self):
dates = to_datetime(['1/1/2000', '1/2/2000', '1/3/2000'])
assert frequencies.infer_freq(dates) == 'D'
def test_hour(self):
self._check_tick(timedelta(hours=1), 'H')
def test_minute(self):
self._check_tick(timedelta(minutes=1), 'T')
def test_second(self):
self._check_tick(timedelta(seconds=1), 'S')
def test_millisecond(self):
self._check_tick(timedelta(microseconds=1000), 'L')
def test_microsecond(self):
self._check_tick(timedelta(microseconds=1), 'U')
def test_nanosecond(self):
self._check_tick(np.timedelta64(1, 'ns'), 'N')
def _check_tick(self, base_delta, code):
b = Timestamp(datetime.now())
for i in range(1, 5):
inc = base_delta * i
index = _dti([b + inc * j for j in range(3)])
if i > 1:
exp_freq = '%d%s' % (i, code)
else:
exp_freq = code
assert frequencies.infer_freq(index) == exp_freq
        index = _dti([b + base_delta * 7] + [b + base_delta * j for j in range(3)])
        assert frequencies.infer_freq(index) is None
        index = _dti([b + base_delta * j for j in range(3)] + [b + base_delta * 7])
        assert frequencies.infer_freq(index) is None
def test_weekly(self):
days = ['MON', 'TUE', 'WED', 'THU', 'FRI', 'SAT', 'SUN']
for day in days:
self._check_generated_range('1/1/2000', 'W-%s' % day)
def test_week_of_month(self):
days = ['MON', 'TUE', 'WED', 'THU', 'FRI', 'SAT', 'SUN']
for day in days:
for i in range(1, 5):
self._check_generated_range('1/1/2000', 'WOM-%d%s' % (i, day))
def test_fifth_week_of_month(self):
# Only supports freq up to WOM-4. See #9425
func = lambda: date_range('2014-01-01', freq='WOM-5MON')
pytest.raises(ValueError, func)
def test_fifth_week_of_month_infer(self):
# Only attempts to infer up to WOM-4. See #9425
index = DatetimeIndex(["2014-03-31", "2014-06-30", "2015-03-30"])
assert frequencies.infer_freq(index) is None
def test_week_of_month_fake(self):
# All of these dates are on same day of week and are 4 or 5 weeks apart
index = DatetimeIndex(["2013-08-27", "2013-10-01", "2013-10-29",
"2013-11-26"])
assert frequencies.infer_freq(index) != 'WOM-4TUE'
def test_monthly(self):
self._check_generated_range('1/1/2000', 'M')
def test_monthly_ambiguous(self):
rng = _dti(['1/31/2000', '2/29/2000', '3/31/2000'])
assert rng.inferred_freq == 'M'
def test_business_monthly(self):
self._check_generated_range('1/1/2000', 'BM')
def test_business_start_monthly(self):
self._check_generated_range('1/1/2000', 'BMS')
def test_quarterly(self):
for month in ['JAN', 'FEB', 'MAR']:
self._check_generated_range('1/1/2000', 'Q-%s' % month)
def test_annual(self):
for month in MONTHS:
self._check_generated_range('1/1/2000', 'A-%s' % month)
def test_business_annual(self):
for month in MONTHS:
self._check_generated_range('1/1/2000', 'BA-%s' % month)
def test_annual_ambiguous(self):
rng = _dti(['1/31/2000', '1/31/2001', '1/31/2002'])
assert rng.inferred_freq == 'A-JAN'
def _check_generated_range(self, start, freq):
freq = freq.upper()
gen = date_range(start, periods=7, freq=freq)
index = _dti(gen.values)
if not freq.startswith('Q-'):
assert frequencies.infer_freq(index) == gen.freqstr
else:
inf_freq = frequencies.infer_freq(index)
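            # (Added note:) quarterly anchors three months apart produce the
            # same quarter-end dates, so inference can only recover one
            # representative per group (DEC, NOV or OCT), not the exact
            # anchor month the range was generated with.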
is_dec_range = inf_freq == 'Q-DEC' and gen.freqstr in (
'Q', 'Q-DEC', 'Q-SEP', 'Q-JUN', 'Q-MAR')
is_nov_range = inf_freq == 'Q-NOV' and gen.freqstr in (
'Q-NOV', 'Q-AUG', 'Q-MAY', 'Q-FEB')
is_oct_range = inf_freq == 'Q-OCT' and gen.freqstr in (
'Q-OCT', 'Q-JUL', 'Q-APR', 'Q-JAN')
assert is_dec_range or is_nov_range or is_oct_range
gen = date_range(start, periods=5, freq=freq)
index = _dti(gen.values)
if not freq.startswith('Q-'):
assert frequencies.infer_freq(index) == gen.freqstr
else:
inf_freq = frequencies.infer_freq(index)
is_dec_range = inf_freq == 'Q-DEC' and gen.freqstr in (
'Q', 'Q-DEC', 'Q-SEP', 'Q-JUN', 'Q-MAR')
is_nov_range = inf_freq == 'Q-NOV' and gen.freqstr in (
'Q-NOV', 'Q-AUG', 'Q-MAY', 'Q-FEB')
is_oct_range = inf_freq == 'Q-OCT' and gen.freqstr in (
'Q-OCT', 'Q-JUL', 'Q-APR', 'Q-JAN')
assert is_dec_range or is_nov_range or is_oct_range
def test_infer_freq(self):
rng = period_range('1959Q2', '2009Q3', freq='Q')
rng = Index(rng.to_timestamp('D', how='e').astype(object))
assert rng.inferred_freq == 'Q-DEC'
rng = period_range('1959Q2', '2009Q3', freq='Q-NOV')
rng = Index(rng.to_timestamp('D', how='e').astype(object))
assert rng.inferred_freq == 'Q-NOV'
rng = period_range('1959Q2', '2009Q3', freq='Q-OCT')
rng = Index(rng.to_timestamp('D', how='e').astype(object))
assert rng.inferred_freq == 'Q-OCT'
def test_infer_freq_tz(self):
freqs = {'AS-JAN':
['2009-01-01', '2010-01-01', '2011-01-01', '2012-01-01'],
'Q-OCT':
['2009-01-31', '2009-04-30', '2009-07-31', '2009-10-31'],
'M': ['2010-11-30', '2010-12-31', '2011-01-31', '2011-02-28'],
'W-SAT':
['2010-12-25', '2011-01-01', '2011-01-08', '2011-01-15'],
'D': ['2011-01-01', '2011-01-02', '2011-01-03', '2011-01-04'],
'H': ['2011-12-31 22:00', '2011-12-31 23:00',
'2012-01-01 00:00', '2012-01-01 01:00']}
# GH 7310
for tz in [None, 'Australia/Sydney', 'Asia/Tokyo', 'Europe/Paris',
'US/Pacific', 'US/Eastern']:
for expected, dates in compat.iteritems(freqs):
idx = DatetimeIndex(dates, tz=tz)
assert idx.inferred_freq == expected
def test_infer_freq_tz_transition(self):
# Tests for #8772
date_pairs = [['2013-11-02', '2013-11-5'], # Fall DST
['2014-03-08', '2014-03-11'], # Spring DST
['2014-01-01', '2014-01-03']] # Regular Time
freqs = ['3H', '10T', '3601S', '3600001L', '3600000001U',
'3600000000001N']
for tz in [None, 'Australia/Sydney', 'Asia/Tokyo', 'Europe/Paris',
'US/Pacific', 'US/Eastern']:
for date_pair in date_pairs:
for freq in freqs:
idx = date_range(date_pair[0], date_pair[
1], freq=freq, tz=tz)
assert idx.inferred_freq == freq
index = date_range("2013-11-03", periods=5,
freq="3H").tz_localize("America/Chicago")
assert index.inferred_freq is None
def test_infer_freq_businesshour(self):
# GH 7905
idx = DatetimeIndex(
['2014-07-01 09:00', '2014-07-01 10:00', '2014-07-01 11:00',
'2014-07-01 12:00', '2014-07-01 13:00', '2014-07-01 14:00'])
# hourly freq in a day must result in 'H'
assert idx.inferred_freq == 'H'
idx = DatetimeIndex(
['2014-07-01 09:00', '2014-07-01 10:00', '2014-07-01 11:00',
'2014-07-01 12:00', '2014-07-01 13:00', '2014-07-01 14:00',
'2014-07-01 15:00', '2014-07-01 16:00', '2014-07-02 09:00',
'2014-07-02 10:00', '2014-07-02 11:00'])
assert idx.inferred_freq == 'BH'
idx = DatetimeIndex(
['2014-07-04 09:00', '2014-07-04 10:00', '2014-07-04 11:00',
'2014-07-04 12:00', '2014-07-04 13:00', '2014-07-04 14:00',
'2014-07-04 15:00', '2014-07-04 16:00', '2014-07-07 09:00',
'2014-07-07 10:00', '2014-07-07 11:00'])
assert idx.inferred_freq == 'BH'
idx = DatetimeIndex(
['2014-07-04 09:00', '2014-07-04 10:00', '2014-07-04 11:00',
'2014-07-04 12:00', '2014-07-04 13:00', '2014-07-04 14:00',
'2014-07-04 15:00', '2014-07-04 16:00', '2014-07-07 09:00',
'2014-07-07 10:00', '2014-07-07 11:00', '2014-07-07 12:00',
'2014-07-07 13:00', '2014-07-07 14:00', '2014-07-07 15:00',
'2014-07-07 16:00', '2014-07-08 09:00', '2014-07-08 10:00',
'2014-07-08 11:00', '2014-07-08 12:00', '2014-07-08 13:00',
'2014-07-08 14:00', '2014-07-08 15:00', '2014-07-08 16:00'])
assert idx.inferred_freq == 'BH'
def test_not_monotonic(self):
rng = _dti(['1/31/2000', '1/31/2001', '1/31/2002'])
rng = rng[::-1]
assert rng.inferred_freq == '-1A-JAN'
def test_non_datetimeindex2(self):
rng = _dti(['1/31/2000', '1/31/2001', '1/31/2002'])
vals = rng.to_pydatetime()
result = frequencies.infer_freq(vals)
assert result == rng.inferred_freq
def test_invalid_index_types(self):
# test all index types
for i in [tm.makeIntIndex(10), tm.makeFloatIndex(10),
tm.makePeriodIndex(10)]:
pytest.raises(TypeError, lambda: frequencies.infer_freq(i))
# GH 10822
# odd error message on conversions to datetime for unicode
if not is_platform_windows():
for i in [tm.makeStringIndex(10), tm.makeUnicodeIndex(10)]:
pytest.raises(ValueError, lambda: frequencies.infer_freq(i))
def test_string_datetimelike_compat(self):
# GH 6463
expected = frequencies.infer_freq(['2004-01', '2004-02', '2004-03',
'2004-04'])
result = frequencies.infer_freq(Index(['2004-01', '2004-02', '2004-03',
'2004-04']))
assert result == expected
def test_series(self):
# GH6407
# inferring series
# invalid type of Series
for s in [Series(np.arange(10)), Series(np.arange(10.))]:
pytest.raises(TypeError, lambda: frequencies.infer_freq(s))
# a non-convertible string
pytest.raises(ValueError, lambda: frequencies.infer_freq(
Series(['foo', 'bar'])))
# cannot infer on PeriodIndex
for freq in [None, 'L']:
s = Series(period_range('2013', periods=10, freq=freq))
pytest.raises(TypeError, lambda: frequencies.infer_freq(s))
# DateTimeIndex
for freq in ['M', 'L', 'S']:
s = Series(date_range('20130101', periods=10, freq=freq))
inferred = frequencies.infer_freq(s)
assert inferred == freq
s = Series(date_range('20130101', '20130110'))
inferred = frequencies.infer_freq(s)
assert inferred == 'D'
def test_legacy_offset_warnings(self):
freqs = ['WEEKDAY', 'EOM', 'W@MON', 'W@TUE', 'W@WED', 'W@THU',
'W@FRI', 'W@SAT', 'W@SUN', 'Q@JAN', 'Q@FEB', 'Q@MAR',
'A@JAN', 'A@FEB', 'A@MAR', 'A@APR', 'A@MAY', 'A@JUN',
'A@JUL', 'A@AUG', 'A@SEP', 'A@OCT', 'A@NOV', 'A@DEC',
'Y@JAN', 'WOM@1MON', 'WOM@2MON', 'WOM@3MON',
'WOM@4MON', 'WOM@1TUE', 'WOM@2TUE', 'WOM@3TUE',
'WOM@4TUE', 'WOM@1WED', 'WOM@2WED', 'WOM@3WED',
'WOM@4WED', 'WOM@1THU', 'WOM@2THU', 'WOM@3THU',
'WOM@4THU', 'WOM@1FRI', 'WOM@2FRI', 'WOM@3FRI',
'WOM@4FRI']
msg = INVALID_FREQ_ERR_MSG
for freq in freqs:
with pytest.raises(ValueError, match=msg):
frequencies.get_offset(freq)
with pytest.raises(ValueError, match=msg):
date_range('2011-01-01', periods=5, freq=freq)
# ===========================================================================
# File: /model-optimizer/extensions/front/tf/TFSliceToSlice.py
# Repo: Drizshko/openvino | License: Apache-2.0 (permissive) | Python, UTF-8
# ===========================================================================
"""
Copyright (C) 2018-2020 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import numpy as np
from extensions.ops.Cast import Cast
from extensions.ops.elementwise import Add, Equal
from extensions.ops.select import Select
from mo.front.common.replacement import FrontReplacementOp
from mo.graph.graph import Graph, rename_nodes
from mo.ops.const import Const
from mo.ops.slice import Slice
class TFSliceToSliceReplacer(FrontReplacementOp):
"""
This transformation converts TFSlice to internal Slice operation.
In TFSlice size[i] == -1 means take all elements on axis i up to the end including(!) the last
In internal MO Slice (which is borrowed from ONNX) -1 means take all excluding(!) the last (shape[i] - 1).
Also TFSlice has 'sizes' on the second input while Slice has 'ends'.
This transformation was added to avoid multiple if statements in future transformations.
"""
op = 'TFSlice'
enabled = True
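    # Worked example (added; not from the original source): for a 1-D tensor
    # of length 10 with begin = [2] and size = [-1], TFSlice returns elements
    # 2..9 inclusive. The graph built below computes
    # end = select(size == -1, INT32_MAX, begin + size), so end becomes
    # INT32_MAX here and the internal Slice clamps it to the axis length,
    # producing the same elements 2..9.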
def replace_sub_graph(self, graph: Graph, match: dict):
node = match['op']
slice_name = node.soft_get('name', node.id)
slice_node = Slice(graph).create_node()
rename_nodes([(node, slice_name + '/to_be_removed'), (slice_node, slice_name)])
eq_node = Equal(graph, {'name': slice_name + '/equal'}).create_node()
minus_one_node = Const(graph, {'name': slice_name + '/minus_one', 'value': np.array(-1)}).create_node()
int32_max_node = Const(graph, {'name': slice_name + '/int32_max', 'value': np.iinfo(np.int32).max}).create_node()
select_node = Select(graph, {'name': slice_name + '/select'}).create_node()
# node to convert sizes to ends
sum_node = Add(graph, {'name': slice_name + '/end_const'}).create_node()
# reconnect input from tfslice to slice
node.in_port(0).get_source().connect(slice_node.in_port(0))
node.in_port(0).disconnect()
# reconnect begin of tfslice to start of slice
node.in_port(1).get_source().connect(slice_node.in_port(1))
node.in_port(1).disconnect()
# (size -> ends) reconnect begins and sizes to sum to evaluate ends for Slice
# connects begins to slice
slice_node.in_port(1).get_source().connect(sum_node.in_port(0))
node.in_port(2).get_source().connect(sum_node.in_port(1))
node.in_port(2).disconnect()
# if size[i] == -1 when take int32_max as end[i]
sum_node.in_port(1).get_source().connect(eq_node.in_port(0))
minus_one_node.out_port(0).connect(eq_node.in_port(1))
# from equal to 0 port of select
eq_node.out_port(0).connect(select_node.in_port(0))
# from int32_max to 1 of select
int32_max_node.out_port(0).connect(select_node.in_port(1))
# from sum to 2nd of select
sum_node.out_port(0).connect(select_node.in_port(2))
# out of select to end (2nd of slice)
select_node.out_port(0).connect(slice_node.in_port(2))
cast = Cast(graph, dict(name=sum_node.name + '/CastToI64', dst_type=np.int64)).create_node()
select_node.in_port(2).get_connection().insert_node(cast)
node.out_port(0).get_connection().set_source(slice_node.out_port(0))
# ===========================================================================
# File: /All_In_One/addons/Surf.py
# Repo: 2434325680/Learnbgame | License: none detected | Python, UTF-8
# ===========================================================================
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# ##### END GPL LICENSE BLOCK #####
#//////////////////////////////// - AUTHORS YO - ///////////////////////////
#Original Author - Eclectiel
#Previous Updators - patmo141, chichiri
#Blender 2.7x Maintainer - Crocadillian
#This states the metadata for the plugin
bl_info = {
"name": "Surf",
"author": "Crocadillian, Eclectiel, patmo141, chichiri",
"version": (0,75),
"blender": (2, 7, 0),
"api": 39347,
"location": "3D View > Object Mode > Tools > Grease Pencil",
#"description": "Easily sketch meshes with grease pencil and metaballs",
#In case I add a few quick tools for quickly applying mesh data to splines, I wanted to expand the description :3
"description": "Sketch and generate meshes with the grease pencil",
"warning": "Beta",
"wiki_url": "",
"category": "Learnbgame",
}
#This imports various items from the Python API for use in the script
import bpy, bmesh, time
from math import *
from bpy.props import IntProperty, BoolProperty, FloatProperty, EnumProperty
#Just variable definitions
mball_definition = 2
mball_wire_resolution = 0.1
degree_per_radian = 0.0174532925 # pi / 180 - multiplying a value in degrees by this gives radians
def Update_StrokeSize(self, context):
if bpy.context.scene.ASKETCH_live_update is not False:
# Create an array to store all found objects
strokes_to_select = []
strokes_to_make_active = []
if bpy.context.active_object.name.find(".SKO") != -1:
strokes_to_make_active.append(bpy.context.active_object)
# Find all the Stroke Objects in the scene
for stroke in bpy.context.selected_objects:
if stroke.name.find(".SKO") != -1:
strokes_to_select.append(stroke)
# Find the Curve
stroke_size = float(self.ASKETCH_stroke_size)
central_size = float(self.ASKETCH_stroke_central_size)
stroke_curve_name = stroke.name.replace(".SKO", ".SKC")
FocusObject(stroke_curve_name)
curve_data = bpy.context.object.data
# Call the Set Curve Radius class
bpy.ops.object.editmode_toggle()
ASKETCH_SetStrokeRadius(curve_data, stroke_size, central_size)
bpy.ops.object.editmode_toggle()
bpy.ops.object.select_all(action='DESELECT')
# Re-select all stored objects
for strokeEnd in strokes_to_select:
bpy.ops.object.select_pattern(pattern=strokeEnd.name)
for strokeActive in strokes_to_make_active:
bpy.ops.object.select_pattern(pattern=strokeActive.name)
bpy.context.scene.objects.active = strokeActive
return None
def Update_StrokeDensity(self, context):
if bpy.context.scene.ASKETCH_live_update is not False:
# Create an array to store all found objects
strokes_to_select = []
strokes_to_make_active = []
if bpy.context.active_object.name.find(".SKO") != -1:
strokes_to_make_active.append(bpy.context.active_object)
# Find all the Stroke Objects in the scene
for stroke in bpy.context.selected_objects:
            if stroke.name.find(".SKO") != -1:
                strokes_to_select.append(stroke)
                stroke.modifiers["Array"].relative_offset_displace = [self.ASKETCH_stroke_element_offset, 0, 0]
stroke.modifiers["Array"].relative_offset_displace = [self.ASKETCH_stroke_element_offset, 0, 0]
# Re-select all stored objects
for strokeEnd in strokes_to_select:
bpy.ops.object.select_pattern(pattern=strokeEnd.name)
for strokeActive in strokes_to_make_active:
bpy.ops.object.select_pattern(pattern=strokeActive.name)
bpy.context.scene.objects.active = strokeActive
return None
def Update_Normalise(self, context):
if bpy.context.scene.ASKETCH_live_update is not False:
# Create an array to store all found objects
strokes_to_select = []
strokes_to_make_active = []
if bpy.context.active_object.name.find(".SKO") != -1:
strokes_to_make_active.append(bpy.context.active_object)
# Find all the Stroke Objects in the scene
for stroke in bpy.context.selected_objects:
            if stroke.name.find(".SKO") != -1:
                strokes_to_select.append(stroke)
                # Change the internal values of the object
# Change the internal values of the object
stroke.ASKETCH_stroke_size = 1
stroke.ASKETCH_stroke_element_offset = 1
stroke.ASKETCH_stroke_central_size = 1
# Find the Curve
stroke.modifiers["Array"].relative_offset_displace = [1, 0, 0]
stroke_curve_name = stroke.name.replace(".SKO", ".SKC")
FocusObject(stroke_curve_name)
curve_data = bpy.context.object.data
# Call the Set Curve Radius class
bpy.ops.object.editmode_toggle()
ASKETCH_SetStrokeRadius(curve_data, 1, 1)
bpy.ops.object.editmode_toggle()
bpy.ops.object.select_all(action='DESELECT')
# Re-select all stored objects
for strokeEnd in strokes_to_select:
bpy.ops.object.select_pattern(pattern=strokeEnd.name)
for strokeActive in strokes_to_make_active:
bpy.ops.object.select_pattern(pattern=strokeActive.name)
bpy.context.scene.objects.active = strokeActive
return {"FINISHED"}
def Update_XMirror(self, context):
if bpy.context.scene.ASKETCH_live_update is not False:
# Create an array to store all found objects
strokes_to_select = []
strokes_to_make_active = []
if bpy.context.active_object.name.find(".SKO") != -1:
strokes_to_make_active.append(bpy.context.active_object)
# Find all the Stroke Objects in the scene
for stroke in bpy.context.selected_objects:
            if stroke.name.find(".SKO") != -1:
                strokes_to_select.append(stroke)
                if stroke.ASKETCH_x_mirror_on is True:
# Add the mirror modifier
FocusObject(stroke.name)
bpy.ops.object.modifier_add(type='MIRROR')
stroke.modifiers['Mirror'].use_X = True
else:
FocusObject(stroke.name)
scene = bpy.context.scene
mod_types = {'MIRROR'}
# Get an array of the active modifiers in the stroke
mod_active = [mod.show_viewport for mod in stroke.modifiers]
# THANKS BLENDER ARTISTS USER CoDEmannX for this code!
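                    # (Added note:) the block below applies just the mirror
                    # modifier into the mesh data: all non-mirror modifiers
                    # are hidden, to_mesh() evaluates the stack, the mirror
                    # modifier is removed, and the other modifiers are made
                    # visible again.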
for mod in stroke.modifiers:
if mod.type not in mod_types:
mod.show_viewport = False
me = stroke.to_mesh(scene, False, 'PREVIEW')
for mod, active in zip(stroke.modifiers, mod_active):
if mod.type in mod_types:
stroke.modifiers.remove(mod)
else:
mod.show_viewport = active
# Note: this only swaps the object's data, but doesn't remove the original mesh
stroke.data = me
        bpy.ops.object.select_all(action='DESELECT')
        # Re-select all stored objects
        for strokeEnd in strokes_to_select:
            bpy.ops.object.select_pattern(pattern=strokeEnd.name)
        for strokeActive in strokes_to_make_active:
            bpy.ops.object.select_pattern(pattern=strokeActive.name)
            bpy.context.scene.objects.active = strokeActive
    return None
def Update_MergeElements(self, context):
if bpy.context.scene.ASKETCH_live_update is not False:
# Create an array to store all found objects
strokes_to_select = []
strokes_to_make_active = []
if bpy.context.active_object.name.find(".SKO") != -1:
strokes_to_make_active.append(bpy.context.active_object)
# Find all the Stroke Objects in the scene
for stroke in bpy.context.selected_objects:
            if stroke.name.find(".SKO") != -1:
                strokes_to_select.append(stroke)
                FocusObject(stroke.name)
stroke.modifiers['Array'].use_merge_vertices = stroke.ASKETCH_connect_elements
stroke.modifiers['Array'].use_merge_vertices_cap = stroke.ASKETCH_connect_elements
stroke.modifiers['Array'].merge_threshold = 1.0
# Re-select all stored objects
for strokeEnd in strokes_to_select:
bpy.ops.object.select_pattern(pattern=strokeEnd.name)
for strokeActive in strokes_to_make_active:
bpy.ops.object.select_pattern(pattern=strokeActive.name)
bpy.context.scene.objects.active = strokeActive
return None
def Update_CurveObject(self, context):
if bpy.context.scene.ASKETCH_live_update is not False:
# Create an array to store all found objects
strokes_to_select = []
strokes_to_make_active = []
if bpy.context.active_object.name.find(".SKO") != -1:
strokes_to_make_active.append(bpy.context.active_object)
# Find all the Stroke Objects in the scene
for stroke in bpy.context.selected_objects:
            if stroke.name.find(".SKO") != -1:
                strokes_to_select.append(stroke)
                if stroke.ASKETCH_object_curve is True:
FocusObject(stroke.name)
#modifiers = object.modifiers
#for modifier in object.modifiers:
# if (modifier.type == Blender.Modifier.Types.SUBSURF):
# object.modifiers.remove(modifier)
# object.makeDisplayList()
scene = bpy.context.scene
mod_types = {'ARRAY'}
# Get an array of the active modifiers in the stroke
mod_active = [mod.show_viewport for mod in stroke.modifiers]
# THANKS BLENDER ARTISTS USER CoDEmannX for this code!
for mod in stroke.modifiers:
if mod.type not in mod_types:
mod.show_viewport = False
me = stroke.to_mesh(scene, False, 'PREVIEW')
for mod, active in zip(stroke.modifiers, mod_active):
if mod.type in mod_types:
stroke.modifiers.remove(mod)
else:
mod.show_viewport = active
# Note: this only swaps the object's data, but doesn't remove the original mesh
stroke.data = me
else:
FocusObject(stroke.name)
bpy.ops.object.modifier_add(type='ARRAY')
if stroke.ASKETCH_connect_elements is True:
stroke.modifiers['Array'].use_merge_vertices = True
stroke.modifiers['Array'].use_merge_vertices_cap = True
stroke.modifiers['Array'].merge_threshold = 1.0
# Modifies the Array attributes
stroke.modifiers["Array"].relative_offset_displace = [self.ASKETCH_stroke_element_offset, 0, 0]
stroke.modifiers["Array"].fit_type = "FIT_CURVE"
stroke_curve_name = stroke.name.replace(".SKO", ".SKC")
FocusObject(stroke_curve_name)
stroke.modifiers["Array"].curve = bpy.context.scene.objects.active
# Push the modifier to the top of the stack
FocusObject(stroke.name)
bpy.ops.object.modifier_move_up(modifier="Array")
bpy.ops.object.modifier_move_up(modifier="Array")
bpy.ops.object.modifier_move_up(modifier="Array")
bpy.ops.object.modifier_move_up(modifier="Array")
# Re-select all stored objects
for strokeEnd in strokes_to_select:
bpy.ops.object.select_pattern(pattern=strokeEnd.name)
for strokeActive in strokes_to_make_active:
bpy.ops.object.select_pattern(pattern=strokeActive.name)
bpy.context.scene.objects.active = strokeActive
return None
def Update_LockTransform(self, context):
if bpy.context.scene.ASKETCH_live_update is not False:
# Create an array to store all found objects
strokes_to_select = []
strokes_to_make_active = []
if bpy.context.active_object.name.find(".SKO") != -1:
strokes_to_make_active.append(bpy.context.active_object)
# Find all the Stroke Objects in the scene
for stroke in bpy.context.selected_objects:
            if stroke.name.find(".SKO") != -1:
                strokes_to_select.append(stroke)
                if self.ASKETCH_lock_transform is True:
bpy.data.objects[stroke.name].lock_location[0] = True
bpy.data.objects[stroke.name].lock_location[1] = True
bpy.data.objects[stroke.name].lock_location[2] = True
if self.ASKETCH_lock_transform is False:
bpy.data.objects[stroke.name].lock_location[0] = False
bpy.data.objects[stroke.name].lock_location[1] = False
bpy.data.objects[stroke.name].lock_location[2] = False
# Re-select all stored objects
for strokeEnd in strokes_to_select:
bpy.ops.object.select_pattern(pattern=strokeEnd.name)
for strokeActive in strokes_to_make_active:
bpy.ops.object.select_pattern(pattern=strokeActive.name)
bpy.context.scene.objects.active = strokeActive
return None
def Update_TwistMode(self,context):
if bpy.context.scene.ASKETCH_live_update is not False:
# Create an array to store all found objects
strokes_to_select = []
strokes_to_make_active = []
if bpy.context.active_object.name.find(".SKO") != -1:
strokes_to_make_active.append(bpy.context.active_object)
# Find all the Stroke Objects in the scene
for stroke in bpy.context.selected_objects:
if stroke.name.find(".SKO") != -1:
# Store the stroke
strokes_to_select.append(stroke)
# Obtain the ENUM
selected_object = int(self.ASKETCH_twist_mode)
# Get the curve instead
stroke_curve_name = stroke.name.replace(".SKO", ".SKC")
FocusObject(stroke_curve_name)
# Tangent
if selected_object == 1:
bpy.context.active_object.data.twist_mode = 'TANGENT'
# Minimum
if selected_object == 2:
bpy.context.active_object.data.twist_mode = 'MINIMUM'
# Z-Up
if selected_object == 3:
bpy.context.active_object.data.twist_mode = 'Z_UP'
bpy.ops.object.select_all(action='DESELECT')
# Re-select all stored objects
for strokeEnd in strokes_to_select:
bpy.ops.object.select_pattern(pattern=strokeEnd.name)
for strokeActive in strokes_to_make_active:
bpy.ops.object.select_pattern(pattern=strokeActive.name)
bpy.context.scene.objects.active = strokeActive
return None
def Update_TwistTilt(self, context):
if bpy.context.scene.ASKETCH_live_update is not False:
# Create an array to store all found objects
strokes_to_select = []
strokes_to_make_active = []
if bpy.context.active_object.name.find(".SKO") != -1:
strokes_to_make_active.append(bpy.context.active_object)
# Find all the Stroke Objects in the scene
for stroke in bpy.context.selected_objects:
if stroke.name.find(".SKO") != -1:
# Add it in the array so it can be re-selected later
strokes_to_select.append(stroke)
tilt_increment = (self.ASKETCH_tilt - self.ASKETCH_tilt_old) * degree_per_radian
stroke_curve_name = stroke.name.replace(".SKO", ".SKC")
FocusObject(stroke_curve_name)
curve_data = bpy.context.object.data
# Change the point tilt
#ASKETCH_SetStrokeTilt(curve_data, self.ASKETCH_tilt)
for checkPoints in bpy.data.curves[curve_data.name].splines[0].bezier_points:
checkPoints.tilt = tilt_increment + checkPoints.tilt
self.ASKETCH_tilt_old = self.ASKETCH_tilt
bpy.ops.object.select_all(action='DESELECT')
# Re-select all stored objects
for strokeEnd in strokes_to_select:
bpy.ops.object.select_pattern(pattern=strokeEnd.name)
for strokeActive in strokes_to_make_active:
bpy.ops.object.select_pattern(pattern=strokeActive.name)
bpy.context.scene.objects.active = strokeActive
return None
def Update_NormaliseTilt(self, context):
if bpy.context.scene.ASKETCH_live_update is not False:
# Create an array to store all found objects
strokes_to_select = []
strokes_to_make_active = []
if bpy.context.active_object.name.find(".SKO") != -1:
strokes_to_make_active.append(bpy.context.active_object)
# Find all the Stroke Objects in the scene
for stroke in bpy.context.selected_objects:
if stroke.name.find(".SKO") != -1:
# Add it in the array so it can be re-selected later
strokes_to_select.append(stroke)
stroke_curve_name = stroke.name.replace(".SKO", ".SKC")
FocusObject(stroke_curve_name)
curve_data = bpy.context.object.data
# Change the point tilt
#ASKETCH_SetStrokeTilt(curve_data, self.ASKETCH_tilt)
for checkPoints in bpy.data.curves[curve_data.name].splines[0].bezier_points:
checkPoints.tilt = 0
# Cheap way of forcing the object to redraw
bpy.ops.object.editmode_toggle()
self.ASKETCH_tilt = 0.0
self.ASKETCH_tilt_old = 0.0
bpy.ops.object.editmode_toggle()
bpy.ops.object.select_all(action='DESELECT')
# Re-select all stored objects
for strokeEnd in strokes_to_select:
bpy.ops.object.select_pattern(pattern=strokeEnd.name)
for strokeActive in strokes_to_make_active:
bpy.ops.object.select_pattern(pattern=strokeActive.name)
bpy.context.scene.objects.active = strokeActive
return None
def Update_ObjectOrigin(self, context):
if bpy.context.scene.ASKETCH_live_update is not False:
# Create an array to store all found objects
strokes_to_select = []
strokes_to_make_active = []
if bpy.context.active_object.name.find(".SKO") != -1:
strokes_to_make_active.append(bpy.context.active_object)
# Find all the Stroke Objects in the scene
for stroke in bpy.context.selected_objects:
if stroke.name.find(".SKO") != -1:
# Add it in the array so it can be re-selected later
strokes_to_select.append(stroke)
selected_item = int(self.ASKETCH_origin_point)
print("Going to UpdateObjectOrigin")
ASKETCH_SetObjectOrigin(self, selected_item, context)
bpy.ops.object.select_all(action='DESELECT')
# Re-select all stored objects
for strokeEnd in strokes_to_select:
bpy.ops.object.select_pattern(pattern=strokeEnd.name)
for strokeActive in strokes_to_make_active:
bpy.ops.object.select_pattern(pattern=strokeActive.name)
bpy.context.scene.objects.active = strokeActive
return None
def Update_OriginUpdate(self, context):
if bpy.context.scene.ASKETCH_live_update is not False:
# Create an array to store all found objects
strokes_to_select = []
strokes_to_make_active = []
if bpy.context.active_object.name.find(".SKO") != -1:
strokes_to_make_active.append(bpy.context.active_object)
# Find all the Stroke Objects in the scene
for stroke in bpy.context.selected_objects:
if stroke.name.find(".SKO") != -1:
if self.SCENE_origin_update is True:
# Add it in the array so it can be re-selected later
strokes_to_select.append(stroke)
FocusObject(stroke.name)
enum = int(bpy.context.active_object.ASKETCH_origin_point)
print("Rawr")
print("Going to UpdateObjectOrigin")
ASKETCH_SetObjectOrigin(stroke, enum, context)
bpy.ops.object.select_all(action='DESELECT')
# Re-select all stored objects
for strokeEnd in strokes_to_select:
bpy.ops.object.select_pattern(pattern=strokeEnd.name)
for strokeActive in strokes_to_make_active:
bpy.ops.object.select_pattern(pattern=strokeActive.name)
bpy.context.scene.objects.active = strokeActive
return None
#///////////////// - ADDITIONAL PROPERTY DEFINITIONS - ///////////////////////////
bpy.types.Object.ASKETCH_stroke_size = bpy.props.FloatProperty(
name = "Stroke Size",
description = "Change the stroke size",
update = Update_StrokeSize,
default = 1, soft_min = 0.25, soft_max = 3, min = 0.1, max = 10)
bpy.types.Scene.SCENE_stroke_size = bpy.props.FloatProperty(
name = "Stroke Size",
description = "Change the stroke size",
default = 1, soft_min = 0.25, soft_max = 3, min = 0.1, max = 10)
bpy.types.Object.ASKETCH_stroke_element_offset = bpy.props.FloatProperty(
name = "Stroke Density",
    description = "Change the space between elements along the curve. Smaller numbers = Denser curve. WARNING - Don't use a value below 0.5 when Merge Elements is active.",
update = Update_StrokeDensity,
default = 1, soft_min = 0.25, soft_max = 3, min = 0.1, max = 3)
bpy.types.Scene.SCENE_stroke_element_offset = bpy.props.FloatProperty(
name = "Stroke Density",
    description = "Change the space between elements along the curve. Smaller numbers = Denser curve. WARNING - Don't use a value below 0.5 when Merge Elements is active.",
default = 1, soft_min = 0.25, soft_max = 3, min = 0.1, max = 3)
bpy.types.Object.ASKETCH_stroke_central_size = bpy.props.FloatProperty(
name = "Midpoint Scale",
description = "Change the scale of the brush at the center of the stroke",
update = Update_StrokeSize,
default = 1, soft_min = 0.25, soft_max = 10, min = 0.1, max = 15)
bpy.types.Scene.SCENE_stroke_central_size = bpy.props.FloatProperty(
name = "Midpoint Scale",
description = "Change the scale of the brush at the center of the stroke",
default = 1, soft_min = 0.25, soft_max = 10, min = 0.1, max = 15)
bpy.types.Object.ASKETCH_twist_mode = bpy.props.EnumProperty(
name="Twist Mode",
items=(
('1', 'Tangent', 'Use the tangent to calculate twist.'),
('2', 'Minimum', 'Use the least twist over the entire curve'),
('3', 'Z-Up', 'Use the Z-Axis to calculate the curve twist at each point'),
),
update = Update_TwistMode)
bpy.types.Object.ASKETCH_tilt = bpy.props.FloatProperty(
name = "Tilt",
description = "Rotate the stroke across the curve",
update = Update_TwistTilt,
default = 0.0, soft_min = 0.0, soft_max = 360, min = 0.0, max = 360)
bpy.types.Object.ASKETCH_tilt_old = bpy.props.FloatProperty(
name = "TiltOld",
description = "Rotate the stroke across the curve",
default = 0.0, soft_min = 0.0, soft_max = 360, min = 0.0, max = 360)
bpy.types.Object.ASKETCH_smooth = bpy.props.FloatProperty(
name = "Smooth",
description = "Sets how much tilt smoothing is performed",
#update = Update_TwistSmooth,
default = 0, soft_min = 0, soft_max = 20, min = 0, max = 20)
bpy.types.Object.ASKETCH_origin_point = bpy.props.EnumProperty(
name="Set Object Origin",
items=(
        ('1', "Don't Set Origin", "Leaves the origin at its original position"),
('2', 'Origin to Centre of Mass', 'Sets the origin using the objects centre of mass.'),
('3', 'Origin to Start of Curve', 'Sets the origin to the start of the curve'),
('4', 'Origin to End of Curve', 'Sets the origin to the end of the curve'),
),
update = Update_ObjectOrigin)
bpy.types.Scene.SCENE_origin_point = bpy.props.EnumProperty(
name="Set Scene Origin",
items=(
('1', 'Origin to Active Object', 'Sets the origin to the active objects origin.'),
('2', 'Origin to Cursor', 'Sets the origin to the current cursor location'),
('3', 'Origin to Centre of Mass', 'Sets the origin using the objects centre of mass.'),
('4', 'Origin to Start of Curve', 'Sets the origin to the start of the curve'),
('5', 'Origin to End of Curve', 'Sets the origin to the end of the curve'),
),)
bpy.types.Scene.SCENE_origin_update = bpy.props.BoolProperty(
name = "Update Origin",
description = "Keeps the origin updated whenever the curve is changed",
update = Update_OriginUpdate,
default = False)
bpy.types.Scene.ASKETCH_live_update = bpy.props.BoolProperty(
name = "Edit Selected Objects",
description = "Updates every selected object when Sketch Settings are changed",
default = False)
bpy.types.Object.ASKETCH_x_mirror_on = bpy.props.BoolProperty(
name = "X Mirror",
description = "Mirror the stroke across the X axis",
update = Update_XMirror,
default = False)
bpy.types.Scene.SCENE_x_mirror_on = bpy.props.BoolProperty(
name = "X Mirror",
description = "Mirror the stroke across the X axis",
default = False)
bpy.types.Object.ASKETCH_connect_elements = bpy.props.BoolProperty(
name = "Merge Elements",
description = "Merges the ends of objects together to create a connected, seamless mesh",
update = Update_MergeElements,
default = False)
bpy.types.Scene.SCENE_connect_elements = bpy.props.BoolProperty(
name = "Merge Elements",
description = "Merges the ends of objects together to create a connected, seamless mesh",
default = False)
bpy.types.Object.ASKETCH_object_curve = bpy.props.BoolProperty(
name = "Curve Object",
description = "Bends a singular instance of the mesh along a curve",
update = Update_CurveObject,
default = False)
bpy.types.Scene.SCENE_object_curve = bpy.props.BoolProperty(
name = "Curve Object",
description = "Bends a singular instance of the mesh along a curve",
default = False)
bpy.types.Object.ASKETCH_lock_transform = bpy.props.BoolProperty(
name = "Lock Transform",
description = "Prevents generated curve from moving if ticked",
update = Update_LockTransform,
default = False)
bpy.types.Scene.SCENE_lock_transform = bpy.props.BoolProperty(
name = "Lock Transform",
description = "Prevents generated curve from moving if ticked",
default = False)
bpy.types.Scene.ASKETCH_brush_object = bpy.props.StringProperty(
name = "Brush",
description = "Name of the object used as brush",
default = "None")
bpy.types.Scene.ASKETCH_start_cap = bpy.props.StringProperty(
name = "Start Cap",
description = "Name of the object used as brush",
default = "None")
bpy.types.Scene.ASKETCH_end_cap = bpy.props.StringProperty(
name = "End Cap",
description = "Name of the object used as brush",
default = "None")
default_brush_name = "A_SK_brush_default"
# P - I don't understand what these do???
# Neither do I...
#//////////////////////////////// - MYSTERY ENTITY 01 - ////////////////////////////////
def ASKETCH_default_brush_object(self): return default_brush_name
bpy.types.Object.ASKETCH_default_brush_object = property(ASKETCH_default_brush_object)
def ASKETCH_brush_object(self): return default_brush_name
bpy.types.Object.ASKETCH_brush_object = property(ASKETCH_brush_object)
def ASKETCH_mball_stroke_definition(self): return mball_definition
bpy.types.Object.ASKETCH_mball_stroke_definition = property(ASKETCH_mball_stroke_definition)
def ASKETCH_mball_wire_resolution(self): return mball_wire_resolution
bpy.types.Object.ASKETCH_mball_wire_resolution = property(ASKETCH_mball_wire_resolution)
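# Best-guess clarification of the four definitions above: they use Python's
# built-in property() to expose module-level constants as read-only attributes
# on every Object. Unlike bpy.props properties they are never saved into the
# .blend file; reading them simply returns the module defaults, e.g.:
#
#   ob = bpy.context.object
#   print(ob.ASKETCH_default_brush_object)    # -> "A_SK_brush_default"
#   print(ob.ASKETCH_mball_wire_resolution)   # -> module-level mball_wire_resolution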
#//////////////////////////// - MYSTERY ENTITY 02 - ////////////////////////////////
#check if the default brush is already there:
#This code tries to manipulate data on registration, and Blender doesn't like that. BROKEN!
#defaultb_there = False
#for ob in bpy.data.objects:
# if(ob.name == 'AS_Brush_Default'):
# defaultb_there = True
#if not, then we add it:
#if(defaultb_there == False):
# bpy.ops.mesh.primitive_uv_sphere_add()
# bpy.data.objects[-1].name = 'AS_Brush_Default'
#///////////////////////////////////////////////////////////////////////////
#///////////////////////////////////////////////////////////////////////////
#///////////////////////////////////////////////////////////////////////////
#//////////////////// - USER INTERFACE PANELS - /////////////////////////////
class View3DPanel():
bl_space_type = 'VIEW_3D'
bl_region_type = 'TOOLS'
#Generates the UI panel inside the 3D view
class VIEW3D_PT_tools_ASKETCH_create(bpy.types.Panel):
bl_space_type = "VIEW_3D"
bl_region_type = "TOOLS"
bl_context = "objectmode"
bl_label = "Create Sketch"
bl_category = "Grease Pencil"
#def poll(self, context):
#return context.active_object != None
def draw(self, context):
layout = self.layout
scn = context.scene
ob = context.object
#layout.label(text="Add/Edit/Delete")
col_ad = layout.column(align=True)
col_ad.alignment = 'EXPAND'
row_ad = col_ad.row(align=True)
row_ad.operator("gpencil.asketch_stroke_draw", text="Add Stroke")
row_ad.operator("object.asketch_delete_strokes", text="Delete Stroke")
row_ad = col_ad.row(align=True)
row_ad.operator("object.asketch_stroke_editmode", text="Edit Stroke")
row_ad.operator("gpencil.asketch_clear_data", text="Clear Grease Strokes")
layout.separator()
col_brush = layout.column(align=True)
col_brush.alignment = 'EXPAND'
row_brush = col_brush.row(align=True)
#split = layout.split()
#col = split.column()
#col.label(text="Target:")
#col.prop(md, "target", text="")
row_brush.prop(scn, "ASKETCH_brush_object")
row_brush.operator("object.asketch_set_brush_object", text="", icon="FORWARD")
row_brush.operator("object.asketch_clear_brush_object", text="", icon="X")
col_brush.separator()
row_brush = col_brush.row(align=True)
row_brush.prop(scn, "ASKETCH_start_cap")
row_brush.operator("object.asketch_set_start_cap", text="", icon="FORWARD")
row_brush.operator("object.asketch_clear_start_cap", text="", icon="X")
col_brush.separator()
row_brush = col_brush.row(align=True)
row_brush.prop(scn, "ASKETCH_end_cap")
row_brush.operator("object.asketch_set_end_cap", text="", icon="FORWARD")
row_brush.operator("object.asketch_clear_end_cap", text="", icon="X")
#row_brush.prop(md, "bpy.context.scene.ASKETCH_brush_object", text="Rawr")
# row_update = layout.column(align=True)
# row_update.prop(scn, "ASKETCH_live_update_new")
#Generates the UI panel inside the 3D view
class VIEW3D_PT_tools_ASKETCH_edit_settings(bpy.types.Panel):
bl_space_type = "VIEW_3D"
bl_region_type = "TOOLS"
bl_context = "objectmode"
bl_label = "Sketch Settings"
bl_category = "Grease Pencil"
def draw(self, context):
layout = self.layout
scn = context.scene
ob = context.object
row_edit = layout.column(align=True)
row_edit.alignment = 'EXPAND'
row_edit = layout.column(align=True)
row_edit.prop(scn, "ASKETCH_live_update")
row_edit.separator()
if context.scene.ASKETCH_live_update is True and context.active_object.name.find(".SKO") != -1:
row_edit.prop(ob, "ASKETCH_stroke_size", slider = True)
row_edit.prop(ob, "ASKETCH_stroke_element_offset", slider = True)
row_edit.prop(ob, "ASKETCH_stroke_central_size", slider = True)
row_edit.operator("object.asketch_normalise_options", text="Normalise")
row_edit.separator()
col_origin = layout.row(align=True)
col_origin.alignment = 'EXPAND'
col_origin.prop(ob, "ASKETCH_origin_point", text = "", icon = "CURSOR")
col_origin.prop(scn, "SCENE_origin_update", text="", toggle = True, icon = "ALIGN", icon_only = True)
col_origin.separator()
layout.label(text="Stroke Options")
col_edit = layout.row(align=True)
row_edit= col_edit.row(align=True)
row_edit.alignment = 'EXPAND'
row_edit.prop(ob, "ASKETCH_x_mirror_on")
row_edit.prop(ob, "ASKETCH_connect_elements")
row_edit = layout.row(align=True)
row_edit.prop(ob, "ASKETCH_object_curve")
row_edit.prop(ob, "ASKETCH_lock_transform")
layout.separator()
col_align = layout.column(align=True)
col_align.alignment = 'EXPAND'
row_align = col_align.row(align=True)
#row_align.label(text="Curve Tilt")
#row_align.separator()
row_align.prop(ob, "ASKETCH_tilt", slider=True)
#row_align = col_align.row(align=True)
#row_align.prop(ob, 'ASKETCH_smooth', slider=True)
row_align = col_align.row(align=True)
row_align.prop(ob, "ASKETCH_twist_mode", text = "", icon = "MAN_ROT")
row_align.operator("object.asketch_normalise_tilt", text="Normalise Tilt")
else:
row_edit.prop(scn, "SCENE_stroke_size", slider = True)
row_edit.prop(scn, "SCENE_stroke_element_offset", slider = True)
row_edit.prop(scn, "SCENE_stroke_central_size", slider = True)
row_edit.operator("object.asketch_normalise_options", text="Normalise")
row_edit.separator()
col_origin = layout.row(align=True)
col_origin.alignment = 'EXPAND'
col_origin.prop(scn, "SCENE_origin_point", text = "", icon = "CURSOR")
col_origin.prop(scn, "SCENE_origin_update", text="", toggle = True, icon = "ALIGN", icon_only = True)
col_origin.separator()
layout.label(text="Stroke Options")
col_edit = layout.row(align=True)
row_edit= col_edit.row(align=True)
row_edit.alignment = 'EXPAND'
row_edit.prop(scn, "SCENE_x_mirror_on")
row_edit.prop(scn, "SCENE_connect_elements")
row_edit = layout.row(align=True)
row_edit.prop(scn, "SCENE_object_curve")
row_edit.prop(scn, "SCENE_lock_transform")
layout.separator()
# Generates the UI panel inside the 3D view
class VIEW3D_PT_tools_ASKETCH_Convert(bpy.types.Panel):
bl_space_type = "VIEW_3D"
bl_region_type = "TOOLS"
bl_context = "objectmode"
bl_label = "Convert Sketch"
bl_category = "Grease Pencil"
def draw(self, context):
layout = self.layout
scn = context.scene
ob = context.object
col_convert = layout.column(align=True)
col_convert.alignment = 'EXPAND'
#row_convert = col_convert.row(align=True)
#row_convert.label(text="Convert Using Metaballs")
#col_convert.separator()
#row_convert = col_convert.row(align=True)
#row_convert.operator("object.asketch_strokes_to_metaballs", text="Step 1")
#row_convert.operator("object.asketch_metaballs_rename", text="Step 2")
#row_convert.operator("object.asketch_metaballs_to_mesh", text="Step 3")
#col_convert.separator()
row_convert= col_convert.column(align=True)
row_convert.operator("object.asketch_strokes_to_meshes", text="Convert to Mesh")
#row_convert.operator("object.asketch_strokes_to_meshes", text="Convert using Boolean")
col_convert.separator()
row_vis = layout.column(align=True)
row_vis.alignment = 'EXPAND'
#row_vis.label(text="Visibility")
#row_vis.operator("object.asketch_toggle_mesh_visibility", text="Toggle Mesh Visibility")
#row_vis.separator()
def DuplicateObject(target, targetLocation):
#### Select and make target active
bpy.ops.object.select_all(action='DESELECT')
bpy.context.scene.objects.active = bpy.data.objects[target.name]
bpy.ops.object.select_pattern(pattern=target.name)
# Duplicate the object
bpy.ops.object.duplicate_move()
# Now switch the active object to the duplicate
target = bpy.context.active_object
target.location = targetLocation.location
def FocusObject(targetName):
#### Select and make target active
bpy.ops.object.select_all(action='DESELECT')
bpy.context.scene.objects.active = bpy.data.objects[targetName]
bpy.ops.object.select_pattern(pattern=targetName)
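# Usage sketch for the two helpers above (illustrative only, assuming the
# named objects exist - both raise a KeyError otherwise):
#
#   FocusObject("Cube")   # "Cube" becomes the only selected, active object
#   DuplicateObject(bpy.data.objects["Cube"], bpy.data.objects["Empty"])
#                         # duplicate of "Cube" moved to "Empty"'s location
#
# This select-then-activate pattern is the standard way to aim bpy.ops
# operators at a specific object in the 2.7x API.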
class VIEW3D_PT_tools_ASKETCH_editmode(bpy.types.Panel):
bl_space_type = "VIEW_3D"
bl_region_type = "TOOLS"
bl_context = "curve_edit"
bl_label = "Arrays Sketching"
bl_category = "Grease Pencil"
@classmethod
def poll(self, context):
return context.active_object
def draw(self, context):
layout = self.layout
ob = context.object
col = layout.column(align=True)
col.operator("object.asketch_stroke_editmode_exit", text="Return to Object Mode")
col.separator()
col.label(text="Stroke Tools")
col.operator("object.asketch_stroke_smooth_size", text="Smooth Size")
#//////////////////////////////// - Normalise UI - //////////////////////////
class ASKETCH_normalise_options(bpy.types.Operator):
"""Resets the stroke option variables back to default settings. Useful if you need to sketch using the initial properties of the brush."""
bl_idname = "object.asketch_normalise_options"
bl_label = "Normalise Settings"
def execute(self, context):
print(self)
self.ASKETCH_stroke_size = 1;
self.ASKETCH_stroke_element_offset = 1;
self.ASKETCH_stroke_central_size = 1;
Update_Normalise(self, context)
return {'FINISHED'}
class ASKETCH_normalise_tilt(bpy.types.Operator):
"""Resets the stroke option variables back to default settings. Useful if you need to sketch using the initial properties of the brush."""
bl_idname = "object.asketch_normalise_tilt"
bl_label = "Normalise Tilt"
def execute(self, context):
print(self)
self.ASKETCH_tilt = 0.0
self.ASKETCH_tilt_old = 0.0
Update_NormaliseTilt(self, context)
return {'FINISHED'}
#//////////////////////////////// - GPencil Management - ///////////////////////////
class GPencil_Clear_Data(bpy.types.Operator):
"""Clears the Grease Pencil data currently displayed"""
bl_idname = "gpencil.asketch_clear_data"
bl_label = "Array Sketch Clear GPencil"
bl_options = {'REGISTER', 'UNDO'}
#This code was graciously pinched from the Sculpt Tools addon :D
#@classmethod
#def poll(cls, context):
# return context.active_object is not None
def execute(self, context):
if context.scene.grease_pencil is not None:
context.scene.grease_pencil.clear()
for obj in context.scene.objects:
if context.scene.objects[obj.name].grease_pencil is not None:
context.scene.objects[obj.name].grease_pencil.clear()
return {'FINISHED'}
#//////////////////////////////// - Set Curve Data - ///////////////////////////
def ASKETCH_SetStrokeRadius(curveData, strokeSize, centralSize):
# Get brush point data
points = bpy.data.curves[curveData.name].splines[0].bezier_points
central_point = int(len(points)/2)
bpy.ops.curve.select_all(action="DESELECT")
bpy.ops.curve.de_select_first()
#de_select_first/_last are toggle operators; because everything was just deselected, these two calls leave the first and last points selected
bpy.ops.curve.de_select_last()
bpy.ops.curve.radius_set(radius = strokeSize)
#Patrick's Version
bpy.ops.curve.select_all(action = "DESELECT")
points[int(len(points)/2)].select_control_point = True
bpy.ops.object.editmode_toggle()
bpy.ops.object.editmode_toggle() #I'm not sure if toggling in/out of editmode is still necessary for selected points to update, but I'm not risking it :-)
#from the above, just the middle point should be selected, so now we can set the central radius
bpy.ops.curve.radius_set(radius = strokeSize * centralSize)
#now that the first, middle and last have their radii set appropriately, smooth out the points in between by...
bpy.ops.curve.select_all(action="INVERT") #selecting everything but the middle
bpy.ops.curve.de_select_first() #deselecting the first
bpy.ops.curve.de_select_last() #deselecting the last (remember they were selected at this point, and this operator toggles)
bpy.ops.curve.smooth_radius() #do we need to specify any iterations here?
bpy.ops.curve.select_all(action="DESELECT")
def ASKETCH_SetStrokeTilt(curveData, strokeTilt):
# Get brush point data
points = bpy.data.curves[curveData.name].splines[0].bezier_points
central_point = int(len(points)/2)
bpy.ops.curve.select_all(action="DESELECT")
bpy.ops.curve.de_select_first()
bpy.ops.curve.de_select_last()
for checkPoints in bpy.data.curves[curveData.name].splines[0].bezier_points:
checkPoints.tilt = strokeTilt #+ checkPoints.tilt
#Patrick's Version
bpy.ops.curve.select_all(action = "DESELECT")
points[int(len(points)/2)].select_control_point = True
bpy.ops.object.editmode_toggle()
bpy.ops.object.editmode_toggle()
bpy.ops.curve.select_all(action="INVERT")
bpy.ops.curve.de_select_first()
bpy.ops.curve.de_select_last()
bpy.ops.curve.smooth_radius()
bpy.ops.curve.select_all(action="DESELECT")
#//////////////////////////////// - OBJECT ORIGIN- ////////////////////////////////
def ASKETCH_SetObjectOrigin(object, enum, context):
print("Inside ASKETCH_SetObjectOrigin")
# Set to COM
if enum == 2:
print("Setting to COM")
# Enter the curve!
stroke_curve_name = object.name.replace(".SKO", ".SKC")
FocusObject(stroke_curve_name)
curve_data = bpy.context.object.data
# Select everything
bpy.ops.object.editmode_toggle()
bpy.ops.curve.select_all(action="SELECT")
bpy.ops.curve.de_select_first()
# Saves the current cursor location
cursor_loc = bpy.data.scenes[bpy.context.scene.name].cursor_location
previous_cursor_loc = [cursor_loc[0], cursor_loc[1], cursor_loc[2]]
# Snap the cursor
bpy.ops.view3D.snap_cursor_to_selected()
bpy.ops.curve.select_all(action="DESELECT")
bpy.ops.object.editmode_toggle()
# Temporarily remove the Copy Location Constraint
FocusObject(stroke_curve_name)
bpy.ops.object.constraints_clear()
# Now give the curve the same location as the object
FocusObject(stroke_curve_name)
bpy.context.object.location = object.location
# Set the origin
bpy.ops.object.origin_set(type ='ORIGIN_CURSOR')
# Move the object to the curve
FocusObject(object.name)
bpy.context.object.location = bpy.data.objects[stroke_curve_name].location
# Now just re-apply the constraint!
FocusObject(stroke_curve_name)
bpy.ops.object.constraint_add(type='COPY_LOCATION')
bpy.data.objects[stroke_curve_name].constraints["Copy Location"].target = object
# Restore the original cursor location
bpy.data.scenes[bpy.context.scene.name].cursor_location = previous_cursor_loc
# Set to Curve Start
elif enum == 3:
print("Setting to First")
# Enter the curve!
stroke_curve_name = object.name.replace(".SKO", ".SKC")
FocusObject(stroke_curve_name)
curve_data = bpy.context.object.data
# Only select the beginning curve point
bpy.ops.object.editmode_toggle()
bpy.ops.curve.select_all(action="DESELECT")
bpy.ops.curve.de_select_first()
# Saves the current cursor location
cursor_loc = bpy.data.scenes[bpy.context.scene.name].cursor_location
previous_cursor_loc = [cursor_loc[0], cursor_loc[1], cursor_loc[2]]
# Snap the cursor
bpy.ops.view3D.snap_cursor_to_selected()
bpy.ops.curve.select_all(action="DESELECT")
bpy.ops.object.editmode_toggle()
# Temporarily remove the Copy Location Constraint
FocusObject(stroke_curve_name)
bpy.ops.object.constraints_clear()
# Now give the curve the same location as the object
FocusObject(stroke_curve_name)
bpy.context.object.location = object.location
# Set the origin
bpy.ops.object.origin_set(type='ORIGIN_CURSOR')
# Move the object to the curve
FocusObject(object.name)
bpy.context.object.location = bpy.data.objects[stroke_curve_name].location
# Now just re-apply the constraint!
FocusObject(stroke_curve_name)
bpy.ops.object.constraint_add(type='COPY_LOCATION')
bpy.data.objects[stroke_curve_name].constraints["Copy Location"].target = object
# Restore the original cursor location
bpy.data.scenes[bpy.context.scene.name].cursor_location = previous_cursor_loc
# Set to Curve End
elif enum == 4:
print("Setting to Last")
# Enter the curve!
stroke_curve_name = object.name.replace(".SKO", ".SKC")
FocusObject(stroke_curve_name)
curve_data = bpy.context.object.data
# Only select the beginning curve point
bpy.ops.object.editmode_toggle()
bpy.ops.curve.select_all(action="DESELECT")
bpy.ops.curve.de_select_last()
# Saves the current cursor location
cursor_loc = bpy.data.scenes[bpy.context.scene.name].cursor_location
previous_cursor_loc = [cursor_loc[0], cursor_loc[1], cursor_loc[2]]
# Snap the cursor and set the origin!
bpy.ops.view3D.snap_cursor_to_selected()
bpy.ops.curve.select_all(action="DESELECT")
bpy.ops.object.editmode_toggle()
# Temporarily remove the Copy Location Constraint
FocusObject(stroke_curve_name)
bpy.ops.object.constraints_clear()
# Now give the curve the same location as the object
FocusObject(stroke_curve_name)
bpy.context.object.location = object.location
# Set the origin
bpy.ops.object.origin_set(type='ORIGIN_CURSOR')
# Move the object to the curve
FocusObject(object.name)
bpy.context.object.location = bpy.data.objects[stroke_curve_name].location
# Now just re-apply the constraint!
FocusObject(stroke_curve_name)
bpy.ops.object.constraint_add(type='COPY_LOCATION')
bpy.data.objects[stroke_curve_name].constraints["Copy Location"].target = object
# Restore the original cursor location
bpy.data.scenes[bpy.context.scene.name].cursor_location = previous_cursor_loc
def ASKETCH_SetSceneOrigin(curve, enum, active_object_name, context):
# Set to Active Object
if enum == 1:
print("Changing origin to object")
# Select the object by name and change the location
if active_object_name != "None":
# Focus the curve
FocusObject(curve.name)
#Obtains the current cursor location and previous 3 cursor locations.
cursor_loc = bpy.data.scenes[bpy.context.scene.name].cursor_location
previous_cursor_loc = [cursor_loc[0], cursor_loc[1], cursor_loc[2]]
bpy.data.scenes[bpy.context.scene.name].cursor_location = bpy.context.scene.objects[active_object_name].location
bpy.ops.object.origin_set(type='ORIGIN_CURSOR')
bpy.data.scenes[bpy.context.scene.name].cursor_location = previous_cursor_loc
# Set to Cursor
elif enum == 2:
print("Changing origin to cursor")
# Focus the curve
FocusObject(curve.name)
# Save the previous cursor location
cursor_loc = bpy.data.scenes[bpy.context.scene.name].cursor_location
previous_cursor_loc = [cursor_loc[0], cursor_loc[1], cursor_loc[2]]
# Set the origin
bpy.ops.object.origin_set(type='ORIGIN_CURSOR')
#FocusObject(stroke_curve_name)
#dbpy.ops.object.origin_set(type='ORIGIN_CURSOR')
# Restore the original cursor location
bpy.context.scene.cursor_location = previous_cursor_loc
# Set to COM
elif enum == 3:
print("Changing origin to COM")
# Focus the curve
FocusObject(curve.name)
curve_data = bpy.context.object.data
# Select everything
bpy.ops.object.editmode_toggle()
bpy.ops.curve.select_all(action="SELECT")
# Save the previous cursor location
cursor_loc = bpy.data.scenes[bpy.context.scene.name].cursor_location
previous_cursor_loc = [cursor_loc[0], cursor_loc[1], cursor_loc[2]]
# Snap the cursor
bpy.ops.view3D.snap_cursor_to_selected()
bpy.ops.curve.select_all(action="DESELECT")
bpy.ops.object.editmode_toggle()
# Set the origin
bpy.ops.object.origin_set(type ='ORIGIN_CURSOR')
# Restore the original cursor location
bpy.data.scenes[bpy.context.scene.name].cursor_location = previous_cursor_loc
# Set to Curve Start
elif enum == 4:
print("Changing origin to Start")
# Focus the curve
FocusObject(curve.name)
curve_data = bpy.context.object.data
# Only select the beginning curve point
bpy.ops.object.editmode_toggle()
bpy.ops.curve.select_all(action="DESELECT")
bpy.ops.curve.de_select_first()
# Save the previous cursor location
cursor_loc = bpy.data.scenes[bpy.context.scene.name].cursor_location
previous_cursor_loc = [cursor_loc[0], cursor_loc[1], cursor_loc[2]]
# Snap the cursor
bpy.ops.view3D.snap_cursor_to_selected()
bpy.ops.curve.select_all(action="DESELECT")
bpy.ops.object.editmode_toggle()
# Set the origin
bpy.ops.object.origin_set(type='ORIGIN_CURSOR')
# Restore the original cursor location
bpy.data.scenes[bpy.context.scene.name].cursor_location = previous_cursor_loc
# Set to Curve End
elif enum == 5:
print("Changing origin to End")
# Focus the curve
FocusObject(curve.name)
curve_data = bpy.context.object.data
# Only select the beginning curve point
bpy.ops.object.editmode_toggle()
bpy.ops.curve.select_all(action="DESELECT")
bpy.ops.curve.de_select_last()
# Save the previous cursor location
cursor_loc = bpy.data.scenes[bpy.context.scene.name].cursor_location
previous_cursor_loc = [cursor_loc[0], cursor_loc[1], cursor_loc[2]]
# Snap the cursor and set the origin!
bpy.ops.view3D.snap_cursor_to_selected()
bpy.ops.curve.select_all(action="DESELECT")
bpy.ops.object.editmode_toggle()
# Set the origin
bpy.ops.object.origin_set(type='ORIGIN_CURSOR')
# Restore the original cursor location
bpy.data.scenes[bpy.context.scene.name].cursor_location = previous_cursor_loc
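# Note on the repeated "cursor dance" in the two origin functions above:
# Blender 2.7x has no direct origin-to-arbitrary-point operator, so every
# branch follows the same recipe (a sketch of the pattern, not extra behaviour):
#
#   saved = list(bpy.context.scene.cursor_location)    # remember the cursor
#   bpy.ops.view3D.snap_cursor_to_selected()           # or assign a location
#   bpy.ops.object.origin_set(type='ORIGIN_CURSOR')    # move the origin there
#   bpy.context.scene.cursor_location = saved          # restore the cursor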
#//////////////////////////////// - DRAW STROKE - ////////////////////////////////
# Draw the Stroke
class ASKETCH_StrokeDraw(bpy.types.Operator):
"""Creates the stroke object using a grease stroke or selected curve, and provided brushes and cap settings"""
bl_idname = "gpencil.asketch_stroke_draw"
bl_label = "Array Sketch Stroke Draw"
# For some reason this doesn't work, just leave it out....
# //////////////////// - FIX ME SOMEHOW PLEASE - /////////////////////
#stroke_size = bpy.props.FloatProperty(name="Stroke Size", description="Size of the stroke", default = stroke_size)
#stroke_central_size = bpy.props.FloatProperty(name="Stroke Central Size", description="Size of the middle of the stroke", default = stroke_central_size)
#stroke_elements_offset = bpy.props.FloatProperty(name="Stroke Elements Distance", description="Distance between elements of the stroke", default = stroke_elements_offset)
#appends a ".SKC<n>"/".SKO<n>" suffix to the base name, incrementing n until both names are unused, so strokes stay uniquely numbered
def append_stroke_number(self, partial_name):
n = 1
while True:
name_stroke_obj = partial_name + ".SKO" + str(n)
name_stroke_curve = partial_name + ".SKC" + str(n)
if (name_stroke_obj not in bpy.data.objects and name_stroke_curve not in bpy.data.objects):
break
n += 1
return name_stroke_curve
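# Naming example (illustrative): with no existing strokes,
# append_stroke_number("ASKETCH Curve") returns "ASKETCH Curve.SKC1"; the
# matching brush mesh later takes the ".SKO1" suffix. n keeps incrementing
# until BOTH names are free, so deleting stroke 2 of 3 never lets a new
# stroke collide with the surviving stroke 3.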
#classmethod is required for the poll definition, which Blender uses to enable or disable the operator
@classmethod
def poll(cls, context):
return context.scene.ASKETCH_brush_object != "None"
#return context.active_object.name.find(".SKO") != -1 or context.active_object.name.find(".SKO") != -1:
#return context.active_object is not None and context.active_object.mode == 'OBJECT' and context.active_object.type == 'MESH'
# The main function body
def execute(self, context):
selection_count = len(bpy.context.selected_objects)
use_curve = False
print("-"*40)
continue_process = True
smooth_curve = True
while continue_process:
# If there's an active, unselected object, use the data in the scene and just select the object
if selection_count == 0:
print("DRAW STROKE: Found active object, creating curve")
if bpy.context.gpencil_data is None:
self.report({'WARNING'},
'No grease pencil data found or curve selected. Start drawing!')
continue_process = False
return {'FINISHED'}
bpy.ops.object.select_all(action='DESELECT') # Deselect everything
bpy.ops.gpencil.convert(type='CURVE') # Convert the active grease pencil to Curve
bpy.ops.gpencil.active_frame_delete() # Clear GPencil Data
continue_process = False
# If there are both selected and active objects, use the active object to retrieve data
elif selection_count >= 1:
print("DRAW STROKE: Found selections and active object, creating curve")
if bpy.context.active_object.type == 'CURVE':
stroke_obj_name = bpy.context.active_object.name
if (stroke_obj_name.find(".SKC") == -1):
print("Using a curve for the Stroke Object!")
print("Rawr?")
use_curve = True
smooth_curve = False
continue_process = False
else:
self.report({'WARNING'},
'Array Sketch Selected. Select a normal curve to begin')
return {'FINISHED'}
elif bpy.context.gpencil_data is None:
self.report({'WARNING'},
'No grease pencil data found or curve selected. Start drawing!')
continue_process = False
return {'FINISHED'}
else:
# Grab the Gpencil data from the selected and active object
gpencil_target = bpy.context.gpencil_data
bpy.ops.object.select_all(action='DESELECT') # Deselect everything
bpy.context.scene.grease_pencil = gpencil_target # Switch GPencil Data
bpy.ops.gpencil.convert(type='CURVE') # Convert the active grease pencil
bpy.ops.gpencil.active_frame_delete() # Clear GPencil Data
continue_process = False
else:
self.report({'WARNING'}, "Something broke. :C")
continue_process = False
return {'FINISHED'}
# The curve is now selected but not active, so we're going to make it active!
for obj in bpy.context.selected_objects:
obj.name = "ASKETCH Curve"
bpy.context.scene.objects.active = obj
# Keep a location of the curve object
curve_obj = bpy.context.object
# Checking active object :3
#print("-"*40)
#print("Object Focus:")
#print(bpy.context.object.name)
#print("-"*40)
## Enter Edit Mode
bpy.ops.object.editmode_toggle()
## Select all points in the curve and set the radius
bpy.ops.curve.select_all(action="SELECT")
# Set the radius of the curve
bpy.ops.curve.radius_set(radius=1)
curve_data = bpy.context.object.data # Obtain curve data
if (smooth_curve) :
bpy.ops.curve.spline_type_set(type="BEZIER")
bpy.ops.curve.handle_type_set(type="AUTOMATIC") # Change curve spline + handle types
bpy.data.curves[curve_data.name].use_stretch = True
#here I updated to .show_handles and .show_normal_face
# Hides the handles and other details that are unnecessary for this plugin
bpy.data.curves[curve_data.name].show_handles = False
bpy.data.curves[curve_data.name].show_normal_face = False
bpy.data.curves[curve_data.name].use_path = True
#I added .use_deform_bounds = True because it is false by default and that was causing me great trouble
bpy.data.curves[curve_data.name].use_deform_bounds = True
# smoothsmoothsmoothsmoothsmooth
if (smooth_curve):
print("Smoothing Curve")
bpy.ops.curve.smooth()
bpy.ops.curve.smooth()
bpy.ops.curve.smooth()
bpy.ops.curve.smooth()
bpy.ops.curve.smooth()
bpy.ops.curve.smooth()
# Sets a location for the active object and object data
stroke_curve_obj = bpy.context.active_object
stroke_curve_data = bpy.context.active_object.data
# Clear all animation keyframes generates from the GPencil conversion
stroke_curve_data.animation_data_clear()
stroke_curve_obj.name = self.append_stroke_number("ASKETCH Curve")
#### Inflate stroke.
ASKETCH_SetStrokeRadius(stroke_curve_data, self.stroke_size, self.stroke_central_size)
bpy.ops.object.editmode_toggle() #Exit Edit Mode
### Set curve's interpolation to "Cardinal"
stroke_curve_obj.data.splines[0].radius_interpolation = "CARDINAL"
#### Set Curve's origin to the position of the main object's origin.
#Obtains the current cursor location and previous 3 cursor locations.
#cursor_loc = bpy.data.scenes[bpy.context.scene.name].cursor_location
# This is extracting the float array of the current cursor locaion
#previous_cursor_loc = [cursor_loc[0], cursor_loc[1], cursor_loc[2]]
# Select the object by name and change the location
#if self.main_object is not None:
# bpy.ops.object.select_pattern(pattern=stroke_curve_obj.name)
# bpy.context.scene.objects.active = bpy.context.scene.objects[stroke_curve_obj.name]
# bpy.data.scenes[bpy.context.scene.name].cursor_location = self.main_object.location
# bpy.ops.object.origin_set(type='ORIGIN_CURSOR')
# bpy.data.scenes[bpy.context.scene.name].cursor_location = previous_cursor_loc
#If no object is selected, place the origin point where the cursor is.
#else:
# bpy.ops.object.select_pattern(pattern=stroke_curve_obj.name)
# bpy.context.scene.objects.active = bpy.context.scene.objects[stroke_curve_obj.name]
#
# bpy.data.scenes[bpy.context.scene.name].cursor_location = cursor_loc
# bpy.ops.object.origin_set(type='ORIGIN_CURSOR')
selected_origin = int(self.origin_point)
print("-"*40)
print(selected_origin)
print("Active Object:")
print(self.main_object)
print("-"*40)
# More origin changes here, or it will break :P
if self.main_object is not None:
if self.main_object.name.find(".SKO") != -1:
ASKETCH_SetSceneOrigin(stroke_curve_obj, selected_origin, "None", context)
else:
ASKETCH_SetSceneOrigin(stroke_curve_obj, selected_origin, self.main_object.name, context)
else:
ASKETCH_SetSceneOrigin(stroke_curve_obj, selected_origin, "None", context)
#Now duplicate yoself.
DuplicateObject(self.brush_object, stroke_curve_obj)
stroke_brush_obj = bpy.context.active_object
stroke_brush_obj.name = stroke_curve_obj.name.replace(".SKC", ".SKO", 2)
FocusObject(stroke_brush_obj.name)
### Add Array modifier to the brush-object and make it follow the curve
if self.curve_object is False:
print("Curve Object Not Active")
bpy.ops.object.modifier_add(type='ARRAY')
if self.connect_elements:
print("Merge Elements Active")
stroke_brush_obj.modifiers['Array'].use_merge_vertices = True
stroke_brush_obj.modifiers['Array'].use_merge_vertices_cap = True
stroke_brush_obj.modifiers['Array'].merge_threshold = 1.0
# Modifies the Array attributes
stroke_brush_obj.modifiers["Array"].relative_offset_displace = [self.stroke_elements_offset, 0, 0]
stroke_brush_obj.modifiers["Array"].fit_type = "FIT_CURVE"
stroke_brush_obj.modifiers["Array"].curve = stroke_curve_obj
# If start or end cap objects are assigned, hook them into the Array modifier's caps
if bpy.context.scene.ASKETCH_start_cap != "None":
stroke_brush_obj.modifiers["Array"].start_cap = bpy.data.objects[self.brush_start_cap]
if bpy.context.scene.ASKETCH_end_cap != "None":
stroke_brush_obj.modifiers["Array"].end_cap = bpy.data.objects[self.brush_end_cap]
# Adds and modifies the Curve attributes
FocusObject(stroke_brush_obj.name)
bpy.ops.object.modifier_add(type='CURVE')
stroke_brush_obj.modifiers["Curve"].object = stroke_curve_obj
#### Add Mirror modifier if activated
if self.mirror_x_on is True:
bpy.ops.object.modifier_add(type='MIRROR')
#If an object is selected, mirror it through that
if self.main_object is not None:
stroke_brush_obj.modifiers["Mirror"].mirror_object = self.main_object
# Make sure the curve is selected
FocusObject(stroke_curve_obj.name)
# Now lock the location
bpy.ops.object.constraint_add(type='COPY_LOCATION')
stroke_curve_obj.constraints["Copy Location"].target = stroke_brush_obj
## Lock movement of the stroke's object and curve.
if self.lock_transform is True:
bpy.data.objects[stroke_brush_obj.name].lock_location[0] = True
bpy.data.objects[stroke_brush_obj.name].lock_location[1] = True
bpy.data.objects[stroke_brush_obj.name].lock_location[2] = True
bpy.data.objects[stroke_curve_obj.name].lock_location[0] = True
bpy.data.objects[stroke_curve_obj.name].lock_location[1] = True
bpy.data.objects[stroke_curve_obj.name].lock_location[2] = True
## Set main object as active.
if self.main_object is not None:
bpy.ops.object.select_pattern(pattern=self.main_object.name)
bpy.context.scene.objects.active = self.main_object
# Set the curve mesh as the selected and active object
FocusObject(stroke_brush_obj.name)
# Turn off auto-updating until the variables have been passed
bpy.context.scene.ASKETCH_live_update = False
# Now set the current custom variables to the new object
stroke_brush_obj.ASKETCH_x_mirror_on = self.mirror_x_on
stroke_brush_obj.ASKETCH_connect_elements = self.connect_elements
stroke_brush_obj.ASKETCH_object_curve = self.curve_object
stroke_brush_obj.ASKETCH_lock_transform = self.lock_transform
stroke_brush_obj.ASKETCH_stroke_size = self.stroke_size
stroke_brush_obj.ASKETCH_stroke_central_size = self.stroke_central_size
stroke_brush_obj.ASKETCH_stroke_element_offset = self.stroke_elements_offset
bpy.context.scene.ASKETCH_live_update = True
if selected_origin == 1 or selected_origin == 2:
stroke_brush_obj.ASKETCH_origin_point = "1"
elif selected_origin == 3:
stroke_brush_obj.ASKETCH_origin_point = "2"
elif selected_origin == 4:
stroke_brush_obj.ASKETCH_origin_point = "3"
elif selected_origin == 5:
stroke_brush_obj.ASKETCH_origin_point = "4"
print(int(selected_origin))
print(int(stroke_brush_obj.ASKETCH_origin_point))
# Hide the curve to keep the appearance of geometry clean.
# stroke_curve_obj.hide = True
def invoke(self, context, event):
self.main_object = bpy.context.object
##-------------------------------------------------------------------------------------------
# Checks each object name provided to see if it exists. If it doesn't, it brings up a warning
# and stops the code from going any further
if bpy.data.objects.get(bpy.context.scene.ASKETCH_brush_object) is not None:
self.brush_object = bpy.context.scene.objects[bpy.context.scene.ASKETCH_brush_object]
else:
self.report({'WARNING'},
'Brush Object given does not exist, please use the name of an existing object! :3')
return {'FINISHED'}
#print(bpy.context.scene.ASKETCH_start_cap)
#print(self.main_object.name)
if bpy.context.scene.ASKETCH_start_cap != "None":
if bpy.data.objects.get(bpy.context.scene.ASKETCH_start_cap) is not None:
self.brush_start_cap = bpy.context.scene.ASKETCH_start_cap
else:
self.report({'WARNING'},
'Start Cap given does not exist, please use the name of an existing object or leave it unset! :3')
return {'FINISHED'}
if bpy.context.scene.ASKETCH_end_cap != "None":
if bpy.data.objects.get(bpy.context.scene.ASKETCH_end_cap) is not None:
self.brush_end_cap = bpy.context.scene.ASKETCH_end_cap
else:
self.report({'WARNING'},
'End Cap given does not exist, please use the name of an existing object or leave it unset! :3')
return {'FINISHED'}
cheap_check = False
print("-"*40)
if bpy.context.selected_objects is not None and bpy.context.active_object is not None:
if bpy.context.object.name.find(".SKO") != -1 and context.scene.ASKETCH_live_update is True:
print("Using object content")
cheap_check = True
self.mirror_x_on = bpy.context.object.ASKETCH_x_mirror_on
self.connect_elements = bpy.context.object.ASKETCH_connect_elements
self.curve_object = bpy.context.object.ASKETCH_object_curve
self.lock_transform = bpy.context.object.ASKETCH_lock_transform
self.stroke_size = bpy.context.object.ASKETCH_stroke_size
self.stroke_central_size = bpy.context.object.ASKETCH_stroke_central_size
self.stroke_elements_offset = bpy.context.object.ASKETCH_stroke_element_offset
self.origin_point = bpy.context.object.ASKETCH_origin_point
if cheap_check is not True:
print("Using scene content")
self.mirror_x_on = bpy.context.scene.SCENE_x_mirror_on
self.connect_elements = bpy.context.scene.SCENE_connect_elements
self.curve_object = bpy.context.scene.SCENE_object_curve
self.lock_transform = bpy.context.scene.SCENE_lock_transform
self.stroke_size = bpy.context.scene.SCENE_stroke_size
self.stroke_central_size = bpy.context.scene.SCENE_stroke_central_size
self.stroke_elements_offset = bpy.context.scene.SCENE_stroke_element_offset
self.origin_point = bpy.context.scene.SCENE_origin_point
self.execute(context)
return {"FINISHED"}
# Enter "Stroke-Editmode"
class ASKETCH_Stroke_Editmode(bpy.types.Operator):
"""Enter the edit mode of the stroke object"""
bl_idname = "object.asketch_stroke_editmode"
bl_label = "Array Sketch Stroke Editmode"
@classmethod
def poll(cls, context):
# fail_test ensures that if any selected object is not a stroke object, Edit Stroke can't be used.
fail_test = True
for obj in context.selected_objects:
if obj.name.find(".SKO") == -1:
fail_test = False
return fail_test
def execute(self, context):
stroke_obj_name = bpy.context.object.name
if (stroke_obj_name.find(".SKO") != -1):
name_to_query = bpy.context.object.name.replace(".SKO", ".SKC")
if name_to_query in bpy.context.scene.objects:
bpy.ops.object.select_pattern(pattern=name_to_query)
bpy.context.scene.objects.active = bpy.context.scene.objects[name_to_query]
bpy.ops.object.editmode_toggle()
return {"FINISHED"}
#--------------- EXIT EDIT MODE -----------------------------------
class ASKETCH_Stroke_EditmodeExit(bpy.types.Operator):
"""Exit edit mode for the stroke object"""
bl_idname = "object.asketch_stroke_editmode_exit"
bl_label = "Array Sketch Stroke Exit Editmode"
def execute(self, context):
#Toggle out of edit mode
bpy.ops.object.editmode_toggle()
stroke_curve = bpy.context.object
bpy.ops.object.select_all(action='DESELECT')
#Find the .SKC equivalent
stroke_object_name = stroke_curve.name.replace(".SKC", ".SKO")
FocusObject(stroke_object_name)
stroke = bpy.context.object
#print(stroke_object_name)
#If it's in the scene objects, select it.
if stroke_object_name in bpy.context.scene.objects:
#FocusObject(stroke.name)
print("-"*40)
print("Am i really here? 0___0")
print("-"*40)
# If the cursor needs updating, grab the object and update it.
if context.scene.SCENE_origin_update is True:
#FocusObject(stroke.name)
enum = int(bpy.context.object.ASKETCH_origin_point)
print("Rawr")
print("-"*40)
print("Going to object origin!")
print(int(stroke.ASKETCH_origin_point))
print(int(3.4))
print("-"*40)
print("Going to UpdateObjectOrigin")
ASKETCH_SetObjectOrigin(stroke, enum, context)
bpy.ops.object.select_all(action='DESELECT')
#splitted_name = bpy.context.object.name.split(".SKC")
#main_object_name = splitted_name[0]
#main_object_name = bpy.context.object.parent.name
#if bpy.context.scene.objects[main_object_name]:
# bpy.ops.object.select_pattern(pattern=main_object_name)
# bpy.context.scene.objects.active = bpy.context.scene.objects[main_object_name]
def invoke (self, context, event):
self.execute(context)
return {"FINISHED"}
#--------------- TOGGLE EDIT MODE -----------------------------------
class ASKETCH_Stroke_EditmodeToggle(bpy.types.Operator):
bl_idname = "gpencil.asketch_stroke_editmode_toggle"
bl_label = "Array Sketch Stroke Editmode Toggle"
def execute(self, context):
if self.stroke_obj.name.find(".SKO") != -1:
deformer_curve_name = self.stroke_obj.name.replace(".SKO", ".SKC")
if deformer_curve_name in bpy.data.objects:
curve = bpy.data.objects[deformer_curve_name]
#curve.data.restrict_view = False
# Not sure if this will work
FocusObject(deformer_curve_name)
#bpy.ops.object.select_pattern(pattern=deformer_curve_name)
#bpy.context.scene.objects.active = bpy.context.scene.objects[deformer_curve_name]
bpy.ops.object.editmode_toggle()
elif self.stroke_obj.name.find(".SKC") != -1:
if bpy.context.edit_object == self.stroke_obj:
bpy.ops.object.editmode_toggle()
#bpy.data.objects[self.stroke_obj.name].restrict_view = True
#splitted_name = bpy.context.object.name.split(".SKC")
#main_object_name = splitted_name[0]
main_object_name = bpy.context.object.parent.name
if main_object_name in bpy.data.objects:
#Also not sure if this will work
FocusObject(main_object_name)
#bpy.ops.object.select_pattern(pattern=main_object_name)
#bpy.context.scene.objects.active = bpy.context.scene.objects[main_object_name]
def invoke (self, context, event):
self.stroke_obj = bpy.context.object
self.execute(context)
return {"FINISHED"}
#--------------- SET BRUSH OBJECT -----------------------------------
class ASKETCH_SetBrushObject(bpy.types.Operator):
"""Sets the brush object to the active object in the 3D View. Stroke objects cannot be used."""
bl_idname = "object.asketch_set_brush_object"
bl_label = "Array Sketch Set Brush Object"
def execute(self, context):
bpy.context.scene.ASKETCH_brush_object = bpy.context.active_object.name
def invoke (self, context, event):
self.execute(context)
return {"FINISHED"}
#--------------- SET START CAP -----------------------------------
class ASKETCH_SetStartCap(bpy.types.Operator):
"""Sets the object that marks the beginning of the curve using the active object in the 3D View. Deleting the original object will also remove the cap from the stroke until converted."""
bl_idname = "object.asketch_set_start_cap"
bl_label = "Array Sketch Set Start Cap"
def execute(self, context):
bpy.context.scene.ASKETCH_start_cap = bpy.context.active_object.name
def invoke (self, context, event):
self.execute(context)
return {"FINISHED"}
#--------------- SET END CAP -----------------------------------
class ASKETCH_SetEndCap(bpy.types.Operator):
"""Sets the object that marks the end of the curve using the active object in the 3D View. Deleting the original object will also remove the cap from the stroke until converted."""
bl_idname = "object.asketch_set_end_cap"
bl_label = "Array Sketch Set End Cap"
def execute(self, context):
bpy.context.scene.ASKETCH_end_cap = bpy.context.active_object.name
def invoke (self, context, event):
self.execute(context)
return {"FINISHED"}
#--------------- CLEAR BRUSH OBJECT -----------------------------------
class ASKETCH_ClearBrushObject(bpy.types.Operator):
"""Clears the currently chosen object"""
bl_idname = "object.asketch_clear_brush_object"
bl_label = "Array Sketch Set Brush Object"
def execute(self, context):
bpy.context.scene.ASKETCH_brush_object = "None"
def invoke (self, context, event):
self.execute(context)
return {"FINISHED"}
#--------------- CLEAR START CAP -----------------------------------
class ASKETCH_ClearStartCap(bpy.types.Operator):
"""Clears the currently chosen object"""
bl_idname = "object.asketch_clear_start_cap"
bl_label = "Array Sketch Set Start Cap"
def execute(self, context):
bpy.context.scene.ASKETCH_start_cap = "None"
def invoke (self, context, event):
self.execute(context)
return {"FINISHED"}
#--------------- CLEAR END CAP -----------------------------------
class ASKETCH_ClearEndCap(bpy.types.Operator):
"""Clears the currently chosen object"""
bl_idname = "object.asketch_clear_end_cap"
bl_label = "Array Sketch Set End Cap"
def execute(self, context):
bpy.context.scene.ASKETCH_end_cap = "None"
def invoke (self, context, event):
self.execute(context)
return {"FINISHED"}
#--------------- DELETE SELECTED STROKES-----------------------------------
# Delete selected strokes (or last grease pencil stroke)
class ASKETCH_DeleteStrokes(bpy.types.Operator):
"""Deletes the selected strokes."""
bl_idname = "object.asketch_delete_strokes"
bl_label = "Array Sketch Delete Strokes"
@classmethod
def poll(cls, context):
for obj in context.selected_objects:
return obj.name.find(".SKO") != -1 or obj.name.find(".SKO") != -1
def object_type(self, obj):
if (obj.name.find(".SKO") != -1):
obj_type = "stroke_object"
elif (obj.name.find(".SKC") != -1):
obj_type = "stroke_curve"
else:
obj_type = "main_object"
return obj_type
def delete_stroke(self, stroke_object):
#if (stroke_object.grease_pencil):
# if (stroke_object.grease_pencil.layers[0]):
# gp_layers = stroke_object.grease_pencil.layers
# l = None
# for l in gp_layers:
# if l.active:
# break
#
# if (l.active_frame):
# if (len(l.active_frame.strokes) > 0):
# bpy.ops.gpencil.active_frame_delete()
if (self.object_type(stroke_object) == "stroke_object"):
stroke_object_name = stroke_object.name
stroke_curve_name = stroke_object_name.replace(".SKO", ".SKC")
bpy.ops.object.select_pattern(pattern=stroke_curve_name)
bpy.ops.object.select_pattern(pattern=stroke_object_name, extend=True)
bpy.context.scene.objects.active = bpy.data.objects[stroke_object_name]
bpy.ops.object.delete()
def execute(self, context):
#if (bpy.context.object.parent != None):
# main_object_name = bpy.context.object.parent.name
# Get selected objects
strokes_to_delete = []
print("Selected Objects:")
print(len(bpy.context.selected_objects))
for stroke in bpy.context.selected_objects:
if stroke.name.find(".SKO") != -1:
strokes_to_delete.append(stroke)
print("Objects in Delete Queue:")
print(len(strokes_to_delete))
for stroke in strokes_to_delete:
print("Deleting Stroke:")
print(stroke.name)
FocusObject(stroke.name)
self.delete_stroke(stroke)
# There's a potential bug here, keep an eye out.
splitted_name = strokes_to_delete[0].name.split(".SKO")
main_object_name = splitted_name[0]
if main_object_name in bpy.data.objects:
bpy.ops.object.select_pattern(pattern=main_object_name)
bpy.context.scene.objects.active = bpy.data.objects[main_object_name]
return {"FINISHED"}
#--------------- SMOOTH CURVE RADIUS-----------------------------------
# Smooth Curve Radius using control points
class ASKETCH_StrokeSmoothSize(bpy.types.Operator):
bl_idname = "object.asketch_stroke_smooth_size"
bl_label = "Array Sketch Smooth Stroke Size"
def execute(self, context):
if(context.active_object.type == 'CURVE' and context.mode == 'EDIT_CURVE'):
bpy.ops.curve.select_all(action="INVERT")
bpy.ops.curve.smooth_radius()
bpy.ops.curve.select_all(action="INVERT")
def invoke (self, context, event):
self.execute(context)
return {"FINISHED"}
#--------------- Strokes to Meshes-----------------------------------
# Convert strokes to meshes.
class ASKETCH_StrokesToMeshes(bpy.types.Operator):
"""Converts the stroke object to a mesh"""
bl_idname = "object.asketch_strokes_to_meshes"
bl_label = "Array Sketch Stroke To Meshes"
@classmethod
def poll(cls, context):
for obj in context.selected_objects:
return obj.name.find(".SKO") != -1
def execute(self, context):
scene = bpy.context.scene
# Get selected objects
strokes_to_convert = []
print("Selected Objects:")
print(len(bpy.context.selected_objects))
for stroke in bpy.context.selected_objects:
if stroke.name.find(".SKO") != -1:
strokes_to_convert.append(stroke)
print("Objects in Convert Queue:")
print(len(strokes_to_convert))
for stroke in strokes_to_convert:
print("Converting Stroke:")
print(stroke.name)
#Just select the curve now
FocusObject(stroke.name)
mod_types = {'ARRAY', 'CURVE', 'MIRROR'}
mod_active = [mod.show_viewport for mod in stroke.modifiers]
# THANKS BLENDER ARTISTS USER CoDEmannX for this code!
for mod in stroke.modifiers:
if mod.type not in mod_types:
mod.show_viewport = False
me = stroke.to_mesh(scene, True, 'PREVIEW')
for mod, active in zip(stroke.modifiers, mod_active):
if mod.type in mod_types:
stroke.modifiers.remove(mod)
else:
mod.show_viewport = active
# Note: this only swaps the object's data, but doesn't remove the original mesh
stroke.data = me
# Now find and delete the corresponding curve
stroke_curve_name = stroke.name.replace(".SKO", ".SKC")
FocusObject(stroke_curve_name)
bpy.ops.object.delete()
# Rename the curve to remove it from being considered an Object Sketch
FocusObject(stroke.name)
stroke.name = "ASKETCH Object"
return {"FINISHED"}
#--------------- CONVERT STEP 1-----------------------------------
# Convert strokes to metaballs.
class ASKETCH_StrokesToMetaballs(bpy.types.Operator):
"""Converts all currently selected strokes to a combined mesh, using metaballs. PLEASE NOTE - Press each step button in succession to fully convert the object. This may take a few minutes to complete."""
bl_idname = "object.asketch_strokes_to_metaballs"
bl_label = "Array Sketch Stroke To Metaballs"
@classmethod
def poll(cls, context):
for obj in context.selected_objects:
return obj.name.find(".SKO") != -1
def create_metaball(self, point, mball_radius, mball_name):
#Add the ball type metaball
bpy.ops.object.metaball_add(type='BALL', view_align=False, enter_editmode=False, location=(point.co), rotation=(0, 0, 0))
mball_object = bpy.context.object
mball_object.name = mball_name
#bpy.data.objects[mball_object.name].parent = self.main_object
bpy.data.objects[mball_object.name].location += self.main_object.location
mball = bpy.data.metaballs[mball_object.data.name]
mball.resolution = 1
mball.elements[0].radius = mball_radius * 2
mball.elements[0].stiffness = self.mballs_stiffness
return mball_object
def execute(self, context):
#### Be sure that all strokes and their curves are visible
for obj in bpy.data.objects:
if (obj.name.find(self.main_object.name + ".SKO") != -1 or obj.name.find(self.main_object.name + ".SKC") != -1):
bpy.data.objects[obj.name].hide = False
#### If there was a baked mesh, delete it.
baked_mesh_name = self.main_object.name + ".SKME"
if baked_mesh_name in bpy.data.objects:
bpy.data.objects[baked_mesh_name].hide = False
bpy.ops.object.select_pattern(pattern=baked_mesh_name)
bpy.context.scene.objects.active = bpy.data.objects[baked_mesh_name]
bpy.ops.object.delete()
bpy.ops.object.select_pattern(pattern=self.main_object.name)
bpy.context.scene.objects.active = bpy.data.objects[self.main_object.name]
bpy.ops.object.select_pattern(pattern=self.main_object.name)
bpy.context.scene.objects.active = self.main_object
#### Get all curves that will be converted to metaballs, and duplicate and mirror the ones that should be mirrored.
all_strokes_curves = []
for obj in bpy.data.objects:
if obj.name.find(".SKC") != -1:
mirrored_curve = False
stroke_brush_name = obj.name.replace(".SKC", ".SKO")
for mod in bpy.data.objects[stroke_brush_name].modifiers:
if mod.type == "MIRROR" and mod.use_x == True:
mirrored_curve = True
bpy.ops.object.select_pattern(pattern=obj.name)
bpy.context.scene.objects.active = bpy.data.objects[obj.name]
bpy.ops.object.duplicate_move()
bpy.ops.object.editmode_toggle()
bpy.ops.curve.select_all(action='SELECT')
bpy.ops.curve.subdivide()
bpy.ops.curve.subdivide()
bpy.ops.object.editmode_toggle()
bpy.context.object.name = "A_SK_TEMP_CURVE"
# Append the first duplicate.
all_strokes_curves.append(bpy.context.object)
if mirrored_curve:
bpy.ops.object.duplicate_move()
bpy.ops.transform.mirror(proportional='DISABLED', proportional_edit_falloff='SMOOTH', proportional_size=1, constraint_axis=(True, False, False), constraint_orientation='GLOBAL')
bpy.ops.object.transform_apply(scale=True)
bpy.context.object.name = "A_SK_TEMP_CURVE"
# Append the mirrored duplicate
all_strokes_curves.append(bpy.context.object)
#### Create the Metaball object for each curve and set its properties.
strokes_total_time = 0
curves_count = 1
mballs_num = 1
all_mballs = []
for curve_obj in all_strokes_curves:
bpy.ops.object.select_pattern(pattern = curve_obj.name)
bpy.context.scene.objects.active = bpy.data.objects[curve_obj.name]
pts = bpy.data.objects[curve_obj.name].data.splines[0].bezier_points
mballs_count = 0
mballs_start_time = time.time()
first_pt = True
for p in pts:
# Radius of the metaball not less than the minimum wire resolution
if p.radius < self.stroke_wire_resolution:
mball_radius = self.ball_brush_size / 2 * self.mballs_size_compensation * self.stroke_wire_resolution*2
else:
mball_radius = self.ball_brush_size / 2 * self.mballs_size_compensation * p.radius*2
new_mball_created = False
if first_pt:
mball_object = self.create_metaball(p, mball_radius, self.final_mesh_name + str(mballs_num))
new_mball_created = True
first_pt = False
else:
prev_mball = bpy.data.metaballs[prev_mball_object.data.name]
prev_pt_loc = prev_pt.co
pts_difs = [prev_pt_loc[0] - p.co[0], prev_pt_loc[1] - p.co[1], prev_pt_loc[2] - p.co[2]]
pts_distance = abs(sqrt(pts_difs[0] * pts_difs[0] + pts_difs[1] * pts_difs[1] + pts_difs[2] * pts_difs[2]))
# Checks if the distance between the previous point with a metaball and the current point is long enough to "deserve" a new metaball.
if ((prev_mball.elements[0].radius * self.ball_brush_size + mball_radius) / self.stroke_definition < pts_distance + mball_radius / 10):
mball_object = self.create_metaball(p, mball_radius, self.final_mesh_name + str(mballs_num))
new_mball_created = True
if new_mball_created:
#mball_object.data.threshold = 0
mball_object.data.elements[0].hide = True
all_mballs.append(mball_object)
prev_mball_object = mball_object
prev_pt = p
mballs_num += 1
mballs_count += 1
stroke_time = time.time() - mballs_start_time
strokes_total_time += stroke_time
print("DONE " + str(curves_count) + " strokes of " + str(len(all_strokes_curves)))
#print("Metaballs: " + str(mballs_count) + " Time: " + str(time.time() - mballs_start_time) + "Points: " + str(len(pts)))
print(".............................................. total time: " + str(int(strokes_total_time)) + " seconds")
print("")
curves_count += 1
bpy.ops.object.select_pattern(pattern= self.main_object.name)
bpy.context.scene.objects.active = self.main_object
def invoke (self, context, event):
self.main_object = bpy.context.object
self.ball_brush_size = 1
self.mballs_stiffness = 2
self.mballs_size_compensation = 0.9
self.stroke_definition = bpy.context.object.ASKETCH_mball_stroke_definition
self.stroke_wire_resolution = bpy.context.object.ASKETCH_mball_wire_resolution
self.final_mesh_name = self.main_object.name + ".SKMB"
self.x_mirror_on = bpy.context.scene.ASKETCH_x_mirror_on
self.execute(context)
return {"FINISHED"}
#--------------- CONVERT STEP 2-----------------------------------
# Metaballs rename
class ASKETCH_MetaballsRename(bpy.types.Operator):
"""Converts all currently selected strokes to a combined mesh, using metaballs. PLEASE NOTE - Press each step button in succession to fully convert the object. This may take a few minutes to complete."""
bl_idname = "object.asketch_metaballs_rename"
bl_label = "Array Sketch Metaballs Rename"
@classmethod
def poll(cls, context):
for obj in context.selected_objects:
return obj.name.find(".SKO") != -1
def execute(self, context):
renamed_mballs_count = 1
for mb in self.metaballs_objects:
mb.data.elements[0].hide = False
mb.name = self.final_mesh_name
print("Meshing metaballs...")
bpy.data.objects[self.final_mesh_name].data.resolution = self.stroke_wire_resolution
bpy.data.objects[self.final_mesh_name].data.threshold = 0.6
bpy.ops.object.select_pattern(pattern= self.main_object.name)
bpy.context.scene.objects.active = self.main_object
def invoke (self, context, event):
self.main_object = bpy.context.object
self.stroke_wire_resolution = bpy.context.object.ASKETCH_mball_wire_resolution
self.final_mesh_name = self.main_object.name + ".SKMB"
self.metaballs_objects = []
for ob in bpy.data.objects:
if ob.name.find(self.final_mesh_name) != -1:
self.metaballs_objects.append(ob)
self.execute(context)
return {"FINISHED"}
#--------------- CONVERT STEP 3-----------------------------------
# Convert metaballs to mesh.
class ASKETCH_MetaballsToMesh(bpy.types.Operator):
"""Converts all currently selected strokes to a combined mesh, using metaballs. PLEASE NOTE - Press each step button in succession to fully convert the object. This may take a few minutes to complete."""
bl_idname = "object.asketch_metaballs_to_mesh"
bl_label = "Array Sketch Metaballs To Mesh"
@classmethod
def poll(cls, context):
for obj in context.selected_objects:
return obj.name.find(".SKO") != -1
def execute(self, context):
if not self.starting_from_fixed_mesh:
print("STAGE 1 of 4: Converting to Mesh...")
start_time = time.time()
bpy.ops.object.select_pattern(pattern= self.metaballs_object.name)
bpy.context.scene.objects.active = self.metaballs_object
bpy.ops.object.convert(target='MESH', keep_original = False)
print("DONE... Time: " + str(time.time() - start_time) + " seconds")
print("Preparing next stage...")
print("")
mesh_object = bpy.context.selected_objects[0]
bpy.context.scene.objects.active = mesh_object
#### Setting mesh's origin.
cursor_loc = bpy.data.scenes[bpy.context.scene.name].cursor_location
previous_cursor_loc = [cursor_loc[0], cursor_loc[1], cursor_loc[2]]
bpy.ops.object.select_pattern(pattern= mesh_object.name)
bpy.context.scene.objects.active = bpy.context.scene.objects[mesh_object.name]
bpy.data.scenes[bpy.context.scene.name].cursor_location = self.main_object.location
bpy.ops.object.origin_set(type='ORIGIN_CURSOR')
bpy.data.scenes[bpy.context.scene.name].cursor_location = previous_cursor_loc
#### Make it child of the main object
mesh_object.name = self.main_object.name + ".SKMTMP"
mesh_object.parent = self.main_object
mesh_object.location = [0, 0, 0]
#### Delete metaballs
for obj in bpy.data.objects:
if obj.name.find(self.main_object.name + ".SKMB") != -1:
bpy.ops.object.select_pattern(pattern= obj.name)
bpy.context.scene.objects.active = obj
bpy.ops.object.delete()
#### Delete all temporal curves.
for obj in bpy.data.objects:
if obj.name.find("A_SK_TEMP_CURVE") != -1:
bpy.ops.object.select_pattern(pattern= obj.name)
bpy.context.scene.objects.active = bpy.data.objects[obj.name]
bpy.ops.object.delete()
else:
mesh_object = bpy.data.objects[self.temp_mesh.name]
print("STAGE 1 of 4: Converting to Mesh...")
print("Already converted. Preparing next stage...")
print("")
#### Cleaning mesh result.
####################################
FocusObject(mesh_object.name)
#bpy.ops.object.select_pattern(pattern = mesh_object.name)
#bpy.context.scene.objects.active = mesh_object
#### Check if the mesh has non-manifold areas.
#ISSUE LINE HERE!
print("--------------------------Current Object----------------------")
print(self)
print(dir(self))
print("--------------------------Current Context----------------------")
print(context)
print(dir(context))
bpy.ops.object.editmode_toggle()
bpy.ops.mesh.select_all(action='DESELECT')
bpy.ops.mesh.select_non_manifold()
bpy.ops.object.editmode_toggle()
is_non_manifold = False
for v in bpy.data.objects[mesh_object.name].data.vertices:
if v.select:
is_non_manifold = True
break
#### If the resulting mesh is manifold, do the mesh optimizations.
if not is_non_manifold:
#### To keep temporarily a copy of the non-decimated mesh.
non_decimated_object = bpy.context.object
bpy.ops.object.duplicate_move()
# Decimate.
print("STAGE 2 of 4: Decimating...")
start_time = time.time()
bpy.ops.object.modifier_add(type='DECIMATE')
bpy.context.object.modifiers["Decimate"].ratio = 0.02
bpy.ops.object.convert(target='MESH', keep_original = False)
print("STAGE DONE... Time: " + str(time.time() - start_time) + " seconds")
print("Preparing next stage...")
print("")
# Tris to Quads.
print("STAGE 3 of 4: Making all Quads...")
start_time = time.time()
bpy.ops.object.editmode_toggle()
bpy.ops.mesh.select_all(action='SELECT')
bpy.ops.mesh.tris_convert_to_quads()
bpy.ops.mesh.tris_convert_to_quads()
bpy.ops.object.editmode_toggle()
# One level of Subdivision.
bpy.ops.object.modifier_add(type='SUBSURF')
bpy.context.object.modifiers["Subsurf"].levels = 1
bpy.ops.object.convert(target='MESH', keep_original = False)
print("DONE... Time: " + str(time.time() - start_time) + " seconds")
print("Preparing next stage...")
print("")
# Smooth shading for faces
bpy.ops.object.editmode_toggle()
bpy.ops.mesh.select_all(action='SELECT')
bpy.ops.mesh.faces_shade_smooth()
bpy.ops.object.editmode_toggle()
# Shrinkwrap and smooth results to the non-decimated mesh.
print("STAGE 4 of 4: Fitting...")
start_time = time.time()
bpy.ops.object.modifier_add(type='SHRINKWRAP')
bpy.context.object.modifiers["Shrinkwrap"].wrap_method = "PROJECT"
bpy.context.object.modifiers["Shrinkwrap"].use_negative_direction = True
bpy.context.object.modifiers["Shrinkwrap"].use_positive_direction = True
bpy.context.object.modifiers["Shrinkwrap"].cull_face = 'FRONT'
bpy.context.object.modifiers["Shrinkwrap"].target = non_decimated_object
bpy.ops.object.convert(target='MESH', keep_original = False)
print("DONE... Time: " + str(time.time() - start_time) + " seconds")
print("")
# Add Multires
bpy.ops.object.modifier_add(type='MULTIRES')
bpy.context.object.modifiers["Multires"].show_only_control_edges = True
#### Name the resulting mesh.
bpy.context.object.name = self.main_object.name + ".SKME"
#### Apply the material of the main object to the new mesh
if len(bpy.data.objects[self.main_object.name].material_slots) > 0:
bpy.ops.object.material_slot_add()
                bpy.data.objects[bpy.context.object.name].material_slots[0].material = bpy.data.objects[self.main_object.name].material_slots[0].material
#### Delete non-decimated mesh
bpy.ops.object.select_pattern(pattern= non_decimated_object.name)
bpy.context.scene.objects.active = non_decimated_object
bpy.ops.object.delete()
else:
print("WARNING: There are non-manifold areas in the resulting mesh")
print("(To solve this fix the non-manifold areas (now selected) and then press STEP 3 again")
#### Select main object.
bpy.ops.object.select_pattern(pattern= self.main_object.name)
bpy.context.scene.objects.active = self.main_object
#### Hide all strokes
for obj in bpy.data.objects:
if obj.name.find(self.main_object.name + ".SKO") != -1 or obj.name.find(self.main_object.name + ".SKC") != -1:
                bpy.data.objects[obj.name].hide = True
        return {'FINISHED'}
def invoke (self, context, event):
#### Check if the resulting mesh with non-manifold areas is selected, to change selection to main object.
if bpy.context.object.name.find(".SKMTMP") != -1:
self.main_object = bpy.context.object.parent
self.final_mesh_name = self.main_object.name + ".SKMB"
bpy.ops.object.select_pattern(pattern= self.main_object.name)
bpy.context.scene.objects.active = self.main_object
else:
self.main_object = bpy.context.object
self.final_mesh_name = self.main_object.name + ".SKMB"
#### Check if there is a Metaballs object
if self.main_object.name + ".SKMB" in bpy.data.objects:
self.metaballs_object = bpy.data.objects[self.main_object.name + ".SKMB"]
#### Check if there is a previous (not decimated) mesh.
self.starting_from_fixed_mesh = False
if self.main_object.name + ".SKMTMP" in bpy.data.objects:
self.starting_from_fixed_mesh = True
self.temp_mesh = bpy.data.objects[self.main_object.name + ".SKMTMP"]
self.execute(context)
return {"FINISHED"}
#---------------TOGGLE STROKES/BAKED MESH-----------------------------------
# Toggle visibility between Strokes and "baked" Mesh object.
class ASKETCH_ToggleMeshVisibility(bpy.types.Operator):
bl_idname = "object.asketch_toggle_mesh_visibility"
bl_label = "Array Sketch Smooth Stroke Size"
def execute(self, context):
mesh_obj_name = self.main_object.name + ".SKME"
if mesh_obj_name in bpy.data.objects:
if (bpy.data.objects[mesh_obj_name].hide == True):
bpy.data.objects[mesh_obj_name].hide = False
for obj in bpy.data.objects:
if (obj.name.find(self.main_object.name + ".SKO") != -1 or obj.name.find(self.main_object.name + ".SKC") != -1):
bpy.data.objects[obj.name].hide = True
else:
bpy.data.objects[mesh_obj_name].hide = True
for obj in bpy.data.objects:
if (obj.name.find(self.main_object.name + ".SKO") != -1 or obj.name.find(self.main_object.name + ".SKC") != -1):
bpy.data.objects[obj.name].hide = False
else:
for obj in bpy.data.objects:
if (obj.name.find(self.main_object.name + ".SKO") != -1 or obj.name.find(self.main_object.name + ".SKC") != -1):
bpy.data.objects[obj.name].hide = False
bpy.ops.object.select_pattern(pattern= self.main_object.name)
        bpy.context.scene.objects.active = self.main_object
        return {'FINISHED'}
def invoke (self, context, event):
if bpy.context.object.name.find(".SKME") != -1:
self.main_object = bpy.data.objects[bpy.context.object.name.split(".SKME")[0]]
bpy.ops.object.select_pattern(pattern= self.main_object.name)
bpy.context.scene.objects.active = self.main_object
else:
self.main_object = bpy.context.object
self.execute(context)
return {"FINISHED"}
#//////////////////////// - REGISTER/UNREGISTER DEFINITIONS - ////////////////////////
def register():
bpy.utils.register_module(__name__)
#kc = bpy.context.window_manager.keyconfigs.addon
#km = kc.keymaps.new(name="3D View", space_type="VIEW_3D")
#keymap_item_stroke_draw = km.keymap_items.new("gpencil.asketch_stroke_draw","G","PRESS", key_modifier="D")
#keymap_item_delete_strokes = km.keymap_items.new("object.asketch_delete_strokes","F","PRESS")
#keymap_item_stroke_smooth_size = km.keymap_items.new("object.asketch_stroke_smooth_size","Y","PRESS")
#keymap_item_stroke_editmode = km.keymap_items.new("gpencil.asketch_stroke_editmode_toggle","TAB","PRESS", key_modifier="D")
def unregister():
bpy.utils.unregister_module(__name__)
#kc = bpy.context.window_manager.keyconfigs.addon
#km = kc.keymaps["3D View"]
#for kmi in km.keymap_items:
# if kmi.idname == 'wm.call_menu':
# if kmi.properties.name == "GPENCIL_OT_ASKETCH_stroke_draw":
# km.keymap_items.remove(kmi)
# print('a')
# elif kmi.properties.name == "OBJECT_OT_ASKETCH_delete_strokes":
# km.keymap_items.remove(kmi)
# print('a')
# elif kmi.properties.name == "OBJECT_OT_ASKETCH_stroke_smooth_size":
# km.keymap_items.remove(kmi)
# print('a')
# elif kmi.properties.name == "OBJECT_OT_ASKETCH_stroke_editmode":
# km.keymap_items.remove(kmi)
# print('a')
# else:
# continue
#//////////////////////////////// - SPACEBAR SEARCH- ////////////////////////////////
if __name__ == "__main__":
register()
| [
"[email protected]"
] | |
e487e448c7d3ccd0a1cb468036b823ff374beadc | 6e2622d1d036ec6306b45c6f221fa7366e27177e | /Day010/Day010Project/calculatorMachine.py | 32e09c233573343cd7b6bfd7e0ee9f9a7a33ae13 | [] | no_license | marcelo-gs/100DaysOfCode_Python | b597ef06e319e24a41d37f3b1ff3ad41f71d3515 | cd3bf34f6168234782935e5313e46801298e4bd0 | refs/heads/master | 2023-02-24T03:43:57.599178 | 2021-02-01T01:06:55 | 2021-02-01T01:06:55 | 321,506,371 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,548 | py | # import only system from os
from os import system, name
# define our clear function
def clear():
# for windows
if name == 'nt':
_ = system('cls')
# for mac and linux(here, os.name is 'posix')
else:
_ = system('clear')
logo = """
_____________________
| _________________ |
| | Pythonista 0. | | .----------------. .----------------. .----------------. .----------------.
| |_________________| | | .--------------. || .--------------. || .--------------. || .--------------. |
| ___ ___ ___ ___ | | | ______ | || | __ | || | _____ | || | ______ | |
| | 7 | 8 | 9 | | + | | | | .' ___ | | || | / \ | || | |_ _| | || | .' ___ | | |
| |___|___|___| |___| | | | / .' \_| | || | / /\ \ | || | | | | || | / .' \_| | |
| | 4 | 5 | 6 | | - | | | | | | | || | / ____ \ | || | | | _ | || | | | | |
| |___|___|___| |___| | | | \ `.___.'\ | || | _/ / \ \_ | || | _| |__/ | | || | \ `.___.'\ | |
| | 1 | 2 | 3 | | x | | | | `._____.' | || ||____| |____|| || | |________| | || | `._____.' | |
| |___|___|___| |___| | | | | || | | || | | || | | |
| | . | 0 | = | | / | | | '--------------' || '--------------' || '--------------' || '--------------' |
| |___|___|___| |___| | '----------------' '----------------' '----------------' '----------------'
|_____________________|
"""
def add(n1, n2):
return n1 + n2
def subtract(n1, n2):
return n1 - n2
def multiply(n1, n2):
return n1 * n2
def divide(n1, n2):
return n1 / n2
def exponencial(n1, n2):
return n1 ** n2
operations = {
"+": add,
"-": subtract,
"*": multiply,
"/": divide,
"**": exponencial
}
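# Dispatch table above maps the symbol the user types to its function.
# Note: divide() raises ZeroDivisionError when the second number is 0, and an
# unknown symbol raises KeyError at the dictionary lookup below.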
def calculator():
print(logo)
num1 = float(input("What's the first number?: "))
for symbol in operations:
print(symbol)
should_continue = True
while should_continue:
operation_symbol = input("Pick an operation: ")
num2 = float(input("What's the next number?: "))
calculation_function = operations[operation_symbol]
answer = calculation_function(num1, num2)
print(f"{num1} {operation_symbol} {num2} = {answer}")
if input(f"Type 'y' to continue calculating with {answer}, or type 'n' to start a new calculation: ") == 'y':
num1 = answer
else:
should_continue = False
clear()
calculator()
calculator()
| [
"[email protected]"
] | |
2e304e2f166036b259295aee5a9b13ebb9ac238d | f82236cb5a388846ea8c8980ac21d0e11f20171d | /Examination/16_小Q的歌单.py | 596e57a5c660c1cb14ea9489168bdedcaefc8f42 | [] | no_license | cp4011/Algorithms | f2889c219d68b597a38d86899d8ff682e386e649 | e7214e59640cd24d908a6b95d8876c9db9822d8b | refs/heads/master | 2020-04-27T20:21:43.937234 | 2019-08-18T15:12:09 | 2019-08-18T15:12:09 | 174,654,563 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,805 | py | """小Q有X首长度为A的不同的歌和Y首长度为B的不同的歌,现在小Q想用这些歌组成一个总长度正好为K的歌单,每首歌最多只能在歌单中
Little Q has X distinct songs of length A and Y distinct songs of length B. He wants to
build a playlist whose total length is exactly K; each song may appear in the playlist at
most once, and the order of the songs does not matter. How many playlists can he build?
Input:
Each input contains one test case.
The first line contains one integer, the total playlist length K (1<=K<=1000).
The next line contains four positive integers: song length A (A<=10) and count X (X<=100),
then song length B (B<=10) and count Y (Y<=100). A != B is guaranteed.
Output:
One integer, the number of possible playlists. Since the answer can be very large,
print the result modulo 1000000007.
Sample input 1:
5
2 3 3 3
Sample output 1:
9
"""
'''Approach: to reach total length K (with X distinct length-A songs and Y distinct
length-B songs, each used at most once, order ignored), take i songs of length A; if the
remainder K - A*i is divisible by B and (K - A*i) / B does not exceed Y, the pair is
feasible and contributes C(X, i) * C(Y, (K - A*i) / B) playlists.'''
K = int(input())
A, X, B, Y = map(int, input().split())
maxA = min(K // A, X)  # at most this many length-A songs can be used: the smaller of K // A and the X available
choice = []
for i in range(maxA+1):
    if (K-i*A) % B == 0:  # after taking i length-A songs, the remainder must be divisible by B
        tmp = (K-i*A) // B
        if tmp <= Y:  # and the required number of length-B songs must not exceed Y
            choice.append([i, tmp])
def combination(m,n):  # number of combinations C(m, n)
res = 1
for i in range(1, n+1):
res *= (m+1-i)
        res //= i  # integer division is required here
return res
res = 0
for cho in choice:
res += (combination(X, cho[0]))*(combination(Y, cho[1]))
print(res % 1000000007)  # remember to take the modulus
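# Worked sample: K=5, A=2, X=3, B=3, Y=3 -> only i=1 is feasible (5-2=3, 3//3=1 <= 3),
# contributing C(3,1)*C(3,1) = 9, which matches the sample output above.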
'''Test case pass rate: 80.00%
Failing case:
100
1 100 2 100
Expected output: 480218926
Actual output:   941226889
'''
def func(a, x, b, y, k):
def num_comb(p, q):
s1, s2 = 1, 1
for i in range(p-q+1, p+1):
s1 *= i
for j in range(1, q+1):
s2 *= j
return s1 // s2
l1, l2 = [], []
temp = []
for i in range(1, x+1):
l1.append(a*i)
for j in range(1, y+1):
l2.append(b*j)
for i in l1:
for j in l2:
if i+j == k:
temp.append([i, j])
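    # Note: because both loops start at 1, playlists that use only one song
    # length (all-A or all-B) are never counted here - the likely cause of the
    # failing case quoted above.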
ans = 0
for i in temp:
n = i[0] // a
m = i[1] // b
ans += num_comb(x, n) * num_comb(y, m)
return ans % 1000000007
k = int(input())
a, x, b, y = [int(i) for i in input().split()]
print(func(a, x, b, y, k))
| [
"[email protected]"
] | |
dcb9911944273018ed8122dfcf2f2c457e33e4e0 | 9d4dfc8ccb2d4f07671f446b5148e292f025658c | /illumidesk/decorators.py | 5720233732633b64998117b7eb988f7b1dce16bf | [
"MIT"
] | permissive | agenteAND/django | fb7274367c9a42cfc32d795ae08c040c98f136ca | b1c07ea48f1c09f191acc9e9b69b3f95ec37c15b | refs/heads/master | 2022-09-21T04:27:25.097507 | 2020-05-14T21:22:59 | 2020-05-14T21:22:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 487 | py | from stripe.error import CardError
from django.http import JsonResponse
class catch_stripe_errors:
"""
Meant to be used with django views only.
"""
def __init__(self, f):
self.f = f
def __call__(self, *args, **kwargs):
try:
return self.f(*args, **kwargs)
except CardError as e:
return JsonResponse({
'error': {
                    'message': e.user_message,  # public accessor for the card error message
}
}, status=400)
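# Example usage (hypothetical view function):
#   @catch_stripe_errors
#   def charge(request):
#       ...  # any stripe CardError raised inside becomes a 400 JSON response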
| [
"[email protected]"
] | |
443b81e5f94d75064912a20120333cfd62c26c9c | 677c870cba802a658c03cd3f44eccf8bab919c8d | /html_parser.py | 2cbc987210a518ef06d10a60b0544fe85e444be5 | [] | no_license | proseno/khnu_page_parser | 7ad81706b778bd6e34c9899324a5f35fbe2153b1 | 0baa304d47e18a9fe383f3df64437428a084e543 | refs/heads/master | 2023-05-30T06:09:24.039270 | 2021-05-25T20:40:20 | 2021-05-25T20:40:20 | 370,155,534 | 0 | 0 | null | 2021-06-02T07:41:44 | 2021-05-23T20:56:02 | Python | UTF-8 | Python | false | false | 2,192 | py | import requests
from bs4 import BeautifulSoup
import config
media_url = config.media_url
def get_html(url: str):
response = requests.get(url)
return BeautifulSoup(response.text, 'html.parser')
def validate_img(imgs: list):
images = []
    for img in imgs:
        # keep inline data URIs and absolute URLs as-is; only prefix relative paths
        if img.startswith('data:image') or img.startswith('http'):
            images.append(img)
        else:
            images.append(media_url + img)
return images
def get_img_src(html):
img_tags = html.findAll('img')
src = []
for img_tag in img_tags:
src.append(img_tag.attrs['src'])
return validate_img(src)
def get_a_href(html):
a_tags = html.findAll('a')
href = []
for a_tag in a_tags:
href.append(a_tag.attrs['href'])
return validate_href(href)
def validate_href(hrefs: list):
result = []
for href in hrefs:
if href.startswith('http'):
result.append(href)
else:
result.append(media_url + href)
return result
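# Example: a relative href such as 'news/1' becomes media_url + 'news/1',
# while 'https://example.com/page' is returned unchanged.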
def prepare_data(url: str):
html = get_html(url)
response = 'The result of parsing page: ' + config.site_url + ' \n'
if (config.images):
image_urls = get_img_src(html)
img_count = set_img_links(image_urls)
response += 'Image tag count = ' + str(img_count) + ' ,details in var/img_links.txt\n'
if (config.links):
links = get_a_href(html)
links_count = set_href(links)
response += 'Links tag count = ' + str(links_count) + ' , details in var/links.txt'
return response
def prepare_img_response(imgs: list):
response = 'Image urls on the matched page: \n'
for img in imgs:
response += img + '\n'
return response
def prepare_href_response(hrefs: list):
response = 'Links on the matched page: \n'
for href in hrefs:
response += href + '\n'
return response
def set_img_links(imgs: list):
    content = prepare_img_response(imgs)
    with open('var/img_links.txt', 'w') as file:
        file.write(content)
return len(imgs)
def set_href(hrefs: list):
    content = prepare_href_response(hrefs)
    with open('var/links.txt', 'w') as file:
        file.write(content)
return len(hrefs)
| [
"[email protected]"
] | |
03d9f6ea8d74c9e333c0aaade43fc75499d344f1 | 61afd923551491846ae827821f55c4fb5fd04c98 | /packages/levylab_lib_cryostation_instrument/levylab_lib_cryostation_instrument-1.1.6.14.spec | 8f7f20861cbb361567047f419ac7f8f4aa627395 | [
"BSD-3-Clause"
] | permissive | laserengineer/levylabpitt.github.io | b74b711aff2a5eb1b46f880a1071ac0873f1a9ac | cdf9aeb6faaf136211291ce2232c239229d85bbe | refs/heads/master | 2023-04-29T02:36:48.736236 | 2021-05-14T19:20:40 | 2021-05-14T19:20:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,036 | spec | [Package]
Name="levylab_lib_cryostation_instrument"
Version="1.1.6.14"
Release=""
ID=d119881f9f9cba80ac72480ce35bf7a0
File Format="vip"
Format Version="2017"
Display Name="Cryostation"
[Description]
Description="Program for logging Montana Instruments Cryostation status to the DSC. Provides a UI and API for getting information from the Cryostation."
Summary=""
License="BSD-3"
Copyright="Copyright (c) 2021, LevyLab"
Distribution=""
Vendor="LevyLab"
URL=""
Packager="Patrick Irvin"
Demo="FALSE"
Release Notes="[1.1.6]\0D\0A- Update to Instrument v1.8.3\0A- fix UI issues"
System Package="FALSE"
Sub Package="FALSE"
License Agreement="TRUE"
[LabVIEW]
close labview before install="FALSE"
restart labview after install="FALSE"
skip mass compile after install="FALSE"
[Platform]
Exclusive_LabVIEW_Version="LabVIEW>=16.0"
Exclusive_LabVIEW_System="ALL"
Exclusive_OS="ALL"
[Script VIs]
PreInstall=""
PostInstall=""
PreUninstall=""
PostUninstall=""
Verify=""
PreBuild=""
PostBuild=""
[Dependencies]
AutoReqProv=FALSE
Requires="jki_lib_caraya>=1.1.0.119,jki_lib_state_machine>=2018.0.7.45,jki_statemachineobjects>=1.3.0.56,lava_lib_ui_tools>=1.4.1.74,lvh_toolbox>=2.0.0.35,mgi_lib_application_control>=1.1.1.10,mgi_lib_cluster>=1.1.0.1,mgi_lib_error_handling>=1.1.1.3,mgi_lib_error_reporter>=1.0.2.5,mgi_lib_file>=1.1.0.4,mgi_lib_picture_&_image>=1.0.2.1,mgi_lib_read_write_anything>=2.1.4.4,mgi_lib_string>=1.1.1.5,national_instruments_lib_guid_generator>=1.0.2.3,ni_lib_stm>=3.1.0.9,oglib_appcontrol>=4.1.0.7,oglib_error>=4.2.0.23,oglib_file>=4.0.1.22,oglib_lvdata>=5.0.0.27,oglib_numeric>=4.1.0.8,oglib_string>=5.0.0.25,oglib_time>=4.0.1.3,oglib_variantconfig>=4.0.0.5,levylab_lib_levylab_instruments>=1.8.3.101"
Conflicts=""
[Activation]
License File=""
Licensed Library=""
[Files]
Num File Groups="3"
Sub-Packages=""
Namespaces=""
[File Group 0]
Target Dir="<application>"
Replace Mode="Always"
Num Files=31
File 0="user.lib/LevyLab/Cryostation/Cryostation Monitor and Control.vi"
File 1="user.lib/LevyLab/Cryostation/Cryostation_inst.lvproj"
File 2="user.lib/LevyLab/Cryostation/instrument.Cryostation UI/Instrument UI.Cryostation.lvclass"
File 3="user.lib/LevyLab/Cryostation/instrument.Cryostation UI/Launch Cryostation UI.vi"
File 4="user.lib/LevyLab/Cryostation/instrument.Cryostation UI/Process.vi"
File 5="user.lib/LevyLab/Cryostation/instrument.Cryostation/instrument.Cryostation.lvclass"
File 6="user.lib/LevyLab/Cryostation/instrument.Cryostation/instrument.Cryostation.TestLauncher.vi"
File 7="user.lib/LevyLab/Cryostation/instrument.Cryostation/Process.vi"
File 8="user.lib/LevyLab/Cryostation/instrument.Cryostation/Typedefs/Cryostation.Commands--enum.ctl"
File 9="user.lib/LevyLab/Cryostation/instrument.Cryostation/Typedefs/Cryostation.getAll--cluster.ctl"
File 10="user.lib/LevyLab/Cryostation/instrument.Cryostation/private/All Unit Tests.vi"
File 11="user.lib/LevyLab/Cryostation/instrument.Cryostation/private/Cryostation.Client.vi"
File 12="user.lib/LevyLab/Cryostation/instrument.Cryostation/private/Cryostation.Command Enum to String.vi"
File 13="user.lib/LevyLab/Cryostation/instrument.Cryostation/private/Cryostation.Get Chamber Pressure.vi"
File 14="user.lib/LevyLab/Cryostation/instrument.Cryostation/private/Cryostation.Get DBL.vi"
File 15="user.lib/LevyLab/Cryostation/instrument.Cryostation/private/Cryostation.Get Platform Temperature.vi"
File 16="user.lib/LevyLab/Cryostation/instrument.Cryostation/private/Cryostation.Get Sample Temperature.vi"
File 17="user.lib/LevyLab/Cryostation/instrument.Cryostation/private/Cryostation.Get Stage 1 Temperature.vi"
File 18="user.lib/LevyLab/Cryostation/instrument.Cryostation/private/Cryostation.Get Stage 2 Temperature.vi"
File 19="user.lib/LevyLab/Cryostation/instrument.Cryostation/private/Cryostation.Prepend Length.vi"
File 20="user.lib/LevyLab/Cryostation/instrument.Cryostation/private/Unit Test.vi"
File 21="user.lib/LevyLab/Cryostation/instrument.Cryostation/Overrides/Close Instrument.vi"
File 22="user.lib/LevyLab/Cryostation/instrument.Cryostation/Overrides/Get SMO Name.vi"
File 23="user.lib/LevyLab/Cryostation/instrument.Cryostation/Overrides/Get SMO PGSQL Log Paths.vi"
File 24="user.lib/LevyLab/Cryostation/instrument.Cryostation/Overrides/Get SMO Port.vi"
File 25="user.lib/LevyLab/Cryostation/instrument.Cryostation/Overrides/Get SMO Public API.vi"
File 26="user.lib/LevyLab/Cryostation/instrument.Cryostation/Overrides/getAll.vi"
File 27="user.lib/LevyLab/Cryostation/instrument.Cryostation/Overrides/Handle Command.vi"
File 28="user.lib/LevyLab/Cryostation/instrument.Cryostation/Overrides/Open Instrument.vi"
File 29="user.lib/LevyLab/Cryostation/instrument.Cryostation/API/Get Temperature.vi"
File 30="user.lib/LevyLab/Cryostation/instrument.Cryostation/API/Open.vi"
[File Group 1]
Target Dir="<menus>/Categories/LevyLab"
Replace Mode="Always"
Num Files=1
File 0="functions_LevyLab_lib_Cryostation_Instrument.mnu"
[File Group 2]
Target Dir="<menus>/Categories/LevyLab"
Replace Mode="If Newer"
Num Files=1
File 0="dir.mnu"
| [
"[email protected]"
] | |
0490440560a38678c76c944cc2ce8393e211b6ba | 6d79783fa490d209e19639276051fc9e7e470fba | /integration-test/1491-south-korean-shields.py | 8aac2e322fbca38a8279b6c1866c061e2849b73f | [] | permissive | va2ron1/vector-datasource | d9007c2558d6866f75679d1a711a62044fdc81ab | 0145844e27d1e3cb3256b449040511b6791febd7 | refs/heads/master | 2021-05-10T15:30:59.456868 | 2019-06-18T00:36:58 | 2019-06-18T00:36:58 | 118,549,672 | 0 | 0 | MIT | 2019-06-18T00:36:59 | 2018-01-23T03:03:00 | Python | UTF-8 | Python | false | false | 13,316 | py | # -*- encoding: utf-8 -*-
from . import FixtureTest
class SouthKoreanShields(FixtureTest):
def test_asianhighway(self):
import dsl
z, x, y = (16, 55875, 25370)
self.generate_fixtures(
dsl.is_in('KR', z, x, y),
# https://www.openstreetmap.org/way/547188348
dsl.way(547188348, dsl.tile_diagonal(z, x, y), {
'name:en': 'Tongil-ro', 'name': u'통일로', 'review': 'no',
'source': 'openstreetmap.org', 'highway': 'primary',
}),
dsl.relation(1, {
'alt_name': u'아주공로 1호선', 'int_ref': 'AH1', 'layer': '1',
'section': 'Korea', 'int_name': 'Asian Highway AH1',
'network': 'AH', 'name': u'아시안 하이웨이 1호선',
'name:en': 'Asian Highway AH1', 'ref': 'AH1', 'route': 'road',
'source': 'openstreetmap.org', 'state': 'connection',
'type': 'route', 'wikidata': 'Q494205', 'wikipedia': 'en:AH1',
}, ways=[547188348]),
)
self.assert_has_feature(
z, x, y, 'roads', {
'id': 547188348,
'shield_text': '1',
'network': 'AsianHighway',
})
def test_asianhighway_no_relation(self):
import dsl
z, x, y = (16, 55886, 25381)
self.generate_fixtures(
dsl.is_in('KR', z, x, y),
# https://www.openstreetmap.org/way/37399710
dsl.way(37399710, dsl.tile_diagonal(z, x, y), {
'tunnel:name:ko_rm': 'Namsan il ho teoneol', 'tunnel': 'yes',
'layer': '-2', 'name:en': 'Samil-daero', 'name': u'삼일대로',
'tunnel:name:ko': u'남산1호터널', 'name:ko': u'삼일대로',
'review': 'no', 'name:ko_rm': 'Samil-daero',
'tunnel:name:en': 'Namsan 1 Ho Tunnel',
'source': 'openstreetmap.org', 'ncat': u'광역시도로',
'oneway': 'yes', 'tunnel:name': u'남산1호터널', 'ref': 'AH1',
'toll': 'yes', 'highway': 'primary', 'name:ja': u'三一大路',
}),
)
self.assert_has_feature(
z, x, y, 'roads', {
'id': 37399710,
'shield_text': '1',
'network': 'AsianHighway',
})
def test_kr_expressway_rel_no_net(self):
import dsl
z, x, y = (16, 55975, 25658)
self.generate_fixtures(
dsl.is_in('KR', z, x, y),
# https://www.openstreetmap.org/way/90611594
dsl.way(90611594, dsl.tile_diagonal(z, x, y), {
'name:en': u'Tongyeong–Daejeon Expressway', 'lanes': '2',
'name': u'통영대전고속도로', 'name:ko': u'통영대전고속도로',
'name:ko_rm': 'Tongyeong-daejeon-gosokdoro',
'source': 'openstreetmap.org', 'maxspeed': '100',
'oneway': 'yes', 'ref': '35', 'highway': 'motorway',
}),
dsl.relation(1, {
'layer': '1', 'name:en': u'Tongyeong–Daejeon Expressway',
'name': u'통영대전고속도로', 'name:ko': u'통영대전고속도로',
'type': 'route', 'route': 'road',
'source': 'openstreetmap.org', 'ref': '35',
}, ways=[90611594]),
)
self.assert_has_feature(
z, x, y, 'roads', {
'id': 90611594,
'shield_text': '35',
'network': 'KR:expressway',
})
def test_kr_expressway(self):
import dsl
z, x, y = (16, 55904, 25415)
self.generate_fixtures(
dsl.is_in('KR', z, x, y),
# https://www.openstreetmap.org/way/59242897
dsl.way(59242897, dsl.tile_diagonal(z, x, y), {
'name:en': 'Seoul Ring Expressway', 'lanes': '4',
'name': u'서울외곽순환고속도로',
'name:ko': u'서울외곽순환고속도로',
'name:ko_rm': 'Seouloegwaksunhwangosokdoro',
'source': 'openstreetmap.org', 'oneway': 'yes', 'ref': '100',
'highway': 'motorway',
}),
dsl.relation(1, {
'name:en': 'Seoul Ring Expressway(KEC), bound for '
'Pangyo(Ilsan)',
'name': u'서울외곽순환고속도로(도로공사) 판교(일산)방향',
'name:ko': u'서울외곽순환고속도로(도로공사) 판교(일산)방향',
'route': 'road', 'source': 'openstreetmap.org',
'operator': 'Korea Expressway Corporation', 'type': 'route',
'road': 'kr:expressway', 'network': 'KR:expressway',
}, ways=[59242897]),
)
self.assert_has_feature(
z, x, y, 'roads', {
'id': 59242897,
'shield_text': '100',
'network': 'KR:expressway',
})
def test_kr_national(self):
import dsl
z, x, y = (16, 55864, 25396)
self.generate_fixtures(
dsl.is_in('KR', z, x, y),
# https://www.openstreetmap.org/way/71503022
dsl.way(71503022, dsl.tile_diagonal(z, x, y), {
'name:en': 'Nambusunhwan-ro', 'name': u'남부순환로',
'name:ko': u'남부순환로', 'source': 'openstreetmap.org',
'oneway': 'yes', 'ref': '92', 'highway': 'primary',
'name:ja': u'南部循環路',
}),
dsl.relation(1, {
'type': 'route', 'route': 'road', 'ref': '92',
'network': 'KR:national', 'source': 'openstreetmap.org',
}, ways=[71503022]),
)
self.assert_has_feature(
z, x, y, 'roads', {
'id': 71503022,
'shield_text': '92',
'network': 'KR:national',
})
def test_kr_national_no_rel(self):
import dsl
z, x, y = (16, 56158, 25837)
self.generate_fixtures(
dsl.is_in('KR', z, x, y),
# https://www.openstreetmap.org/way/542451694
dsl.way(542451694, dsl.tile_diagonal(z, x, y), {
'name:en': 'Upo 1-daero', 'name': u'우포1대로',
'name:ko': u'우포1대로', 'review': 'no',
'source': 'openstreetmap.org', 'highway': 'primary',
'ref': '20;24', 'ncat': u'국도',
}),
)
self.assert_has_feature(
z, x, y, 'roads', {
'id': 542451694,
'shield_text': '20', 'network': 'KR:national',
'all_shield_texts': ['20', '24'],
'all_networks': ['KR:national', 'KR:national'],
})
def test_kr_expressway_no_rel(self):
import dsl
z, x, y = (16, 55923, 25876)
self.generate_fixtures(
dsl.is_in('KR', z, x, y),
# https://www.openstreetmap.org/way/574671133
dsl.way(574671133, dsl.tile_diagonal(z, x, y), {
'name:en': 'Gwangjudaegu Expressway',
'name': u'광주대구고속도로', 'name:ko': u'광주대구고속도로',
'review': 'no', 'name:ko_rm': 'Gwangjudaegugosokdoro',
'source': 'openstreetmap.org', 'maxspeed': '80',
'ncat': u'고속도로', 'oneway': 'yes', 'ref': '12',
'highway': 'motorway',
}),
)
self.assert_has_feature(
z, x, y, 'roads', {
'id': 574671133,
'shield_text': '12',
'network': 'KR:expressway',
})
def test_kr_expressway_no_name_en(self):
import dsl
z, x, y = (16, 56165, 25760)
self.generate_fixtures(
dsl.is_in('KR', z, x, y),
# https://www.openstreetmap.org/way/43543281
dsl.way(43543281, dsl.tile_diagonal(z, x, y), {
'lanes': '2', 'name': u'중부내륙고속도로지선', 'review': 'no',
'source': 'openstreetmap.org', 'highway': 'motorway',
'oneway': 'yes', 'ref': '451', 'ncat': u'고속도로',
}),
dsl.relation(1, {
'type': 'route', 'route': 'road', 'ref': '451',
'source': 'openstreetmap.org',
}, ways=[43543281]),
)
self.assert_has_feature(
z, x, y, 'roads', {
'id': 43543281,
'shield_text': '451',
'network': 'KR:expressway',
})
def test_kr_expressway_no_name_en_no_ncat(self):
# same as the test above, but without the "ncat" to test that it
# backfills from the name.
import dsl
z, x, y = (16, 56165, 25760)
self.generate_fixtures(
dsl.is_in('KR', z, x, y),
# https://www.openstreetmap.org/way/43543281
dsl.way(43543281, dsl.tile_diagonal(z, x, y), {
'lanes': '2', 'name': u'중부내륙고속도로지선', 'review': 'no',
'source': 'openstreetmap.org', 'highway': 'motorway',
'oneway': 'yes', 'ref': '451',
}),
dsl.relation(1, {
'type': 'route', 'route': 'road', 'ref': '451',
'source': 'openstreetmap.org',
}, ways=[43543281]),
)
self.assert_has_feature(
z, x, y, 'roads', {
'id': 43543281,
'shield_text': '451',
'network': 'KR:expressway',
})
def test_kr_jungbunaeryukgosokdoro(self):
import dsl
z, x, y = (16, 56156, 25839)
self.generate_fixtures(
dsl.is_in('KR', z, x, y),
# https://www.openstreetmap.org/way/562319872
dsl.way(562319872, dsl.tile_diagonal(z, x, y), {
'name:en': 'Jungbunaeryuk Expressway', 'lanes': '2',
'name': u'중부내륙고속도로', 'name:ko': u'중부내륙고속도로',
'review': 'no', 'name:ko_rm': 'Jungbunaeryukgosokdoro',
'source': 'openstreetmap.org', 'ncat': u'고속도로',
'oneway': 'yes', 'ref': '45', 'toll': 'yes',
'highway': 'motorway',
}),
dsl.relation(1, {
'name:en': 'Jungbunaeryuk Expressway',
'name': u'중부내륙고속도로', 'name:ko': u'중부내륙고속도로',
'ref': '45', 'route': 'road', 'source': 'openstreetmap.org',
'type': 'route',
}, ways=[562319872]),
)
self.assert_has_feature(
z, x, y, 'roads', {
'id': 562319872,
'shield_text': '45',
'network': 'KR:expressway',
})
def test_kr_upo_2_ro(self):
import dsl
z, x, y = (16, 56158, 25837)
self.generate_fixtures(
dsl.is_in('KR', z, x, y),
# https://www.openstreetmap.org/way/179815107
dsl.way(179815107, dsl.tile_diagonal(z, x, y), {
'name:en': 'Upo 2-ro', 'name': u'우포2로', 'name:ko': u'우포2로',
'review': 'no', 'source': 'openstreetmap.org',
'highway': 'secondary', 'ref': '1080', 'ncat': u'지방도',
}),
)
self.assert_has_feature(
z, x, y, 'roads', {
'id': 179815107,
'shield_text': '1080',
'network': 'KR:local',
})
def test_kr_special_city(self):
import dsl
z, x, y = (16, 55879, 25372)
self.generate_fixtures(
dsl.is_in('KR', z, x, y),
# https://www.openstreetmap.org/way/37395768
dsl.way(37395768, dsl.tile_diagonal(z, x, y), {
'bridge': 'viaduct', 'layer': '2',
'name:en': 'Naebusunhwan-ro', 'bicycle': 'no',
'name': u'내부순환로', 'name:ko': u'내부순환로', 'review': 'no',
'source': 'openstreetmap.org', 'ncat': u'특별시도',
'oneway': 'yes', 'ref': '30', 'highway': 'trunk',
'name:ja': u'内部循環路',
}),
)
self.assert_has_feature(
z, x, y, 'roads', {
'id': 37395768,
'shield_text': '30',
'network': 'KR:metropolitan',
})
def test_kr_metropolitan(self):
import dsl
z, x, y = (16, 56178, 25761)
self.generate_fixtures(
dsl.is_in('KR', z, x, y),
# https://www.openstreetmap.org/way/577716125
dsl.way(577716125, dsl.tile_diagonal(z, x, y), {
'name:en': 'Jungang-daero',
'name': u'중앙대로', 'name:ko': u'중앙대로', 'review': 'no',
'name:ko_rm': 'Jungangdaero', 'source': 'openstreetmap.org',
'highway': 'primary', 'ref': '61', 'ncat': u'광역시도로',
}),
)
self.assert_has_feature(
z, x, y, 'roads', {
'id': 577716125,
'shield_text': '61',
'network': 'KR:metropolitan',
})
| [
"[email protected]"
] | |
674e623c45465fc203f725564aee708d13f84126 | 0af5dd89984fbfd4b6b2d699b1f5cede0cf51a0a | /10/10.py | d227addf1fead6284496707d6c8d0afda8b0528f | [] | no_license | seanyliu/advent2020 | 2c9a6f63116aa8eb672e45ef3d0b2f1bb7fbb1e3 | fd922f9c89dd04ab199c4c493c1debd1253c74cd | refs/heads/main | 2023-02-03T18:47:35.635694 | 2020-12-25T06:39:42 | 2020-12-25T06:39:42 | 319,216,170 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,185 | py | # regular imports ########################
import math
import os, sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import helpers
# functions ##############################
# actual code ############################
input_lines = helpers.read_lines_from_file('input.txt')
input_lines = helpers.convert_array_to_int(input_lines)
# Part 1 #################################
input_lines.sort()
current_jolt = 0
count_singles = 0
count_triples = 1 # built-in is 3 higher
for jolt in input_lines:
delta = jolt - current_jolt
if delta == 1:
count_singles += 1
elif delta == 3:
count_triples += 1
current_jolt = jolt
print(count_singles * count_triples)
# Part 2 #################################
#input_lines.append(input_lines[len(input_lines)-1]+3)
input_lines.insert(0, 0)
paths_to_get_to = {}
print(input_lines)
for i in range(len(input_lines)):
curr_jolt = input_lines[i]
print("starting at curr_jolt: "+str(curr_jolt))
for j in range(i+1, len(input_lines)):
next_jolt = input_lines[j]
if next_jolt - curr_jolt <= 3:
if next_jolt not in paths_to_get_to:
paths_to_get_to[next_jolt] = set()
paths_to_get_to[next_jolt].add(curr_jolt)
else:
break
print(paths_to_get_to)
mem_count = {}
# special count the first node as a 1
mem_count[input_lines[0]] = 1
for node in paths_to_get_to:
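    # dict preserves insertion order (Python 3.7+) and nodes were inserted in
    # ascending jolt order, so every predecessor's count is final before use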
if node not in mem_count:
mem_count[node] = 0
count = 0
for incoming in paths_to_get_to[node]:
count += mem_count[incoming]
mem_count[node] = count
print(mem_count)
print(mem_count[input_lines[len(input_lines)-1]])
# part 2: dynamic programming - the number of ways to reach an adapter is the
# sum (not the product) of the ways to reach each adapter within 3 jolts below it
"""
(0), 1, 4, 5, 6, 7, 10, 11, 12, 15, 16, 19, (22)
(0), 1, 4, 5, 6, 7, 10, 12, 15, 16, 19, (22)
(0), 1, 4, 5, 7, 10, 11, 12, 15, 16, 19, (22)
(0), 1, 4, 5, 7, 10, 12, 15, 16, 19, (22)
(0), 1, 4, 6, 7, 10, 11, 12, 15, 16, 19, (22)
(0), 1, 4, 6, 7, 10, 12, 15, 16, 19, (22)
(0), 1, 4, 7, 10, 11, 12, 15, 16, 19, (22)
(0), 1, 4, 7, 10, 12, 15, 16, 19, (22)
{4: {1}, 5: {4}, 6: {4, 5}, 7: {4, 5, 6}, 10: {7}, 11: {10}, 12: {10, 11}, 15: {12}, 16: {15}, 19: {16}}
1 4 5
"""
| [
"[email protected]"
] | |
a2d93775dabbb32932e0e985b631aed8e60ef85e | 0db8321288a365f655fa64ffaa619edc60efa561 | /venv/bin/chardetect | c14e6a556617048cb058ebeafb583bf57490d33f | [] | no_license | anarya007x3/MyFirstBlogOnDjango | 27ed851a674d6cd3c025ee370c39005e11fa8410 | 5855811983dad7637b1c738cd8b988a2563a9d45 | refs/heads/master | 2023-06-08T20:04:19.908467 | 2018-06-12T11:27:36 | 2018-06-12T11:27:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 269 | #!/home/user/Documents/Ananstasiia/blogProject/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from chardet.cli.chardetect import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"[email protected]"
] | ||
6a69052e64507ccba030432fd997058677ffd1e0 | d858096f34d23cf5dd2596cd2e36a8a43a52c6ed | /launchers/Ecsv2compare.py | a386672072b4faa9fdeb381acd835927fa12d30e | [
"CC-BY-4.0"
] | permissive | marieBvr/virAnnot | 43aff595befc9f3da817253020310f2b6099039d | 2dd85e0234bad3fc0044c76e978be5db001620dd | refs/heads/master | 2023-04-12T21:46:41.576965 | 2020-07-02T09:37:29 | 2020-07-02T09:37:29 | 124,216,217 | 3 | 1 | null | 2018-03-07T10:02:26 | 2018-03-07T10:02:25 | null | UTF-8 | Python | false | false | 1,663 | py | # to allow code to work with Python 2 and 3
from __future__ import print_function # print is a function in python3
from __future__ import unicode_literals # avoid adding "u" to each string
from __future__ import division # avoid writing float(x) when dividing by x
import os.path
import logging as log
import sys
class Ecsv2compare:
def __init__ (self, args):
self.check_args(args)
self.cmd = []
self.create_cmd()
def create_cmd (self):
cmd = 'ecsv2compare.py'
for c in self.blast_files:
cmd += ' -b ' + str(c)
if self.rps_file != '':
cmd += ' -r ' + self.rps_file
cmd += ' -o ' + self.out
log.debug(cmd)
self.cmd.append(cmd)
def check_args (self, args=dict):
self.execution=1
self.sample = args['sample']
self.wd = os.getcwd() + '/' + self.sample
self.cmd_file = self.wd + '/' + 'ecsv2compare_cmd.txt'
if 'out' in args:
self.out = self.wd + '/' + args['out']
self.blast_files = []
for i in range(1, 10, 1):
            opt_name = 'b' + str(i)
if opt_name in args:
if os.path.exists(self.wd + '/' + args[opt_name]):
self.blast_files.append(self.wd + '/' + args[opt_name])
if 'r' in args:
if os.path.exists(self.wd + '/' + args['r']):
self.rps_file = self._check_file(self.wd + '/' + args['r'])
else:
self.rps_file = ''
else:
self.rps_file = ''
if len(self.blast_files) == 0:
self.execution=0
if 'sge' in args:
self.sge = bool(args['sge'])
else:
self.sge = False
if 'n_cpu' in args:
self.n_cpu = str(args['n_cpu'])
else:
self.n_cpu = '1'
    def _check_file(self, f):
        try:
            open(f).close()  # only verifying that the file can be opened
return f
except IOError:
print('File not found ' + f)
sys.exit(1)
| [
"[email protected]"
] | |
0bec2e6db74489df2b638de7366dfb69cba71b50 | b037e36771bd47cb84a9ce9745ffeac801256b56 | /hot_info/theverge_info/theverge_info/items.py | db59aa862f23f73e847aa9dac6994e1c3683a81c | [] | no_license | SuperMMORPG/scrapy_project | 7004714ede620bddf30dd15d8779322959087280 | ccac1089de94064077444026b3a1f484910d7543 | refs/heads/main | 2023-05-15T02:59:24.195936 | 2021-06-09T15:43:03 | 2021-06-09T15:43:03 | 344,821,692 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 268 | py | # Define here the models for your scraped items
#
# See documentation in:
# https://docs.scrapy.org/en/latest/topics/items.html
import scrapy
class ThevergeInfoItem(scrapy.Item):
# define the fields for your item here like:
# name = scrapy.Field()
pass
| [
"[email protected]"
] | |
f0b46609d6049e4e7911d31a9c686bc43faa06ae | a0098a078c5abe8b955e0a927e398bde2d054d85 | /혜선/18-2.py | 0113ceef42de92fac696f0b3f8332b8479a01b57 | [] | no_license | baehanjae/21_Summmer_PPS | 39af8339cb74cb1bf1abed8e97c9eba8532a9643 | 2d0f33424be53d25d994d5500fa11b94a06fd522 | refs/heads/master | 2023-06-27T04:31:18.423748 | 2021-08-01T12:44:39 | 2021-08-01T12:44:39 | 383,336,120 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 539 | py | n=int(input())
dp = [100001]*(n+1) # sentinel larger than the maximum possible coin count
dp[0]=0 # amount 0 won can be paid with 0 coins
coin=[7,5,2,1]
for m in range(0,n+1) : # every amount from 0 won up to the target n won
    for c in coin :
        # the amount to pay must be at least the coin's face value, and
        # paying coin c (dp[m-c]+1) must use fewer coins than the current best (dp[m])
        if c<=m and dp[m-c]+1<dp[m] :
            dp[m]=dp[m-c]+1
print(dp[-1])
"""
출처: https://cieske.tistory.com/11
"""
| [
"[email protected]"
] | |
ece28edc023bb04980d274572bdeb3d73e5f31e6 | 95180ab57705ed8940804ce67d1732f30984e3cd | /Mission0Testing.py | f3ae6b6ec1f711d9a518a407d6cc08ebd4a53d20 | [] | no_license | caiprozect/Bioinformatics_Python | a3e0df528d1a81ebffb4658b508b0d4a4885c555 | dbed1310b756715c68210d4a9c25d772edcb5d58 | refs/heads/master | 2021-03-30T22:05:22.790990 | 2018-05-26T04:56:20 | 2018-05-26T04:56:20 | 124,543,521 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,694 | py | import multiprocessing as mp
from collections import defaultdict #Auto initialization
from functools import reduce
from time import time
import os, re, psutil
def make_seq(infile, outfile): #Sanity check and Make one line seq file
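    # NOTE: assumes a single-record FASTA file - only the first header line is
    # removed, so multi-record files would leave '>' lines in the output.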
inFileH = open(infile, 'r')
inFileH.readline() #Skip the first line
inFileLines = inFileH.read().upper().splitlines()
inFileH.close()
outFileH = open(outfile, 'w')
outFileH.write("".join(inFileLines))
outFileH.close()
print("\tSeq file has been created")
print("\t=================================================")
def chunk_process(file, start, size, polyLen):
partFreqDict = defaultdict(int)
with open(file, "rb") as inFileH:
inFileH.seek(start)
sPartText = inFileH.read(size).upper()
nPartTextLen = len(sPartText)
for i in range(nPartTextLen-polyLen+1):
sPtn = sPartText[i:i+polyLen].decode('utf-8')
if 'N' not in sPtn:
partFreqDict[sPtn] += 1
#Endfor
return partFreqDict
def make_chunk(file, polyLen, size = 1024*1024*64):
fileEnd = os.path.getsize(file)
inFileH = open(file, 'rb')
chunkEnd = inFileH.tell()
while True:
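        # overlap consecutive chunks by polyLen-1 bytes so that patterns which
        # straddle a chunk boundary are still counted exactly once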
chunkStart = (chunkEnd - polyLen + 1) if (chunkEnd - polyLen + 1 >= 0) else chunkEnd
inFileH.seek(chunkStart)
inFileH.seek(size, 1)
chunkEnd = inFileH.tell()
yield chunkStart, chunkEnd - chunkStart
        if chunkEnd >= fileEnd:
inFileH.close()
break
#EndWhile
def sum_dicts(dictA, dictB):
summedDict = defaultdict(int)
unionKeys = set(dictA.keys()) | set(dictB.keys())
for key in unionKeys:
summedDict[key] = dictA[key] + dictB[key]
#EndFor
return summedDict
def main():
sFName = "../data/chroms/chr1.fa"
sSFile = "../data/Seq.fa"
make_seq(sFName, sSFile)
nPolymerLen = 1 #Counting monomers for this HW
mpPool = mp.Pool() #Default option uses maximum number of cpu cores
lJobs = []
for ptrChunkStart, ptrChunkSize in make_chunk(sSFile, nPolymerLen):
lJobs.append( mpPool.apply_async(chunk_process, (sSFile, ptrChunkStart, ptrChunkSize, nPolymerLen)) )
wFreq = reduce((lambda x,y: sum_dicts(x,y)), [job.get() for job in lJobs])
lPolymers = wFreq.keys()
nWNumbPoly = sum(wFreq.values())
for sPolymer in lPolymers:
print("\tNumber of {0}: {1:d} \tFrequency of {0}: {2:f}".format(sPolymer, wFreq[sPolymer], float(wFreq[sPolymer])/nWNumbPoly))
mpPool.close()
print("\t=================================================")
#print(psutil.Process().open_files())
#assert(psutil.Process().open_files()==[]), "Bad: There are some open file handles!!!"
#print("\tGood: All the file handles are properly closed!!")
print("\t---The End---")
if __name__ == "__main__":
rtime = time()
main()
rtime = time() - rtime
print("Took {} seconds to run".format(rtime)) | [
"[email protected]"
] | |
ce97ab0a834e954734be12d143f790bd1e4d881f | 439f54a059323010eae9a3aa229f8f973ba6214b | /locallibrary/catalog/views.py | 9a7afa5c1eec807d2c9b6aab66553ccfe1d59a29 | [] | no_license | arturolei/locallibrary | 47bd342408a2384db25d1ab5226c0cb557824180 | 6aff863e78dd585f449d1e258f3793c9462c78f5 | refs/heads/master | 2020-03-14T14:45:51.701221 | 2018-05-06T02:09:23 | 2018-05-06T02:09:23 | 131,660,837 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 828 | py | from django.shortcuts import render
# Create your views here.
from .models import Book, Author, BookInstance, Genre
def index(request):
"""
View function for home page of site.
"""
# Generate counts of some of the main objects
num_books=Book.objects.all().count()
num_instances=BookInstance.objects.all().count()
# Available books (status = 'a')
num_instances_available=BookInstance.objects.filter(status__exact='a').count()
num_authors=Author.objects.count() # The 'all()' is implied by default.
# Render the HTML template index.html with the data in the context variable
return render(
request,
'index.html',
context={'num_books':num_books,'num_instances':num_instances,'num_instances_available':num_instances_available,'num_authors':num_authors},
)
| [
"[email protected]"
] | |
c7b39df0402379badebebcb7b3d09c8c1bae81c8 | c4bd2939dc0273a5330c4f8151c9d05df2551d87 | /project/apps/post/models.py | 102e3c3674b77a817deb0eb827e78b66ff86373a | [] | no_license | MishaZaharyak/BlogPost | 42ca1e214cc84b76d9bf21f2247adab0407857c4 | ac9a8108bb9742909884d29a68285d68e86856a0 | refs/heads/master | 2023-01-10T03:22:45.041333 | 2020-11-09T03:59:24 | 2020-11-09T03:59:24 | 310,871,833 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,203 | py | from django.db import models
from django.utils import timezone
from utils.abstract import DateTimeAbstractModel
from utils.utils import get_file_upload_path
from apps.user.models import UserModel, VisitorModel
class PostModel(DateTimeAbstractModel):
title = models.CharField(max_length=255)
content = models.TextField()
image = models.ImageField(upload_to=get_file_upload_path)
created_at = models.DateTimeField(default=timezone.now)
updated_at = models.DateTimeField(auto_now=True)
# relations
author = models.ForeignKey(UserModel, on_delete=models.CASCADE, related_name="posts")
class Meta:
ordering = ['-created_at']
verbose_name = "Post"
verbose_name_plural = "Posts"
def __str__(self):
return self.title
class PostCommentModel(DateTimeAbstractModel):
text = models.TextField()
# relations
post = models.ForeignKey(PostModel, on_delete=models.CASCADE, related_name='comments')
author = models.ForeignKey(VisitorModel, on_delete=models.CASCADE, related_name="my_comments")
class Meta:
ordering = ['-created_at']
verbose_name = "Post Comment"
verbose_name_plural = "Posts Comments"
| [
"[email protected]"
] | |
53ac7bd75874f87b90cf048f2ecbc53cb8d875e8 | b093b693658e3095b57cab55fd2ee959d1bc63cd | /api/tests.py | 9660627dbb56f01c8b5f2b90762130da729d7e7d | [] | no_license | SherSingh07/djagileproj | 566d7f2e0239e772e19619203303ed3abe259075 | 577d3213cead595a9d9b5ce82a27ad652dc15d96 | refs/heads/master | 2021-01-10T15:40:07.039727 | 2016-02-18T07:03:15 | 2016-02-18T07:03:15 | 52,005,654 | 10 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,358 | py | import requests
import json
import datetime
from django.test import TestCase
from django.contrib.auth.models import User
from work.models import *
class RestApiTestCase(TestCase):
"""
test cases for RESTful APIs
"""
def test_get_all_projects(self):
"""
test cases to get JSON for all projects
"""
url = "http://localhost:8000/api/projects/"
r = requests.get(url)
        print(r.text, r.status_code)
self.assertEqual(r.status_code, 200)
def test_get_project_detail(self):
"""
test case to get project detail by primary key
"""
url = "http://localhost:8000/api/projects/1"
r = requests.get(url)
        print(r.text, r.status_code)
self.assertEqual(r.status_code, 200)
def test_get_all_tasks(self):
"""
test cases to get JSON for all tasks
"""
url = "http://localhost:8000/api/tasks/"
r = requests.get(url)
        print(r.text, r.status_code)
self.assertEqual(r.status_code, 200)
def test_get_task_detail(self):
"""
test case to get task detail by primary key
"""
url = "http://localhost:8000/api/tasks/1"
r = requests.get(url)
        print(r.text, r.status_code)
self.assertEqual(r.status_code, 200)
| [
"[email protected]"
] | |
5908b2b8d04eeac477a1af228589b6361c442a2e | be70f08f47158b03cbca1ff6504fc54604089155 | /_modules/mwdocker.py | 4cdbc8f1579ec99154a8ae9fd518251d1da3b447 | [
"MIT"
] | permissive | hunterfu/salt-microservices | 09d54a8cefa42476aa23b8b92923fe14debdae3f | d66c5fc7f721626407919b6480157d5ff608cb9e | refs/heads/master | 2021-01-22T15:00:51.056243 | 2016-05-13T08:57:17 | 2016-05-13T08:57:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,899 | py | # Copyright (c) 2015 Martin Helmich <[email protected]>
# Mittwald CM Service GmbH & Co. KG
#
# Docker-based microservice deployment with service discovery
# This code is MIT-licensed. See the LICENSE.txt for more information
try:
import docker
import docker.utils
import docker.errors
except ImportError:
def __virtual__():
return False, ["The docker-py package is not installed"]
import json
import logging
import time
log = logging.getLogger(__name__)
def container_ip(name):
"""
Determines the internal IP address of a Docker container.
:param name: The container name
:return: The container's internal IP address
"""
client = docker.Client(base_url='unix://var/run/docker.sock')
info = client.inspect_container(name)
return info['NetworkSettings']['IPAddress']
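# Example usage (container name is hypothetical):
#   container_ip('consul')  # -> e.g. '172.17.0.2'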
def container_published_port(name, container_port):
"""
Gets the port number of a publicly exposed container port.
:param name: The container name
    :param container_port: The internal container port
:return: The host port that the container port is mapped on
"""
client = docker.Client(base_url='unix://var/run/docker.sock')
info = client.inspect_container(name)
return info['NetworkSettings']['Ports'][container_port]['HostPort']
def start_container(name, warmup_wait=60):
"""
Starts a Docker container. This function will wait for a defined amount of
time to check if the container actually stays up after being started. If the
container status is not "up" after the `warmup_wait` has expired, this
function will raise an exception.
:param name: The container name
    :param warmup_wait: How long this function should wait to check the container status
"""
log.info("Starting container %s" % name)
client = docker.Client(base_url='unix://var/run/docker.sock')
client.start(name)
# We need to sleep to prevent race conditions on application startup.
# For example, Flow applications that do a doctrine:migrate on startup.
log.info("Waiting %d seconds for container to start" % warmup_wait)
time.sleep(warmup_wait)
container_status = client.inspect_container(name)
if not container_status["State"]["Running"] or container_status["State"]["Restarting"]:
raise Exception('Container %s is not running after %d seconds. Status is: %s' % (
name, warmup_wait, container_status["State"]))
def image_id(image):
"""
Gets the image ID for a specified image name.
:param image: The image name
:return: The image ID
"""
if ':' not in image:
image += ":latest"
client = docker.Client(base_url='unix://var/run/docker.sock')
images = client.images()
for existing_image in images:
if image in existing_image['RepoTags']:
return existing_image['Id']
return None
def delete_container(name):
"""
Stops and deletes a container.
:param name: Name of the container to delete
"""
log.info("Deleting container %s" % name)
client = docker.Client(base_url='unix://var/run/docker.sock')
try:
client.inspect_container(name)
except docker.errors.NotFound:
log.info("Container %s was not present in the first place." % name)
return
client.stop(name)
client.remove_container(name)
def create_container(name, image, command=None, environment=None, volumes=(), udp_ports=None, tcp_ports=None,
restart=True, dns=None, domain=None, volumes_from=None, links=None, user=None, test=False):
"""
Creates a new container.
:param name: The container name
:param image: The image from which to create the container
:param command: The command to use for the container
:param environment: A dictionary of environment variables to pass into the
container
:param volumes: A list of volumes. Each volume definition is a string of the
format "<host-directory>:<container-directory>:<rw|ro>"
:param udp_ports: UDP ports to expose. This is a list of dictionaries that
must provide a "port" and an "address" key.
:param tcp_ports: TCP ports to expose. This is a list of dictionaries that
must provide a "port" and an "address" key.
:param restart: `True` to restart the container when it stops
:param dns: A list of DNS server addresses to use
:param domain: The DNS search domain
:param volumes_from: A list of container names from which to use the volumes
:param links: A dictionary of containers to link (using the container name
as index and the alias as value)
:param user: The user under which to start the container
:param test: Set to `True` to not actually do anything
"""
client = docker.Client(base_url='unix://var/run/docker.sock')
pull_image(image, force=False, test=test)
hostconfig_ports, ports = _create_port_definitions(udp_ports, tcp_ports)
hostconfig_binds, binds = _create_volume_definitions(volumes)
restart_policy = None
if restart:
restart_policy = {
"MaximumRetryCount": 0,
"Name": "always"
}
host_config = docker.utils.create_host_config(
binds=hostconfig_binds,
port_bindings=hostconfig_ports,
restart_policy=restart_policy,
dns=dns,
dns_search=[domain],
volumes_from=volumes_from,
links=links
)
if test:
log.info("Would create container %s" % name)
return None
log.info("Creating container %s" % name)
container = client.create_container(
name=name,
image=image,
command=command,
ports=ports,
host_config=host_config,
volumes=binds,
environment=environment,
user=user
)
return container['Id']
def pull_image(image, force=False, test=False):
"""
Pulls the current version of an image.
:param image: The image name. If no tag is specified, the `latest` tag is assumed
:param force: Set to `True` to pull even when a local image of the same name exists
:param test: Set to `True` to not actually do anything
"""
client = docker.Client(base_url='unix://var/run/docker.sock')
if ':' not in image:
image += ":latest"
images = client.images()
present = False
for existing_image in images:
if image in existing_image['RepoTags']:
present = True
repository, tag = image.split(':')
if not present or force:
if test:
log.info("Would pull image %s:%s" % (repository, tag))
else:
# noinspection PyUnresolvedReferences
log.info("Pulling image %s:%s" % (repository, tag))
pull_stream = client.pull(repository, tag, stream=True)
for line in pull_stream:
j = json.loads(line)
if 'error' in j:
raise Exception("Could not pull image %s:%s: %s" % (repository, tag, j['errorDetail']))
def _create_port_definitions(udp_ports, tcp_ports):
ports = []
port_bindings = {}
def walk_ports(port_definitions, protocol):
for binding in port_definitions:
host_port = binding['host_port'] if 'host_port' in binding else binding['port']
ports.append((binding['port'], protocol))
port_bindings["%d/%s" % (binding['port'], protocol)] = (binding['address'], host_port)
walk_ports(tcp_ports, 'tcp')
walk_ports(udp_ports, 'udp')
return port_bindings, ports
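# Example (hypothetical input): tcp_ports=[{'port': 80, 'address': '0.0.0.0'}]
# yields port_bindings == {'80/tcp': ('0.0.0.0', 80)} and ports == [(80, 'tcp')].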
def _create_volume_definitions(volumes):
binds = {}
container_volumes = []
for bind in volumes:
r = bind.split(':')
mode = r[2] if len(r) > 2 else "rw"
container_volumes.append(r[1])
binds[r[0]] = {
"bind": r[1],
"mode": mode
}
return binds, container_volumes
| [
"[email protected]"
] | |
4210deb85f4027d105d5edf7c9a411994e71c743 | 0bbd083f3837f5af8b9505c9c57ad65654753fb2 | /myawesomeapp/myawesomeapp/urls.py | 6762b06e3ecebd4ac361a10a2e594d2af8c0ecaf | [] | no_license | Radbull123/django-apps | 4a94b9a5c0bae21fbdc4d79576311d04959f87d1 | 1bfaada612e3195da2ebbff1da70e5cf48923b69 | refs/heads/master | 2022-12-14T20:05:04.369739 | 2019-07-18T13:49:09 | 2019-07-18T13:49:09 | 197,149,192 | 0 | 0 | null | 2022-12-08T01:22:44 | 2019-07-16T08:05:37 | Python | UTF-8 | Python | false | false | 1,216 | py | """myawesomeapp URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('main/', include('main.urls'))
"""
from django.contrib import admin
from django.contrib.auth import views as auth_views
from django.urls import path, include
from main import views as main_views
from users import views as users_views
urlpatterns = [
path('admin/', admin.site.urls),
path('', main_views.home, name='home'),
path('register/', users_views.registration, name='register'),
path('login/', auth_views.LoginView.as_view(template_name='users/login.html'), name='login'),
path(
'logout/', auth_views.LogoutView.as_view(template_name='users/logout.html'), name='logout'
),
]
| [
"[email protected]"
] | |
107c6ce6eea6790adc1abe99c1ba467061e490c2 | 6abf65c295faaef59ef8fcc4de901b0ce92e9050 | /DoubleLinkedList.py | d413ec7216c708c5d3dc5a20ae62b4a189fe4968 | [] | no_license | codeAligned/DataStructures-1 | 89dd12eb7cdc085771d494810829c628b80f4fc7 | 116200f46e9bae79525bd82c3d0774f6ce2ad39e | refs/heads/master | 2020-05-19T04:54:02.723220 | 2017-07-10T00:51:46 | 2017-07-10T00:51:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,140 | py | class Node(object):
def __init__(self, data, next=None, prev=None):
self.data = data
self.next = next
self.prev = prev
def get_data(self):
return self.data
def get_next(self):
return self.next
def get_prev(self):
return self.prev
def set_next(self, n):
self.next = n
def set_prev(self, n):
self.prev = n
class LinkedList(object):
def __init__(self, head=None):
self.head = head
    def insert(self, data):
        new_node = Node(data)
        if self.head is None:
            self.head = new_node
            return
        current_node = self.head
        # walk to the tail, then append the new node there
        while current_node.get_next():
            current_node = current_node.get_next()
        current_node.set_next(new_node)
        new_node.set_prev(current_node)
    def delete(self, data):
        current_node = self.head
        found = False
        while current_node and found is False:
            if current_node.get_data() == data:
                found = True
            else:
                current_node = current_node.get_next()
        if found is False:
            raise ValueError('data not in list')
        # unlink the found node using its own neighbours
        prev_node = current_node.get_prev()
        next_node = current_node.get_next()
        if prev_node is None:
            self.head = next_node  # deleting the head node
        else:
            prev_node.set_next(next_node)
        if next_node is not None:
            next_node.set_prev(prev_node)
| [
"[email protected]"
] | |
b8b0ae01aebed89c4fcd1c8e915ea2361bdfabae | d57b51ec207002e333b8655a8f5832ed143aa28c | /.history/1/PyGame/game_20200606103144.py | 6ff9f41e5074746419088a6a5bb1697c348cd040 | [] | no_license | yevheniir/python_course_2020 | b42766c4278a08b8b79fec77e036a1b987accf51 | a152d400ab4f45d9d98d8ad8b2560d6f0b408c0b | refs/heads/master | 2022-11-15T07:13:24.193173 | 2020-07-11T15:43:26 | 2020-07-11T15:43:26 | 278,890,802 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 7,341 | py | # 1 - Import library
import pygame
from pygame.locals import *
import math
import random
import os
import json
# 2 - Initialize the game
pygame.init()
width, height = 640, 480
screen=pygame.display.set_mode((width, height))
keys = [False, False, False, False]
playerpos=[100,100]
acc=[0,0]
arrows=[]
badtimer=100
badtimer1=0
badguys=[[640,100]]
healthvalue=194
pygame.mixer.init()
# 3 - Load image
player = pygame.image.load("resources/images/dude.png")
grass = pygame.image.load("resources/images/grass.png")
castle = pygame.image.load("resources/images/castle.png")
arrow = pygame.image.load("resources/images/bullet.png")
badguyimg1 = pygame.image.load("resources/images/badguy.png")
badguyimg=badguyimg1
healthbar = pygame.image.load("resources/images/healthbar.png")
health = pygame.image.load("resources/images/health.png")
gameover = pygame.image.load("resources/images/gameover.png")
youwin = pygame.image.load("resources/images/youwin.png")
# 3.1 - Load audio
hit = pygame.mixer.Sound("resources/audio/explode.wav")
enemy = pygame.mixer.Sound("resources/audio/enemy.wav")
shoot = pygame.mixer.Sound("resources/audio/shoot.wav")
hit.set_volume(0.05)
enemy.set_volume(0.05)
shoot.set_volume(0.05)
pygame.mixer.music.load('resources/audio/moonlight.wav')
pygame.mixer.music.play(-1, 0.0)
pygame.mixer.music.set_volume(0.25)
# 4 - keep looping through
running = 1
exitcode = 0
while running:
badtimer-=1
# 5 - clear the screen before drawing it again
screen.fill(0)
# 6 - draw the player on the screen at X:100, Y:100
for x in range(width//grass.get_width()+1):
for y in range(height//grass.get_height()+1):
screen.blit(grass,(x*100,y*100))
# initialize font; must be called after 'pygame.init()' to avoid 'Font not Initialized' error
myfont = pygame.font.SysFont("monospace", 15)
# render text
label = myfont.render("Some text!", 1, (255,255,0))
screen.blit(label, (100, 100))
    mpcs = []
    dir_path = os.path.dirname(os.path.realpath(__file__)) + "/../save.json"
    # Assumed save-file shape (not shown in this snapshot): a JSON list of
    # objects, each carrying a "name" key. The original opened "" and called a
    # non-existent list.map(), and `mp` was undefined.
    with open(dir_path) as json_file:
        mpcs = [entry["name"] for entry in json.load(json_file)]
    step = height / max(len(mpcs), 1)
    # draw one castle per saved entry, spaced evenly down the left edge
    for i, mpc in enumerate(mpcs):
        screen.blit(castle, (0, int(30 + i * step)))
# 6.1 - Set player position and rotation
position = pygame.mouse.get_pos()
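    # atan2(dy, dx) returns the angle, in radians, from the player sprite's
    # centre (offsets 26, 32 below) to the mouse cursor; the 57.29 factor used
    # next converts radians to degrees.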
angle = math.atan2(position[1]-(playerpos[1]+32),position[0]-(playerpos[0]+26))
playerrot = pygame.transform.rotate(player, 360-angle*57.29)
playerpos1 = (playerpos[0]-playerrot.get_rect().width/2, playerpos[1]-playerrot.get_rect().height/2)
screen.blit(playerrot, playerpos1)
# 6.2 - Draw arrows
for bullet in arrows:
index=0
velx=math.cos(bullet[0])*10
vely=math.sin(bullet[0])*10
bullet[1]+=velx
bullet[2]+=vely
if bullet[1]<-64 or bullet[1]>640 or bullet[2]<-64 or bullet[2]>480:
arrows.pop(index)
index+=1
for projectile in arrows:
arrow1 = pygame.transform.rotate(arrow, 360-projectile[0]*57.29)
screen.blit(arrow1, (projectile[1], projectile[2]))
# 6.3 - Draw badgers
if badtimer==0:
badguys.append([640, random.randint(50,430)])
badtimer=100-(badtimer1*2)
if badtimer1>=35:
badtimer1=35
else:
badtimer1+=5
index=0
for badguy in badguys:
if badguy[0]<-64:
badguys.pop(index)
badguy[0]-=5
# 6.3.1 - Attack castle
badrect=pygame.Rect(badguyimg.get_rect())
badrect.top=badguy[1]
badrect.left=badguy[0]
if badrect.left<64:
hit.play()
healthvalue -= random.randint(5,20)
badguys.pop(index)
#6.3.2 - Check for collisions
index1=0
for bullet in arrows:
bullrect=pygame.Rect(arrow.get_rect())
bullrect.left=bullet[1]
bullrect.top=bullet[2]
if badrect.colliderect(bullrect):
enemy.play()
acc[0]+=1
badguys.pop(index)
arrows.pop(index1)
index1+=1
# 6.3.3 - Next bad guy
index+=1
for badguy in badguys:
screen.blit(badguyimg, badguy)
# 6.4 - Draw clock
font = pygame.font.Font(None, 24)
    survivedtext = font.render(str((90000-pygame.time.get_ticks())//60000)+":"+str((90000-pygame.time.get_ticks())//1000%60).zfill(2), True, (0,0,0))  # integer division keeps the clock readable under Python 3
textRect = survivedtext.get_rect()
textRect.topright=[635,5]
screen.blit(survivedtext, textRect)
# 6.5 - Draw health bar
screen.blit(healthbar, (5,5))
for health1 in range(healthvalue):
screen.blit(health, (health1+8,8))
# 7 - update the screen
pygame.display.flip()
# 8 - loop through the events
for event in pygame.event.get():
# check if the event is the X button
if event.type==pygame.QUIT:
# if it is quit the game
pygame.quit()
exit(0)
if event.type == pygame.KEYDOWN:
if event.key==K_w:
keys[0]=True
elif event.key==K_a:
keys[1]=True
elif event.key==K_s:
keys[2]=True
elif event.key==K_d:
keys[3]=True
if event.type == pygame.KEYUP:
if event.key==pygame.K_w:
keys[0]=False
elif event.key==pygame.K_a:
keys[1]=False
elif event.key==pygame.K_s:
keys[2]=False
elif event.key==pygame.K_d:
keys[3]=False
if event.type==pygame.MOUSEBUTTONDOWN:
shoot.play()
position=pygame.mouse.get_pos()
acc[1]+=1
arrows.append([math.atan2(position[1]-(playerpos1[1]+32),position[0]-(playerpos1[0]+26)),playerpos1[0]+32,playerpos1[1]+32])
# 9 - Move player
if keys[0]:
playerpos[1]-=5
elif keys[2]:
playerpos[1]+=5
if keys[1]:
playerpos[0]-=5
elif keys[3]:
playerpos[0]+=5
#10 - Win/Lose check
if pygame.time.get_ticks()>=90000:
running=0
exitcode=1
if healthvalue<=0:
running=0
exitcode=0
if acc[1]!=0:
accuracy=acc[0]*1.0/acc[1]*100
else:
accuracy=0
# 11 - Win/lose display
if exitcode==0:
pygame.font.init()
font = pygame.font.Font(None, 24)
text = font.render("Accuracy: "+str(accuracy)+"%", True, (255,0,0))
textRect = text.get_rect()
textRect.centerx = screen.get_rect().centerx
textRect.centery = screen.get_rect().centery+24
screen.blit(gameover, (0,0))
screen.blit(text, textRect)
else:
pygame.font.init()
font = pygame.font.Font(None, 24)
text = font.render("Accuracy: "+str(accuracy)+"%", True, (0,255,0))
textRect = text.get_rect()
textRect.centerx = screen.get_rect().centerx
textRect.centery = screen.get_rect().centery+24
screen.blit(youwin, (0,0))
screen.blit(text, textRect)
while 1:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
exit(0)
pygame.display.flip()
| [
"[email protected]"
] | |
ba120d8ad2297c04567a816a41b868e572456e7c | 8837e0973de895880fbd536e2e7b824e06a0c53b | /05. Static and Class Methods/02. Exercise/project_04.Gym/main.py | 7b624794d595f6320b77f8ed556fc2021ae03b38 | [] | no_license | Tuchev/Python-OOP---June---2021 | 3cc5e9206f910262d567f4f151bb6cd1b17779fe | 27a0620686d547eafb466f42d951a2bdc90e7fdc | refs/heads/main | 2023-07-07T05:31:27.384073 | 2021-07-24T12:22:45 | 2021-07-24T12:22:45 | 387,717,053 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 684 | py | from project.customer import Customer
from project.equipment import Equipment
from project.exercise_plan import ExercisePlan
from project.gym import Gym
from project.subscription import Subscription
from project.trainer import Trainer
customer = Customer("John", "Maple Street", "[email protected]")
equipment = Equipment("Treadmill")
trainer = Trainer("Peter")
subscription = Subscription("14.05.2020", 1, 1, 1)
plan = ExercisePlan(1, 1, 20)
gym = Gym()
gym.add_customer(customer)
gym.add_equipment(equipment)
gym.add_trainer(trainer)
gym.add_plan(plan)
gym.add_subscription(subscription)
print(Customer.get_next_id())
print(gym.subscription_info(1))
| [
"[email protected]"
] | |
ac929aa5362835a026bf7b815d74506b862fd612 | f6116e635cc2fcc9a930ae3ed1c42e9b61088a3a | /banzai/tests/test_mosaic_maker.py | 5d9198470b0ff3ed0aa7aa3100bf1b15d25dd503 | [] | no_license | drhaz/banzai | 40d694ec49e747a255669769ae1da98f1039e2cf | 5847c60c1b3f347a82840d5d527bd26f80edb4d7 | refs/heads/master | 2021-01-25T04:36:20.627371 | 2018-06-04T23:07:46 | 2018-06-04T23:07:46 | 93,456,065 | 0 | 0 | null | 2017-10-10T16:08:30 | 2017-06-05T23:30:35 | Python | UTF-8 | Python | false | false | 4,158 | py | from __future__ import absolute_import, division, print_function, unicode_literals
from banzai.mosaic import get_mosaic_size, MosaicCreator
from banzai.tests.utils import FakeImage
import numpy as np
class FakeMosaicImage(FakeImage):
def __init__(self, *args, **kwargs):
super(FakeMosaicImage, self).__init__(*args, **kwargs)
self.extension_headers = None
def update_shape(self, nx, ny):
pass
def test_get_mosaic_size():
detsecs = [['[1:100,1:100]', '[1:100,200:101]', '[200:101,1:100]', '[200:101,200:101]'],
['[1:200,400:201]', '[1:200,1:200]', '[400:201,400:201]', '[400:201,1:200]'],
['[600:301,600:301]', '[600:301,1:300]', '[1:300,1:300]', '[1:300,600:301]'],
['[800:401,1:400]', '[800:401,800:401]', '[1:400,800:401]', '[1:400,1:400]'],
['[800:401,1:400]', None, '[1:400,800:401]', '[1:400,1:400]'],
[None, None, None, None]]
expected_mosaic_sizes = [(200, 200), (400, 400), (600, 600), (800, 800), (800, 800), (1, 1)]
for i, detsec in enumerate(detsecs):
fake_image = FakeMosaicImage()
fake_image.extension_headers = [{'DETSEC': d} for d in detsec]
assert expected_mosaic_sizes[i] == get_mosaic_size(fake_image, 4)
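# Editorial note: DETSEC/DATASEC values are FITS-style section strings with
# inclusive, 1-based pixel ranges; e.g. '[200:101,1:100]' runs x from 200 down
# to 101 (a flipped readout direction) and y from 1 to 100.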
def test_no_input_images():
mosaic_creator = MosaicCreator(None)
images = mosaic_creator.do_stage([])
assert len(images) == 0
def test_group_by_keywords():
mosaic_creator = MosaicCreator(None)
assert mosaic_creator.group_by_keywords is None
def test_2d_images():
pass
def test_missing_detsecs():
pass
def test_missing_datasecs():
pass
def test_mosaic_maker():
detsecs = [['[1:100,1:100]', '[1:100,200:101]', '[200:101,1:100]', '[200:101,200:101]'],
['[1:200,400:201]', '[1:200,1:200]', '[400:201,400:201]', '[400:201,1:200]'],
['[600:301,600:301]', '[600:301,1:300]', '[1:300,1:300]', '[1:300,600:301]'],
['[800:401,1:400]', '[800:401,800:401]', '[1:400,800:401]', '[1:400,1:400]']]
datasecs = ['[1:100,1:100]', '[1:200,1:200]', '[1:300,1:300]', '[1:400,1:400]']
expected_mosaic_sizes = [(200, 200), (400, 400), (600, 600), (800, 800)]
expected_quad_slices = [[(slice(0, 100), slice(0, 100)), (slice(199, 99, -1), slice(0, 100)),
(slice(0, 100), slice(199, 99, -1)), (slice(199, 99, -1), slice(199, 99, -1))],
[(slice(399, 199, -1), slice(0, 200)), (slice(0, 200), slice(0, 200)),
(slice(399, 199, -1), slice(399, 199, -1)), (slice(0, 200), slice(399, 199, -1))],
[(slice(599, 299, -1), slice(599, 299, -1)), (slice(0, 300), slice(599, 299, -1)),
(slice(0, 300), slice(0, 300)), (slice(599, 299, -1), slice(0, 300))],
[(slice(0, 400), slice(799, 399, -1)), (slice(799, 399, -1), slice(799, 399, -1)),
(slice(799, 399, -1), slice(0, 400)), (slice(0, 400), slice(0, 400))]]
data_sizes = [(4, 100, 100), (4, 200, 200), (4, 300, 300), (4, 400, 400)]
data_arrays = []
bpm_arrays = []
fake_images = []
for i, detsec in enumerate(detsecs):
data = np.random.uniform(0, 1, size=data_sizes[i])
data_arrays.append(data)
bpm = np.random.choice([0, 1], size=data_sizes[i])
bpm_arrays.append(bpm)
image = FakeMosaicImage()
image.ny, image.nx = data_sizes[i][1:]
image.data = data.copy()
image.bpm = bpm.copy()
image.extension_headers = []
for j in range(4):
image.extension_headers.append({'DATASEC': datasecs[i], 'DETSEC': detsec[j]})
fake_images.append(image)
mosaic_creator = MosaicCreator(None)
mosaiced_images = mosaic_creator.do_stage(fake_images)
for i, image in enumerate(mosaiced_images):
assert image.data.shape == expected_mosaic_sizes[i]
for j, s in enumerate(expected_quad_slices[i]):
np.testing.assert_allclose(image.data[s], data_arrays[i][j])
np.testing.assert_allclose(image.bpm[s], bpm_arrays[i][j])
| [
"[email protected]"
] | |
ad1902e382ad9e14f55a2d4603dc7c2422cd8650 | 547e3cb48d93b00ce8542d01eab0d2568ad63076 | /run3.py | 7dfbb9698e824110e2205f5a39b69e5c19752e72 | [] | no_license | QiushiPan/EMORSC | efeceab1a2e97bf5b29056755d8a4561acdaccf0 | 4da0e7f1f35aa26614b9ed494eb5f8b9e43ae05e | refs/heads/master | 2020-05-07T01:01:21.819643 | 2019-04-09T01:33:19 | 2019-04-09T01:33:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,052 | py | import tensorflow as tf
import numpy as np
import cv2
from skimage import io, transform
import os
from PIL import Image
import math
# Function that reads an image and prepares it for the network
def inputimage(input_img_path, file):
    img = io.imread(input_img_path + file)  # read the target image with skimage
    output_height = img.shape[0]  # original image height
    output_width = img.shape[1]   # original image width
    inputimg = transform.resize(img, (416, 416, 3))  # resize to the YOLO input size
    inputimg = np.reshape(inputimg, [-1, 416, 416, 3])  # reshape into a batch matrix with numpy
    return output_height, output_width, inputimg
# Function that displays the results
def showresult(input_img_path, file, predictionsdir, logo):
    # im = Image.open(predictionsdir)  # paste the logo with PIL
    # im.paste(logo, (0, 0))  # paste at the top-left corner (0, 0)
    # im.save(predictionsdir)  # save
    predictions = cv2.imread(predictionsdir)  # read the processed result image
    oimage = cv2.imread(input_img_path + file)  # read the original image
    cv2.imshow("Predictions", predictions)  # show the result image
    cv2.imshow("Original", oimage)  # show the original image
    cv2.waitKey(1500)  # wait 1.5 seconds, then process the next image
# Sigmoid activation, used to squash the confidence scores into (0, 1)
def sigmoid(x):
    return 1. / (1. + np.exp(-x))
# Softmax: weights every entry of an array so that larger values get a larger share
def softmax(x):
    e_x = np.exp(x - np.max(x))  # operates on an array; subtracting the max keeps it numerically stable
    out = e_x / e_x.sum()
    return out
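# Editorial example: softmax(np.array([1.0, 2.0, 3.0])) ≈ [0.090, 0.245, 0.665]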
# IOU: overlap ratio between a detected box and a reference box
def iou(boxA, boxB):
    # Corner coordinates of the intersection rectangle (two opposite corners fix the area)
    xA = max(boxA[0], boxB[0])  # top-left x
    yA = max(boxA[1], boxB[1])  # top-left y
    xB = min(boxA[2], boxB[2])  # bottom-right x
    yB = min(boxA[3], boxB[3])  # bottom-right y
    # Total area of the intersection rectangle
    intersection_area = (xB - xA + 1) * (yB - yA + 1)
    # Areas of the two rectangles
    boxA_area = (boxA[2] - boxA[0] + 1) * (boxA[3] - boxA[1] + 1)
    boxB_area = (boxB[2] - boxB[0] + 1) * (boxB[3] - boxB[1] + 1)
    # Overlap ratio: IOU = intersection / (areaA + areaB - intersection)
    iou = intersection_area / float(boxA_area + boxB_area - intersection_area)
    return iou  # return the overlap ratio
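# Worked example (editorial): for two 101x101-pixel boxes offset by 50 px in x,
# iou([0, 0, 100, 100], [50, 0, 150, 100]) = 51*101 / (101*101 + 101*101 - 51*101)
# = 5151 / 15251 ≈ 0.34 -- above the 0.3 iou_threshold used in main(), so NMS
# would treat them as one object.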
# Non-maximal suppression: of several heavily-overlapping boxes that passed the
# confidence filter, keep only the highest-confidence one
def non_maximal_suppression(thresholded_predictions, iou_threshold):
    nms_predictions = []  # output list
    # Add the highest-confidence box first, so the best box can never be dropped
    nms_predictions.append(thresholded_predictions[0])
    # Starting from the second box, discard boxes that overlap the kept ones heavily
    # thresholded_predictions[i][0] = [x1,y1,x2,y2]
    # the first element of each row holds the box's top-left and bottom-right corners
    i = 1
    while i < len(thresholded_predictions):
        n_boxes_to_check = len(nms_predictions)  # boxes already kept: higher confidence, verified low overlap
        to_delete = False  # flag marking this box for deletion
        j = 0
        while j < n_boxes_to_check:
            # compare this box against every box that has already been kept
            curr_iou = iou(thresholded_predictions[i][0], nms_predictions[j][0])
            # if it overlaps any higher-confidence box too much, mark it for deletion
            if (curr_iou > iou_threshold):
                to_delete = True
            j = j + 1
        # keep the box only if it was not marked
        if to_delete == False:
            nms_predictions.append(thresholded_predictions[i])
        i = i + 1
    return nms_predictions
# Post-process the raw network output
def postprocessing(predictions, input_img_path, score_threshold, iou_threshold, output_height, output_width):
    input_image = cv2.imread(input_img_path)  # read the image
    input_image = cv2.resize(input_image, (output_width, output_height), interpolation=cv2.INTER_CUBIC)  # stretch the image to the output size
    n_grid_cells = 13  # the whole image is divided into 13x13 grid cells
    n_b_boxes = 5  # each cell carries 5 boxes, used to match candidate object boxes
    # Class labels and RGB colors
classes = ["yuyin", "baojing", "tft", "daozha", "xianshi", "ludeng", "jingtai", "chongdian", "jiaotong", "cheku"]
colors = [(254.0, 0, 254), (239.88888888888889, 211.66666666666669, 127),
(225.77777777777777, 169.33333333333334, 0), (211.66666666666669, 127.0, 254),
(197.55555555555557, 84.66666666666667, 127), (183.44444444444443, 42.33333333333332, 0),
(169.33333333333334, 0.0, 254), (155.22222222222223, -42.33333333333335, 127),
(141.11111111111111, -84.66666666666664, 0), (127.0, 0, 254)]
realhigh = [20.2, 18.8, 23.7, 21.7, 43.2, 34.6, 48.8, 16.2, 29.4, 50.1]
    # Official tiny-yolo anchor values: each of the 13x13 grid cells gets 5 B-Boxes derived from these anchors
anchors = [1.08, 1.19, 3.42, 4.41, 6.63, 11.38, 9.42, 5.11, 16.62, 10.52]
    thresholded_predictions = []  # grid data whose confidence passes the threshold is collected here
    print('Threshold = {}'.format(score_threshold))
    object_area = []  # area of each detected object
    areas = []  # normalized object areas
    object_score = []
    # Reshape the network output to [13 x 13 x (5 B-Boxes) x (4 box coordinates + 1 objectness score + 10 class probabilities)].
    # The YOLO output describes 13x13 grid cells; each cell holds 5 boxes (B-Boxes), and each box has 15
    # parameters: 4 box coordinates (center plus extents), 1 objectness score (probability that the box
    # contains an object center) and 10 class probabilities (which class the object belongs to).
    predictions = np.reshape(predictions, (13, 13, 5, 15))
    # Walk all 13x13 grid cells and keep the boxes whose confidence beats the threshold
for row in range(n_grid_cells):
for col in range(n_grid_cells):
            for b in range(n_b_boxes):  # the 5 boxes in each grid cell
                tx, ty, tw, th, tc = predictions[row, col, b, :5]  # the first 5 values are the box coordinates and the objectness score
                # Each grid cell is 32x32 pixels.
                # YOLOv2 predictions must be converted into parameterized coordinates on the
                # full-size image; this conversion follows the official definition.
                center_x = (float(col) + sigmoid(tx)) * 32.0  # actual x coordinate of the object center on the image
                center_y = (float(row) + sigmoid(ty)) * 32.0  # actual y coordinate of the object center on the image
                roi_w = np.exp(tw) * anchors[2 * b + 0] * 32.0  # box width in pixels
                roi_h = np.exp(th) * anchors[2 * b + 1] * 32.0  # box height in pixels
                # Box corners: the center plus/minus half the extents.
                left = int(center_x - (roi_w / 2.))
                right = int(center_x + (roi_w / 2.))
                top = int(center_y - (roi_h / 2.))
                bottom = int(center_y + (roi_h / 2.))
                final_confidence = sigmoid(tc)  # probability that this box contains an object center
                # Find the best class for this box
                class_predictions = predictions[row, col, b, 5:]  # the last 10 values are the 10 class probabilities
                class_predictions = softmax(class_predictions)  # re-weight the class probabilities with softmax
                class_predictions = tuple(class_predictions)  # convert to a tuple for easy lookup
                best_class = class_predictions.index(max(class_predictions))  # position of the largest probability, i.e. the best class
                best_class_score = class_predictions[best_class]  # probability of the best class
                # Confidence = P(box contains an object center) * P(best class);
                # a confidence above the threshold set in main() passes this first filter
                if ((final_confidence * best_class_score) > score_threshold):
                    # Record the box position, confidence and best class
                    thresholded_predictions.append(
                        [[left, top, right, bottom], final_confidence * best_class_score, best_class, [center_x, center_y]])
    # Sort all boxes (B-Boxes) by confidence
    thresholded_predictions.sort(key=lambda tup: tup[1], reverse=True)  # sort on element [1] of each tuple, which is the confidence
    if len(thresholded_predictions) == 0:
        # If no B-Boxes passed the confidence filter, return the original image; nothing to draw
        return input_image
    print('These {} B-boxes have a score higher than the threshold:'.format(len(thresholded_predictions)))
    for i in range(len(thresholded_predictions)):
        # Print the boxes that passed the confidence filter
        print('B-Box {} : {}'.format(i + 1, thresholded_predictions[i]))
    # The same object can score highly in several boxes, yielding duplicate detections,
    # so filter by overlap: of several heavily-overlapping boxes keep only the most confident one
    print('IOU higher than {} will be considered as the same object'.format(iou_threshold))
    nms_predictions = non_maximal_suppression(thresholded_predictions, iou_threshold)  # boxes with duplicates removed
    # Print the final detections; compute each object's area, divide by its per-class scale factor, and store it
    print('{} B-Boxes hold the final objects:'.format(len(nms_predictions)))
for i in range(len(nms_predictions)):
object = nms_predictions[i]
area = (object[0][3]-object[0][1])*(object[0][2]-object[0][0])/pow(realhigh[object[2]],2)
object_area.append(area)
print('B-Box {} : {}'.format(i + 1, nms_predictions[i]))
    # Normalize the object-area array
    for i in range(len(nms_predictions)):
        areas.append(pow(object_area[i]/np.max(object_area), 2))
    print(areas)
    # Score each object from its area, its confidence and its distance to the image center
for i in range(len(nms_predictions)):
object = nms_predictions[i]
center = object[3]
distance = math.sqrt((pow(center[0]-208, 2) + pow(center[1]-208, 2))/pow(208, 2))
final_score = (object[1] + areas[i] + 1 - distance ) / 3
object_score.append(final_score)
print('B-Box {} : {} {}'.format(i + 1, nms_predictions[i], final_score))
final_class = object_score.index(max(object_score))
final_object = nms_predictions[final_class]
picture_name = classes[final_object[2]]
picture_score = str(object_score[final_class]*100)[:5]+"%"
print("this picture is {}".format(picture_name))
    # Draw a box around every recognized object
    for i in range(len(nms_predictions)):
        color = colors[nms_predictions[i][2]]  # each class gets its own box color
        best_class_name = classes[nms_predictions[i][2]]  # label name of the best class
        score = str(nms_predictions[i][1]*100)[:4]  # first four characters of the accuracy, i.e. two decimal places
        labels = best_class_name + " " + score + "%"  # label text (class + accuracy) to print on the image
        # YOLO predicts on 416x416 images; to draw on the differently-sized original,
        # rescale the top-left and bottom-right corners accordingly
        start_x = int(nms_predictions[i][0][0]*output_width/416)
        start_y = int(nms_predictions[i][0][1]*output_height/416)
        end_x = int(nms_predictions[i][0][2]*output_width/416)
        end_y = int(nms_predictions[i][0][3]*output_height/416)
        # Draw the box and add the label and confidence
        input_image = cv2.rectangle(input_image, (start_x, start_y), (end_x, end_y), color,5)  # outline the detected object
        input_image = cv2.rectangle(input_image, (start_x-3, start_y), (start_x+len(labels)*14, start_y+20), color, -1)  # filled rectangle above the object as the label background
        cv2.putText(input_image, labels, (start_x,start_y+15),cv2.FONT_HERSHEY_COMPLEX_SMALL , 1, (255,255,255), 1)  # print the label text on the image
    # One-time banner at the bottom carrying the overall verdict (drawn once, after the loop)
    input_image = cv2.rectangle(input_image, (0, output_height-25), (300, output_height), (255, 0, 0), -1)
    cv2.putText(input_image, picture_score+" is "+picture_name, (0, output_height-5), cv2.FONT_HERSHEY_COMPLEX_SMALL, 1, (255,255,255), 1)
    return input_image  # return the annotated image
def main(_):
    # Paths
    # input_img_path = '../bkrc_v1/'  # folder of black-background test images
    # model = './model/bkrc_v1.pb'  # black-background test model in .pb format
    input_img_path = '../bkrc_v2/'  # folder of color-background test images
    model = './model/bkrc_v2.pb'  # color-background test model in .pb format
    logoimage = "./images/bkrclogo.jpg"  # path of the logo image
    predictionsdir = "./images/predictions.jpg"  # cache path for the YOLO result image
    logo = Image.open(logoimage)  # load the logo image
    # Two parameters that pick out the high-probability detections:
    score_threshold = 0.3  # confidence filter; confidence = max class probability * P(cell contains an object center)
    iou_threshold = 0.3  # overlap filter for the marked boxes
    with tf.Graph().as_default():  # enter TensorFlow's default graph
        output_graph_def = tf.GraphDef()
        with open(model, "rb") as f:  # load the .pb model file into the graph
            output_graph_def.ParseFromString(f.read())  # read the model data
            _ = tf.import_graph_def(output_graph_def, name="")  # wire it into the default graph
        gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.333)
        with tf.Session(config=tf.ConfigProto(gpu_options=gpu_options)) as sess:  # open the session
            init = tf.global_variables_initializer()  # variable-initialization op
            sess.run(init)  # run the initializer
            # The network structure and weights are already in the graph; look up
            # the model's input and output tensors by name
            input_x = sess.graph.get_tensor_by_name("input:0")  # the model input (fed at recognition time)
            out_label = sess.graph.get_tensor_by_name("output:0")  # the model output
            while True:
                for root, dirs, files in os.walk(input_img_path):  # walk the images in the target folder
                    for file in files:  # iterate over the image files only, ignoring paths and subfolders
                        # Read the image, get the original size, and convert it into the
                        # 416*416*3 matrix that the tiny-yolo network expects
                        output_height, output_width, inputimg = inputimage(input_img_path, file)
                        print("Start Recognizing")
                        # Feed the image matrix through the network; the output holds, for every
                        # grid cell, the object probabilities, confidences and box geometry
                        img_out = sess.run(out_label, feed_dict={input_x: inputimg})
                        print("Finish")
                        # Hand the network output and the image to the post-processor, which picks
                        # the high-probability detections and draws their boxes
                        output_image = postprocessing(img_out, input_img_path+file, score_threshold, iou_threshold,
                                                      output_height, output_width)
                        cv2.imwrite(predictionsdir, output_image)  # save the result
                        # Display the result
                        showresult(input_img_path, file, predictionsdir, logo)
if __name__ == '__main__':
tf.app.run(main=main)
| [
"[email protected]"
] | |
dbefed477dd464f8460b52600acce025a63d202d | d601564a45d22b84679ae2b859911bfbf5730705 | /mac_changer.py | ac8b0332db5f9f7a05345e706d3bb30b55c6c654 | [
"MIT"
] | permissive | vedanshdwivedi/Ethical-Hacking-Tools-coded-in-Python | dacaf76774de6cb5dc88ae9303fd05ba4264f028 | 376e61a296bb31476ee7429d0e17673cfaa066c2 | refs/heads/master | 2021-06-28T17:44:27.591890 | 2019-01-12T16:53:58 | 2019-01-12T16:53:58 | 164,018,462 | 0 | 1 | MIT | 2020-10-01T09:19:11 | 2019-01-03T20:13:50 | Python | UTF-8 | Python | false | false | 1,809 | py | #!/usr/bin/env python
import subprocess
import optparse
import re
def get_arguments():
parser = optparse.OptionParser()
parser.add_option("-i", "--interface", dest="interface", help="Interface whose MAC Address is to be changed")
parser.add_option("-m", "--mac", dest="new_mac", help="New MAC Address")
(options, arguments) = parser.parse_args()
if not options.interface:
#code to handle error
parser.error("[-] Please specify an interface, use --help for more info.")
elif not options.new_mac:
#code to handle error
parser.error("[-] Please specify a MAC, use --help for more info.")
return options
def change_mac(interface, new_mac):
print("[+] Changing MAC Address for " + interface + " to " + new_mac)
subprocess.call(["ifconfig", interface, "down"])
subprocess.call(["ifconfig", interface, "hw", "ether", new_mac])
subprocess.call(["ifconfig", interface, "up"])
def get_current_mac(interface):
ifconfig_result = subprocess.check_output(["ifconfig", interface])
#read MAC address from output of ifconfig
mac_address_search_result = re.search(r"\w\w:\w\w:\w\w:\w\w:\w\w:\w\w", ifconfig_result)
if mac_address_search_result:
return mac_address_search_result.group(0)
else:
print("[-] Could not read MAC address.")
def check_mac(interface, new_mac):
current_mac = get_current_mac(interface)
if current_mac == new_mac:
print("[+] The MAC Address was successfully changed to " + current_mac)
else:
print("[-] The MAC address did not change.")
options = get_arguments()
current_mac = get_current_mac(options.interface)
print("Current MAC : " + str(current_mac))
change_mac(options.interface, options.new_mac)
check_mac(options.interface, options.new_mac)
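# Example invocation (illustrative values; requires root and the ifconfig tool):
#   sudo python mac_changer.py -i eth0 -m 00:11:22:33:44:55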
| [
"[email protected]"
] | |
52864d73044b964a3cfb97677d58828017653792 | c4ffccd66adf728bcb91a647362003a7ab9ea639 | /二叉树.py | 02eac7ffbdb410ade42f478fea2c06afd69c7aba | [] | no_license | lazyManCoder/dataStructure | 7c5d69eee6ed7079191fa683e199003bd15f4958 | 0bd8f2c35cf90dff748f846c158860ee4c688ef8 | refs/heads/master | 2022-11-23T09:38:20.275630 | 2020-07-31T07:38:15 | 2020-07-31T07:38:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,178 | py | #树的结构
class Node(object):
def __init__(self,item):
self.item = item
self.lchild = None
self.rchild = None
class Tree(object):
    '''Binary tree'''
def __init__(self):
self.root = None
def add(self, item):
node = Node(item)
if self.root is None:
self.root = node
return
queue = [self.root]
while queue:
cur_node = queue.pop(0)
if cur_node.lchild is None:
cur_node.lchild = node
return
else:
queue.append(cur_node.lchild)
if cur_node.rchild is None:
cur_node.rchild = node
return
else:
queue.append(cur_node.rchild)
def breadth_travel(self):
        '''Breadth-first traversal'''
if self.root is None:
return
queue = [self.root]
while queue:
cur_node = queue.pop(0)
print(cur_node.item)
if cur_node.lchild is not None:
queue.append(cur_node.lchild)
if cur_node.rchild is not None:
queue.append(cur_node.rchild)
def preorder(self,node):
        '''Preorder (root-left-right) traversal'''
if node is None:
return
print(node.item,end=" ")
self.preorder(node.lchild)
self.preorder(node.rchild)
def inorder(self,node):
        '''Inorder (left-root-right) traversal'''
if node is None:
return
self.inorder(node.lchild)
print(node.item,end=" ")
self.inorder(node.rchild)
def postorder(self,node):
        '''Postorder (left-right-root) traversal'''
if node is None:
return
self.postorder(node.lchild)
self.postorder(node.rchild)
print(node.item,end=" ")
if __name__ == '__main__':
tree = Tree()
tree.add(0)
tree.add(1)
tree.add(2)
tree.add(3)
tree.add(4)
tree.add(5)
tree.add(6)
tree.add(7)
tree.add(8)
tree.add(9)
tree.breadth_travel()
print(" ")
tree.preorder(tree.root)
print(" ")
tree.inorder(tree.root)
print(" ")
tree.postorder(tree.root)
print(" ")
| [
"[email protected]"
] | |
06e5a81adf446a4e89c0684d354eef60a3c02f95 | 6af06b43cd43ce8475b9e2e85ec9826de8ed33bd | /rom_project/rom/migrations/0012_auto_20180715_0708.py | 5dece51e99db2b88b6c5493941b31e927e24346a | [] | no_license | cmn53/capstone-django | b55cdb76c89c6331336e941e620925e30c884a51 | 4532f08da5154090e529d023a2bcfebd3312682d | refs/heads/master | 2022-12-09T20:38:11.554448 | 2018-07-20T17:51:59 | 2018-07-20T17:51:59 | 140,997,841 | 0 | 0 | null | 2022-12-08T02:17:29 | 2018-07-15T04:23:03 | JavaScript | UTF-8 | Python | false | false | 1,621 | py | # Generated by Django 2.0.7 on 2018-07-15 07:08
import json
from django.db import migrations, transaction
from django.contrib.gis.db import models
def load_pattern_data(apps, schema_editor):
Route = apps.get_model('rom', 'Route')
Pattern = apps.get_model('rom', 'Pattern')
with open('rom/fixtures/pattern_data.json') as json_file:
data = json.load(json_file)
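        # Each record is assumed to look roughly like this (hypothetical sample,
        # inferred from the keys used below):
        #   {"route_onestop_id": "r-9q9-sample", "pattern_onestop_id": "p-9q9-1",
        #    "wk_trips": 120, "sa_trips": 80, "su_trips": 60,
        #    "wk_00-03": 2, "wk_03-06": 5, ..., "wk_24-28": 1}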
for d in data:
try:
with transaction.atomic():
route = Route.objects.get(route_onestop_id = d["route_onestop_id"])
pattern = Pattern(
route = route,
pattern_onestop_id = d["pattern_onestop_id"],
wk_trips = d["wk_trips"],
sa_trips = d["sa_trips"],
su_trips = d["su_trips"],
wk_00_03 = d["wk_00-03"],
wk_03_06 = d["wk_03-06"],
wk_06_09 = d["wk_06-09"],
wk_09_12 = d["wk_09-12"],
wk_12_15 = d["wk_12-15"],
wk_15_18 = d["wk_15-18"],
wk_18_21 = d["wk_18-21"],
wk_21_24 = d["wk_21-24"],
wk_24_28 = d["wk_24-28"]
)
pattern.save()
except Exception as e:
print("%s" %e)
pass
class Migration(migrations.Migration):
dependencies = [
('rom', '0011_pattern'),
]
operations = [
migrations.RunPython(load_pattern_data)
]
| [
"[email protected]"
] | |
622430829c5897eaebf7d321f6d94e5e36286086 | 6af15df5926abf6f61b2d7352c0ec1ec52ffb026 | /shamelaScrapper/shamelaScrapper/items.py | 5ef37ad0e2f0a916de59ff1c35756bb2d6f714d3 | [] | no_license | yjad/Shamla_UDB | c8edb230755de29aecddf8d4f9a61c320bdbea84 | 30882ce5e821f1fe457afb1fd1a44f6c6e21ca60 | refs/heads/master | 2020-09-22T10:57:07.600956 | 2019-12-05T08:38:54 | 2019-12-05T08:38:54 | 225,165,209 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 588 | py | # -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# https://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class ShamelaOnlineBookInfo(scrapy.Item):
id = scrapy.Field()
view_count = scrapy.Field()
date_added = scrapy.Field()
tags = scrapy.Field()
rar_link = scrapy.Field()
pdf_link = scrapy.Field()
    pdf_links_details = scrapy.Field()
epub_link = scrapy.Field()
online_link = scrapy.Field()
uploading_user = scrapy.Field()
repository = scrapy.Field()
cover_photo = scrapy.Field()
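# Hypothetical usage inside a spider callback (names are illustrative):
#   item = ShamelaOnlineBookInfo()
#   item['id'] = response.url.rsplit('/', 1)[-1]
#   item['tags'] = response.css('.tags a::text').getall()
#   yield item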
| [
"[email protected]"
] | |
34803b60e99af9025f0ab37ad4ccd0ee275e2027 | 7fbfe53bd8f546e05c547938efb20eb7813f9314 | /accounts/forms.py | a5ec1ac2c99680c0e0b627071a89cc30b0eca129 | [] | no_license | alisolehria/leidos | a48cfca0e016c3d7177a3ac4903489ad8bb53ff2 | ffed4d3d3dd5c17c1054569e9ae5ea2fcdc55623 | refs/heads/master | 2021-06-13T13:44:27.376588 | 2017-03-28T20:40:09 | 2017-03-28T20:40:09 | 79,891,640 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,140 | py | from django import forms
from django.contrib.auth import (
get_user_model,
)
User = get_user_model()
class LoginForm(forms.Form):
username = forms.CharField(label="",widget=forms.TextInput(attrs={'placeholder': 'Username'}))
password = forms.CharField(widget=forms.PasswordInput(attrs={'placeholder': 'Password'}),label="")
def clean(self, *args, **kwargs):
username = self.cleaned_data.get("username")
password = self.cleaned_data.get("password")
user_qs = User.objects.filter(username=username) #queries to get username
if user_qs.count() == 1:
user = user_qs.first() #if found username assign to user
else:
user = None
raise forms.ValidationError("The user does not exist") #raise error saying user doesnt exist
if user is not None:
if not user.check_password(password):
raise forms.ValidationError("Incorrect Password")
if not user.is_active:
raise forms.ValidationError("The User is no longer employeed")
return super(LoginForm, self).clean(*args, **kwargs)
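# Sketch of how a view might consume this form (hypothetical view code):
#   form = LoginForm(request.POST or None)
#   if form.is_valid():
#       user = authenticate(username=form.cleaned_data['username'],
#                           password=form.cleaned_data['password'])
#       login(request, user)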
| [
"[email protected]"
] | |
fa4da36793cd7f4cd02bca39eee5f67d551c4b4b | 75e3380bccd3fbec875c1747c108879c66aab3b9 | /JenkinsManager/conf/temppipeline.py | 314dc19d34dd4e0801a5c224aaa698997941aed9 | [] | no_license | rainbowei/op-JenkinsManager-api | a9d63075b3c66b33aa013ec6ec31d59a1fd843a2 | efddebdf441d9cc63604caa1abbc266060638c74 | refs/heads/master | 2023-05-11T06:11:20.889029 | 2021-06-02T14:30:02 | 2021-06-02T14:30:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,653 | py | jenkinFileBody = """
#!groovy
import groovy.transform.Field
import groovy.json.JsonOutput
import groovy.json.JsonSlurperClassic
@Field CHANGE_HOST = 'http://192.168.54.12'
@Field CONSOLE_SCRIPT = "/chj/data/jenkins-data"
@Field SERVICE_TYPE = "ChangE"
try {
node {
parameters {
            string(defaultValue: 'type', description: 'Application build type: 1.java 2.python 3.go 4.node.js', name: 'type')
            string(defaultValue: 'gitURL', description: 'git repository URL', name: 'gitURL')
}
stage('checkout') {
try {
checkout([$class: 'GitSCM', branches: [[name: '${branch}']], doGenerateSubmoduleConfigurations: false, userRemoteConfigs: [[credentialsId: 'cd_change_jenkins', url: '${giturl}']]])
} catch (Exception e) {
print(e)
}
}
stage('Build') {
            // build type 1: Java application
            // build type 2: Python application
            // build type 3: Go application
            // build type 4: Node.js application
try {
if ("$type" == "1") {
sh "mvn clean package -U -DskipTests=true"
} else if ("$type" == "2") {
sh "echo '不需要编译'"
} else if ("$type" == "3") {
sh "go build"
} else if ("$type" == "4") {
sh "rm -rf dist"
sh "cnpm install"
}
            } catch (Exception e) {
print(e)
}
}
}
} catch (Exception e ) {
print(e)
}
"""
| [
"[email protected]"
] | |
5fb3fbbdc4e03d9e18c3285d8699a5314bf05f5d | edff3ac1a50be86862ce70ffc0f828c098de421e | /00_Basic/17_Web_Services/weather.py | e6d08de8fd7ae58472c9056dc6b403c5807736c0 | [] | no_license | harris44/PythonMaterial | 05a38b216955d84fc92287504b5981c15164599c | ba3991915bcd89947aba9710d6f87fc5b79c8e8f | refs/heads/master | 2021-07-18T11:22:27.843044 | 2017-10-27T04:18:28 | 2017-10-27T04:18:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 444 | py | import urllib2
import json
from pprint import pprint
f = urllib2.urlopen('http://api.wunderground.com/api/bef95257316a31ed/conditions/q/CA/San_Francisco.json')
json_string = f.read()
parsed_json = json.loads(json_string)
#pprint(parsed_json)
location = parsed_json['current_observation']['display_location']['city']
temp_f = parsed_json['current_observation']['temp_f']
print "Current temperature in %s is: %s" % (location, temp_f)
f.close()
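# Example output (illustrative values): Current temperature in San Francisco is: 66.3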
| [
"[email protected]"
] | |
b93269c09058f0c38dc1a7746272274770f7b9cd | 226b4c0115181b22ff07be989fd49b656e1326cb | /cogs/Role.py | 510590c4fbceb085d652ec8b49ce044a99c0a7e1 | [] | no_license | EugeneJenkins/Discord-Bot | 90bbf151bfdec0109b07d0c3166894a9e4388dea | a3518ba4d7330835deb3957b081bdd4438f661c7 | refs/heads/master | 2022-11-12T02:21:10.415162 | 2020-06-28T18:16:03 | 2020-06-28T18:16:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,767 | py | import discord
from discord.ext import commands
class Role(commands.Cog):
# self.playerCount=0
def __init__(self,bot):
self.bot = bot
self.playerCount=0
        # initialize the player count
    # automatically grant a role to new members on join
@commands.Cog.listener()
async def on_member_join(self,member):
role = discord.utils.get(member.guild.roles, name="Viewer")
await member.add_roles(role)
@commands.command()
async def showman(self,member):
"""
        Get the host (ShowMan) role / remove it
"""
user = member.message.author
role = discord.utils.get(member.guild.roles, name="ShowMan")
        # if the user already has this role, remove it
if self.checkRole(user.roles,'ShowMan'):
await user.remove_roles(role)
await member.send(f"{user.name}, вы больше не ведущий")
return 0
await user.add_roles(role)
await member.send(f"{user.name}, теперь вы ведущий")
def checkRole(self,role,find):
for name in role:
if find==name.name:
print(find)
return 1
return 0
@commands.command()
async def player(self,member):
"""
        Get the player role / remove it
"""
user = member.message.author
role = discord.utils.get(member.guild.roles, name="Player")
userName = member.message.author.name
if self.checkRole(user.roles,'Player'):
self.playerCount=self.playerCount-1
await member.message.author.edit(nick=userName)
await user.remove_roles(role)
await member.send(f"{user.name}, вы больше не игрок")
return
        self.playerCount = self.playerCount+1  # increment the player count
print( self.playerCount)
await member.message.author.edit(nick=f"{self.playerCount} | "+userName)
await user.add_roles(role)
await member.send(f"{user.name}, теперь вы игрок")
# @commands.command(pass_context=True)
# async def mute(self,ctx,mention:discord.Member):
# role = discord.Member.roles
# await mention.edit(mute=1,deafen=1)
# @commands.command(pass_context=True)
# async def msg(self,ctx,mention:discord.Member,text):
# await mention.send(text)
# @commands.command(pass_context=True)
# async def chnick(self,ctx, nick):
# await ctx.message.author.edit(nick=nick)
def setup(bot):
bot.add_cog(Role(bot)) | [
"[email protected]"
] | |
6930cc2a442caaa023ac0f2ee18c012fd6160eb6 | e12df0199a9bd32f2e28e95d904a58c4e1ebbe1c | /src/lucy/services/face_recognition_utilities/face_recognition_service.py | aec940b66642af12a5491d8a85ef013561a4aaa5 | [] | no_license | ehsonmiraz/Lucy | c01fda89887234f9aa9380d466e523f0702def07 | 069d22a3e49731e82efe134e9e15f6d6f8a15358 | refs/heads/master | 2023-08-18T22:36:20.541601 | 2023-08-04T10:53:10 | 2023-08-04T10:53:10 | 163,743,917 | 6 | 0 | null | 2023-08-04T10:53:12 | 2019-01-01T14:46:48 | Python | UTF-8 | Python | false | false | 3,949 | py | import json
import pickle
import datetime
import os
import cv2
import face_recognition
from picamera.array import PiRGBArray
from picamera import PiCamera
import time
import lucy
from lucy.core.console import ConsoleManager as cm
class FaceRecognition:
TOLERANCE = 0.6
MODEL = "hog" # cnn
CURRENT_DIR = os.path.dirname(__file__)
@classmethod
def generate_encodings(cls):
cm.console_output("starting capturing...../")
# time.sleep(2)
cap = cv2.VideoCapture(0)
counter=1
encodingList = []
while True:
success, image = cap.read()
if (not success):
cm.console_output("unable to read")
continue
cm.console_output(success)
# if(image)
image = cv2.resize(image, (480, 360))
locations = face_recognition.face_locations(image, model=cls.MODEL)
encodings = face_recognition.face_encodings(image, locations)
# image = cv2.cvtColor(image,cv2.COLOR_RGB2BGR)
            if len(locations) == 0:
cm.console_output("no face detected")
time.sleep(2)
key = cv2.waitKey(10)
if (key == ord('q') or counter > 10):
break
continue
face_location = locations[0]
encoding = encodings[0]
encodingList.append(encoding)
cm.console_output("Photo " + str(counter) + " clicked")
counter += 1
time.sleep(2)
key = cv2.waitKey(1)
if (key == ord('q') or counter > 10):
break
cv2.destroyAllWindows()
cap.release()
return encodingList
@classmethod
    def save_encoding(cls, subject, encodings):
        ID = str(datetime.datetime.now())
        faces_path = os.path.join(cls.CURRENT_DIR, '....', 'files', 'faces_list.json')
        # Read the existing registry, add the new subject, then write it back.
        # (The original opened the file write-only and called json.dump(file),
        # which cannot work; a dict keyed by ID matches how load_encodings reads it.)
        faces_list = {}
        if os.path.exists(faces_path):
            with open(faces_path) as file:
                faces_list = json.load(file)
        faces_list[ID] = subject
        with open(faces_path, "w") as file:
            json.dump(faces_list, file)
        with open(os.path.join(cls.CURRENT_DIR, '....', 'files', 'face_encodings', f'{ID}.pkl'), "wb") as file:
            pickle.dump(encodings, file)
@classmethod
    def load_encodings(cls):
        known_faces = []
        known_names = []
        # Open the registry read-only (the original used "w"/"wb" modes, which truncate the files)
        with open(os.path.join(cls.CURRENT_DIR, '....', 'files', 'faces_list.json')) as file:
            faces_list = json.load(file)
        for ID in faces_list.keys():
            with open(os.path.join(cls.CURRENT_DIR, '....', 'files', 'face_encodings', f'{ID}.pkl'), "rb") as file:
                encoding = pickle.load(file)
            known_faces.append(encoding)
            known_names.append(faces_list.get(ID))
        return known_faces, known_names
    @classmethod
    def recognise_subject(cls):
        known_faces, known_names = cls.load_encodings()
        # Set up the Pi camera; the original referenced `camera` and `rawCapture`
        # without ever creating them.
        camera = PiCamera()
        rawCapture = PiRGBArray(camera)
        time.sleep(1)  # let the sensor warm up
        for frame in camera.capture_continuous(rawCapture, format="bgr", use_video_port=True):
            image = frame.array
            locations = face_recognition.face_locations(image, model=cls.MODEL)
            encodings = face_recognition.face_encodings(image, locations)
            rawCapture.truncate(0)
            for face_encoding, face_location in zip(encodings, locations):
                results = face_recognition.compare_faces(known_faces, face_encoding, cls.TOLERANCE)
                match = None
                if True in results:
                    match = known_names[results.index(True)]
                    lucy.output_engine.respond("Match found : " + match)
                else:
                    lucy.output_engine.respond("I don't know this face yet; you can add it to my database")
    @classmethod
    def add_face_to_db(cls, subject):
        encodings = cls.generate_encodings()
        cls.save_encoding(subject, encodings)
        lucy.output_engine.respond(f"saved {subject}'s face to my database")
if __name__ == '__main__':
FaceRecognition.add_face_to_db("ehson") | [
"[email protected]"
] | |
0c3eaa5e055df080fed204c30783825a33e822bd | b1c6d0c6fb4c5ba8683a93471de3e8b31291aabe | /venv/UI/pie.py | 2400b5e10c3a5333bdfe358e18554a4a4b5352c7 | [] | no_license | PeachtaoYang/visualization | 79fbb866d432ecd3d3e6f3cb0fe358003a2bdd6d | ea5b29da0195b687db8eede1bf5376694396e121 | refs/heads/master | 2022-10-10T16:56:08.305942 | 2020-06-10T14:02:05 | 2020-06-10T14:02:27 | 270,919,734 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,161 | py | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'pie.ui'
#
# Created by: PyQt5 UI code generator 5.13.0
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtGui import QIcon
class Ui_Dialog(object):
def setupUi(self, Dialog):
Dialog.setObjectName("Dialog")
Dialog.resize(368, 259)
self.setFixedSize(368, 259)
self.widget = QtWidgets.QWidget(Dialog)
self.widget.setGeometry(QtCore.QRect(20, 30, 331, 122))
self.widget.setObjectName("widget")
self.formLayout = QtWidgets.QFormLayout(self.widget)
self.formLayout.setContentsMargins(0, 0, 0, 0)
self.formLayout.setObjectName("formLayout")
self.label = QtWidgets.QLabel(self.widget)
self.label.setObjectName("label")
self.formLayout.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.label)
self.lineEdit = QtWidgets.QLineEdit(self.widget)
self.lineEdit.setObjectName("lineEdit")
self.formLayout.setWidget(0, QtWidgets.QFormLayout.FieldRole, self.lineEdit)
self.label_2 = QtWidgets.QLabel(self.widget)
self.label_2.setObjectName("label_2")
self.formLayout.setWidget(2, QtWidgets.QFormLayout.LabelRole, self.label_2)
self.comboBox = QtWidgets.QComboBox(self.widget)
self.comboBox.setObjectName("comboBox")
self.formLayout.setWidget(2, QtWidgets.QFormLayout.FieldRole, self.comboBox)
self.label_3 = QtWidgets.QLabel(self.widget)
self.label_3.setObjectName("label_3")
self.formLayout.setWidget(3, QtWidgets.QFormLayout.LabelRole, self.label_3)
self.lineEdit_2 = QtWidgets.QLineEdit(self.widget)
self.lineEdit_2.setObjectName("lineEdit_2")
self.formLayout.setWidget(3, QtWidgets.QFormLayout.FieldRole, self.lineEdit_2)
self.label_4 = QtWidgets.QLabel(self.widget)
self.label_4.setObjectName("label_4")
self.formLayout.setWidget(1, QtWidgets.QFormLayout.LabelRole, self.label_4)
self.comboBox_2 = QtWidgets.QComboBox(self.widget)
self.comboBox_2.setObjectName("comboBox_2")
self.formLayout.setWidget(1, QtWidgets.QFormLayout.FieldRole, self.comboBox_2)
self.label_5 = QtWidgets.QLabel(self.widget)
self.label_5.setObjectName("label_5")
self.formLayout.setWidget(4, QtWidgets.QFormLayout.LabelRole, self.label_5)
self.checkBox = QtWidgets.QCheckBox(self.widget)
self.checkBox.setText("")
self.checkBox.setObjectName("checkBox")
self.formLayout.setWidget(4, QtWidgets.QFormLayout.FieldRole, self.checkBox)
self.widget1 = QtWidgets.QWidget(Dialog)
self.widget1.setGeometry(QtCore.QRect(60, 200, 231, 25))
self.widget1.setObjectName("widget1")
self.horizontalLayout = QtWidgets.QHBoxLayout(self.widget1)
self.horizontalLayout.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout.setObjectName("horizontalLayout")
self.pushButton = QtWidgets.QPushButton(self.widget1)
self.pushButton.setObjectName("pushButton")
self.horizontalLayout.addWidget(self.pushButton)
self.pushButton_2 = QtWidgets.QPushButton(self.widget1)
self.pushButton_2.setObjectName("pushButton_2")
self.horizontalLayout.addWidget(self.pushButton_2)
self.setWindowIcon(QIcon(r'Z:\Data_Visualization\venv\qrc\icons\icon.png'))
self.retranslateUi(Dialog)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self, Dialog):
_translate = QtCore.QCoreApplication.translate
Dialog.setWindowTitle(_translate("Dialog", "Dialog"))
self.label.setText(_translate("Dialog", "命名:"))
self.label_2.setText(_translate("Dialog", "分类选择:"))
self.label_3.setText(_translate("Dialog", "分类依据:"))
self.label_4.setText(_translate("Dialog", "数据集:"))
self.label_5.setText(_translate("Dialog", "是否空心:"))
self.pushButton.setText(_translate("Dialog", "绘制"))
self.pushButton_2.setText(_translate("Dialog", "取消"))
| [
"[email protected]"
] | |
dc30afd2f8a8872d158e01edec6df40e305b50a3 | eff5bde8be20945406610e99ad5e19da418f3f4e | /alphazero/agent/agents.py | da092f3e8e270a43644c9e4e0e9984d18910ba27 | [
"MIT"
] | permissive | linhongbin-ws/alphazero-gym | b961a46ab8b3568705ec5bf2fada622a22d166c7 | e08c58e4563404a2c02d678dd087611b12091c2b | refs/heads/master | 2023-07-17T23:35:38.515372 | 2021-09-08T06:24:53 | 2021-09-08T06:24:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 22,362 | py | import random
from collections import defaultdict
from typing import Any, Dict, Tuple, Union
from abc import ABC, abstractmethod
import torch
import torch.nn.functional as F
from torch.nn.utils import clip_grad_norm
import numpy as np
import gym
import hydra
from omegaconf.dictconfig import DictConfig
from alphazero.helpers import stable_normalizer
from alphazero.agent.buffers import ReplayBuffer
from alphazero.agent.losses import A0CLoss
from alphazero.search.mcts import MCTSDiscrete
class Agent(ABC):
"""Abstract base class for the AlphaZero agent.
Defines the interface and some common methods for the discrete and continuous agent.
Attributes
----------
device: torch.device
Torch device. Can be either CPU or cuda.
nn: Union[DiscretePolicy, DiagonalNormalPolicy, DiagonalGMMPolicy, GeneralizedBetaPolicy]
Neural network policy used by this agent.
mcts: Union[MCTSDiscrete, MCTSContinuous]
Tree search algorithm. Continuous MCTS used progressive widening.
loss: Union[AlphaZeroLoss, A0CLoss, A0CLossTuned]
Loss object to train the policy.
optimizer: torch.optim.Optimizer
Pytorch optimizer object for performing gradient descent.
final_selection: str
String indicating how the final action should be chosen. Can be either "max_visit"
or "max_value".
train_epochs: int
Number of training epochs per episode.
clip: float
Value for gradient clipping.
"""
def __init__(
self,
policy_cfg: DictConfig,
loss_cfg: DictConfig,
mcts_cfg: DictConfig,
optimizer_cfg: DictConfig,
final_selection: str,
train_epochs: int,
grad_clip: float,
device: str,
) -> None:
"""Initializer for common attributes of all agent instances.
Parameters
----------
policy_cfg: DictConfig
Hydra configuration object for the policy.
loss_cfg: DictConfig
Hydra configuration object for the loss.
mcts_cfg: DictConfig
Hydra configuration object for the MCTS.
optimizer_cfg: DictConfig
Hydra configuration object for the SGD optimizer.
final_selection: str
String identifier for the final selection policy. Can be either "max_visit"
or "max_value".
train_epochs: int
Number of training epochs per episode step.
grad_clip: float
Gradient clipping value.
device: str
Device used to train the network. Can be either "cpu" or "cuda".
"""
# instantiate network
self.device = torch.device(device)
self.nn = hydra.utils.call(policy_cfg).to(torch.device(device))
self.mcts = hydra.utils.instantiate(mcts_cfg, model=self.nn)
self.loss = hydra.utils.instantiate(loss_cfg).to(self.device)
self.optimizer = hydra.utils.instantiate(
optimizer_cfg, params=self.nn.parameters()
)
self.final_selection = final_selection
self.train_epochs = train_epochs
self.clip = grad_clip
@abstractmethod
def act(
self,
) -> Tuple[Any, np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
"""Interface for the act method (interaction with the environment)."""
...
@abstractmethod
def update(
self, obs: Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray]
) -> Dict[str, float]:
"""Interface for a single gradient descent update step."""
...
@property
def action_dim(self) -> int:
"""Returns the dimensionality of the action space as int."""
return self.nn.action_dim
@property
def state_dim(self) -> int:
"""Returns the dimensionality of the state space as int."""
return self.nn.state_dim
@property
def n_hidden_layers(self) -> int:
"""Returns the number of hidden layers in the policy network as int."""
return self.nn.n_hidden_layers
@property
def n_hidden_units(self) -> int:
"""Computes the total number of hidden units and returns them as int."""
return self.nn.n_hidden_units
@property
def n_rollouts(self) -> int:
"""Returns the number of MCTS search iterations per environment step."""
return self.mcts.n_rollouts
@property
def learning_rate(self) -> float:
"""Float learning rate of the optimizer."""
        # torch optimizers expose hyper-parameters through param_groups,
        # not as a plain attribute
        return self.optimizer.param_groups[0]["lr"]
@property
def c_uct(self) -> float:
"""Constant (float) in the MCTS selection policy weighing the exploration term (UCTS constant)."""
return self.mcts.c_uct
@property
def gamma(self) -> float:
"""Returns the MCTS discount factor as float."""
return self.mcts.gamma
def reset_mcts(self, root_state: np.ndarray) -> None:
"""Reset the MCTS by setting the root node to a target environment state.
Parameters
----------
root_state: np.ndarray
Environment state defining the new root node.
"""
self.mcts.root_node = None
self.mcts.root_state = root_state
def train(self, buffer: ReplayBuffer) -> Dict[str, Any]:
"""Implementation of a training loop for the neural network.
The training loop is executed after each environment episode. It is the same
for both continuous and discrete agents. Differences are in the update method
which must be implemented for each agent individually.
Parameters
----------
buffer: ReplayBuffer
Instance of the replay buffer class containing the training experiences.
Returns
-------
Dict[str, Any]
Dictionary holding the values of all loss components as float. Keys are the names
of the loss components.
"""
buffer.reshuffle()
running_loss: Dict[str, Any] = defaultdict(float)
for epoch in range(self.train_epochs):
for batches, obs in enumerate(buffer):
loss = self.update(obs)
for key in loss.keys():
running_loss[key] += loss[key]
        # normalize in place; rebinding the loop variable (as the original
        # `for val in running_loss.values(): val = val / ...` did) never
        # changes the dictionary entries
        for key in running_loss:
            running_loss[key] = running_loss[key] / (batches + 1)
        return running_loss
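# Hypothetical wiring sketch (editorial; environment and hydra-config setup are
# outside this module):
#   agent = DiscreteAgent(policy_cfg, mcts_cfg, loss_cfg, optimizer_cfg,
#                         final_selection="max_visit", train_epochs=10,
#                         grad_clip=1.0, temperature=1.0, device="cpu")
#   agent.reset_mcts(Env.reset())
#   action, state, actions, counts, Qs, V = agent.act(Env)
#   ...append these to a ReplayBuffer, then: losses = agent.train(buffer)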
class DiscreteAgent(Agent):
"""Implementation of an AlphaZero agent for discrete action spaces.
The Discrete agent handles execution of the MCTS as well as network training.
It interacts with the environment through the act method which executes the search
and returns the training data.
Implements an update step for the discrete algorithm is in the update method.
Attributes
----------
temperature : float
Temperature parameter for the normalization procedure in the action selection.
"""
def __init__(
self,
policy_cfg: DictConfig,
mcts_cfg: DictConfig,
loss_cfg: DictConfig,
optimizer_cfg: DictConfig,
final_selection: str,
train_epochs: int,
grad_clip: float,
temperature: float,
device: str,
) -> None:
"""Constructor for the discrete agent.
Delegates the initialization of components to the ABC constructor.
Parameters
----------
policy_cfg: DictConfig
Hydra configuration object for the policy.
loss_cfg: DictConfig
Hydra configuration object for the loss.
mcts_cfg: DictConfig
Hydra configuration object for the MCTS.
optimizer_cfg: DictConfig
Hydra configuration object for the SGD optimizer.
final_selection: str
String identifier for the final selection policy. Can be either "max_visit"
or "max_value".
train_epochs: int
Number of training epochs per episode step.
grad_clip: float
Gradient clipping value.
temperature: float
Temperature parameter for normalizing the visit counts in the final
selection policy.
device: str
Device used to train the network. Can be either "cpu" or "cuda".
"""
super().__init__(
policy_cfg=policy_cfg,
loss_cfg=loss_cfg,
mcts_cfg=mcts_cfg,
optimizer_cfg=optimizer_cfg,
final_selection=final_selection,
train_epochs=train_epochs,
grad_clip=grad_clip,
device=device,
)
assert isinstance(self.mcts, MCTSDiscrete)
# initialize values
self.temperature = temperature
def act( # type: ignore[override]
self,
Env: gym.Env,
deterministic: bool = False,
) -> Tuple[Any, np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
"""Main interface method for the agent to interact with the environment.
The act method wraps execution of the MCTS search and final action selection.
It also returns the statistics at the root node for network training.
The choice of the action to be executed can be either based on visitation counts
or on action values. Through the deterministic flag it can be specified if this
        choice is sampled from the visitation/action value distribution.
Parameters
----------
Env: gym.Env
Gym environment from which the MCTS should be executed.
deterministic: bool = False
If True, the action with the highest visitation count/action value is executed
            in the environment. If False, the final action is sampled from the visitation count
or action value distribution.
Returns
-------
Tuple[Any, np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray]
A tuple containing the action to be executed in the environment and root node
training information. Elements are:
- action: MCTS-improved action to be executed in the environment.
- state: Root node state vector.
- actions: Root node child actions.
- counts: Visitation counts at the root node.
- Qs: Action values at the root node.
- V: Value target returned from the MCTS.
"""
self.mcts.search(Env=Env)
state, actions, counts, Qs, V = self.mcts.return_results(self.final_selection)
if self.final_selection == "max_value":
# select final action based on max Q value
pi = stable_normalizer(Qs, self.temperature)
action = pi.argmax() if deterministic else np.random.choice(len(pi), p=pi)
else:
# select the final action based on visit counts
pi = stable_normalizer(counts, self.temperature)
action = pi.argmax() if deterministic else np.random.choice(len(pi), p=pi)
return action, state, actions, counts, Qs, V
def mcts_forward(self, action: int, node: np.ndarray) -> None:
"""Moves the MCTS root node to the actually selected node.
Using the selected node as future root node implements tree reuse.
Parameters
----------
action: int
Action that has been selected in the environment.
node: np.ndarray
Environment state for the new root node.
"""
self.mcts.forward(action, node)
def update(
self, obs: Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray]
) -> Dict[str, float]:
"""Performs a gradient descent update step.
This is the main training method for the neural network. Given a batch of observations
from the replay buffer, it uses the network, optimizer and loss attributes of
this instance to perform a single update step within the train method.
Parameters
----------
obs: Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray]
Batch of observations. Contains:
- states: Root node states.
- actions: Selected actions at each root node state.
- counts: Visitation counts for the actions at each root state.
- Qs: Action values at the root node (currently unused).
- V_target: Improved MCTS value targets.
Returns
-------
Dict[str, float]
A dictionary where the keys are the name of a loss component (full loss, policy loss, value loss, entropy loss)
and the values are the scalar loss values.
"""
# zero out gradients
for param in self.nn.parameters():
param.grad = None
# Qs are currently unused in update setp
states: np.ndarray
actions: np.ndarray
counts: np.ndarray
V_target: np.ndarray
states, actions, counts, _, V_target = obs
states_tensor = torch.from_numpy(states).float().to(self.device)
values_tensor = (
torch.from_numpy(V_target).unsqueeze(dim=1).float().to(self.device)
)
if isinstance(self.loss, A0CLoss):
actions_tensor = torch.from_numpy(actions).float().to(self.device)
# regularize the counts to always be greater than 0
# this prevents the logarithm from producing nans in the next step
counts += 1
counts_tensor = torch.from_numpy(counts).float().to(self.device)
log_probs, entropy, V_hat = self.nn.get_train_data(
states_tensor, actions_tensor
)
loss_dict = self.loss(
log_probs=log_probs,
counts=counts_tensor,
entropy=entropy,
V=values_tensor,
V_hat=V_hat,
)
else:
action_probs_tensor = F.softmax(
torch.from_numpy(counts).float(), dim=-1
).to(self.device)
pi_logits, V_hat = self.nn(states_tensor)
loss_dict = self.loss(pi_logits, action_probs_tensor, V_hat, values_tensor)
loss_dict["loss"].backward()
if self.clip:
clip_grad_norm(self.nn.parameters(), self.clip)
self.optimizer.step()
info_dict = {key: float(value) for key, value in loss_dict.items()}
return info_dict
class ContinuousAgent(Agent):
"""Implementation of an A0C agent for continuous action spaces.
The Continuous agent handles execution of the MCTS as well as network training.
It interacts with the environment through the act method which executes the search
and returns the training data.
Implements an update step for the A0C loss in the update method.
The differences between the continuous agent and the discrete agent are:
- The continuous agent uses an MCTS with progressive widening.
- Only the A0C loss and the tuned A0C loss work for this agent.
- The policy network must use either a normal distribution, a GMM or a Beta distribution.
Attributes
----------
temperature : float
Temperature parameter for the normalization procedure in the action selection.
"""
def __init__(
self,
policy_cfg: DictConfig,
mcts_cfg: DictConfig,
loss_cfg: DictConfig,
optimizer_cfg: DictConfig,
final_selection: str,
epsilon: float,
train_epochs: int,
grad_clip: float,
device: str,
) -> None:
"""Constructor for the discrete agent.
Delegates the initialization of components to the ABC constructor.
Parameters
----------
policy_cfg: DictConfig
Hydra configuration object for the policy.
loss_cfg: DictConfig
Hydra configuration object for the loss.
mcts_cfg: DictConfig
Hydra configuration object for the MCTS.
optimizer_cfg: DictConfig
Hydra configuration object for the SGD optimizer.
final_selection: str
String identifier for the final selection policy. Can be either "max_visit"
or "max_value".
epsilon: float
Epsilon value for epsilon-greedy action selection. Epsilon-greedy is disabled
when this value is set to 0.
train_epochs: int
Number of training epochs per episode step.
grad_clip: float
Gradient clipping value.
device: str
Device used to train the network. Can be either "cpu" or "cuda".
"""
super().__init__(
policy_cfg=policy_cfg,
loss_cfg=loss_cfg,
mcts_cfg=mcts_cfg,
optimizer_cfg=optimizer_cfg,
final_selection=final_selection,
train_epochs=train_epochs,
grad_clip=grad_clip,
device=device,
)
self.epsilon = epsilon
@property
def action_limit(self) -> float:
"""Returns the action bound for this agent as float."""
return self.nn.act_limit
def epsilon_greedy(self, actions: np.ndarray, values: np.ndarray) -> np.ndarray:
"""Epsilon-greedy implementation for the final action selection.
Parameters
----------
actions: np.ndarray
Actions to choose from.
values: np.ndarray
Values according which the best action is selected. Can be either visitation
counts or action values.
Returns
-------
np.ndarray
Action chosen according to epsilon-greedy.
"""
if random.random() < self.epsilon:
return np.random.choice(actions)[np.newaxis]
else:
return actions[values.argmax()][np.newaxis]
def act( # type: ignore[override]
self,
Env: gym.Env,
) -> Tuple[Any, np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
"""Main interface method for the agent to interact with the environment.
The act method wraps execution of the MCTS search and final action selection.
It also returns the statistics at the root node for network training.
The choice of the action to be executed can be either the most visited action or
the action with the highest action value. If the epsilon > 0 is specified when
instantiating this agent, actions are selected using the epsilon-greedy algorithm.
Parameters
----------
Env: gym.Env
Gym environment from which the MCTS should be executed.
Returns
-------
Tuple[Any, np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray]
A tuple containing the action to be executed in the environment and root node
training information. Elements are:
- action: MCTS-improved action to be executed in the environment.
- state: Root node state vector.
- actions: Root node child actions.
- counts: Visitation counts at the root node.
- Qs: Action values at the root node.
- V: Value target returned from the MCTS.
"""
self.mcts.search(Env=Env)
state, actions, counts, Qs, V = self.mcts.return_results(self.final_selection)
if self.final_selection == "max_value":
if self.epsilon == 0:
# select the action with the best action value
action = actions[Qs.argmax()][np.newaxis]
else:
action = self.epsilon_greedy(actions=actions, values=Qs)
else:
if self.epsilon == 0:
# select the action that was visited most
action = actions[counts.argmax()][np.newaxis]
else:
action = self.epsilon_greedy(actions=actions, values=counts)
return action, state, actions, counts, Qs, V
def update(
self, obs: Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray]
) -> Dict[str, float]:
"""Performs a gradient descent update step.
This is the main training method for the neural network. Given a batch of observations
from the replay buffer, it uses the network, optimizer and loss attributes of
this instance to perform a single update step within the train method.
Parameters
----------
obs: Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray]
Batch of observations. Contains:
- states: Root node states.
- actions: Selected actions at each root node state.
- counts: Visitation counts for the actions at each root state.
- Qs: Action values at the root node (currently unused).
- V_target: Improved MCTS value targets.
Returns
-------
Dict[str, float]
A dictionary where the keys are the name of a loss component (full loss, policy loss, value loss, entropy loss)
and the values are the scalar loss values.
"""
# zero out gradients
for param in self.nn.parameters():
param.grad = None
# Qs are currently unused in update
states: np.ndarray
actions: np.ndarray
counts: np.ndarray
V_target: np.ndarray
states, actions, counts, _, V_target = obs
actions_tensor = torch.from_numpy(actions).float().to(self.device)
states_tensor = torch.from_numpy(states).float().to(self.device)
counts_tensor = torch.from_numpy(counts).float().to(self.device)
values_tensor = (
torch.from_numpy(V_target).unsqueeze(dim=1).float().to(self.device)
)
log_probs, entropy, V_hat = self.nn.get_train_data(
states_tensor, actions_tensor
)
loss_dict = self.loss(
log_probs=log_probs,
counts=counts_tensor,
entropy=entropy,
V=values_tensor,
V_hat=V_hat,
)
loss_dict["loss"].backward()
if self.clip:
clip_grad_norm(self.nn.parameters(), self.clip)
self.optimizer.step()
info_dict = {key: float(value) for key, value in loss_dict.items()}
return info_dict
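# Usage sketch (illustrative, not from the original repo). The Hydra configs
# normally come from the project's config files; `make_cfgs()` below is a
# hypothetical stand-in for loading them.
#
#     env = gym.make("Pendulum-v0")            # assumed continuous-action env
#     agent = ContinuousAgent(*make_cfgs(),
#                             final_selection="max_visit", epsilon=0.05,
#                             train_epochs=1, grad_clip=1.0, device="cpu")
#     action, state, actions, counts, Qs, V = agent.act(env)
#     # a replay buffer would batch several of these tuples before update():
#     info = agent.update((state[None], action[None], counts[None], Qs[None],
#                          np.array([V])))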
# ===== repo: Mustufain/Convolution-Neural-Network- | path: layers/relu.py | license: none =====
import numpy as np
class Relu(object):
def __init__(self):
self.params = []
def forward(self, Z):
"""
        Forward propagation of the ReLU layer.
Parameters:
Z -- Input data -- numpy array of shape (m, n_H_prev, n_W_prev, n_C_prev)
Returns:
        A -- Activations of the ReLU layer -- numpy array of shape (m, n_H_prev, n_W_prev, n_C_prev)
"""
self.Z = Z
A = np.maximum(0, Z) # element-wise
return A
def backward(self, dA):
"""
        Backward propagation of the ReLU layer.
f′(x) = {1 if x > 0}
{0 otherwise}
Parameters:
dA -- gradient of cost with respect to the output of the relu layer,
same shape as A
Returns:
dZ -- gradient of cost with respect to the input of the relu layer,
same shape as Z
"""
Z = self.Z
dZ = np.array(dA, copy=True)
dZ[Z <= 0] = 0
assert (dZ.shape == self.Z.shape)
return dZ, []
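# Quick self-check sketch (added for illustration): the forward pass clips
# negatives to zero, and the backward pass routes gradients only through
# positions where the stored input Z was positive.
if __name__ == "__main__":
    relu = Relu()
    Z = np.array([[-1.0, 2.0], [3.0, -4.0]])
    print(relu.forward(Z))            # [[0. 2.] [3. 0.]]
    dZ, _ = relu.backward(np.ones_like(Z))
    print(dZ)                         # [[0. 1.] [1. 0.]]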
# ===== repo: acdh-oeaw/dig_ed_cat | path: places/apis_views.py | license: MIT =====
from rest_framework import viewsets
from .serializers import PlaceSerializer, AlternativeNameSerializer
from .models import Place, AlternativeName
class PlaceViewSet(viewsets.ModelViewSet):
queryset = Place.objects.all()
serializer_class = PlaceSerializer
    depth = 2  # NOTE: 'depth' is a ModelSerializer Meta option; it has no effect on a ViewSet
class AlternativNameViewSet(viewsets.ModelViewSet):
queryset = AlternativeName.objects.all()
serializer_class = AlternativeNameSerializer
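# Hypothetical URL wiring (not part of the original file) showing how these
# viewsets are typically exposed from a urls.py; the route prefixes are
# assumptions.
from rest_framework import routers

router = routers.DefaultRouter()
router.register(r'places', PlaceViewSet)
router.register(r'alternative-names', AlternativNameViewSet)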
# ===== repo: evansyeung/blockchain_demo_fund_me | path: tests/test_fund_me.py | license: none =====
from brownie import network, accounts, exceptions
from scripts.helpful_scripts import LOCAL_BLOCKCHAIN_ENVIRONMENTS, get_account
from scripts.deploy import deploy_fund_me
import pytest
def test_can_fund_and_withdraw():
account = get_account()
fund_me = deploy_fund_me()
entrance_fee = fund_me.getEntranceFee() + 100 # Add 100 just in case we need a little bit more fee
tx = fund_me.fund({"from": account, "value": entrance_fee})
tx.wait(1)
# Check the amount funded against the amount at account.address
assert fund_me.addressToAmountFunded(account.address) == entrance_fee
tx2 = fund_me.withdraw({"from": account})
tx2.wait(1)
assert fund_me.addressToAmountFunded(account.address) == 0
def test_only_owner_can_withdraw():
if network.show_active() not in LOCAL_BLOCKCHAIN_ENVIRONMENTS:
pytest.skip("only for local testing")
account = get_account()
fund_me = deploy_fund_me()
# Gives us random account
bad_actor = accounts.add()
    # Tells our test that if withdraw() reverts, then it's the expected result
with pytest.raises(exceptions.VirtualMachineError):
fund_me.withdraw({"from": bad_actor})
# ===== repo: AndZapata/AirBnB_clone_v2 | path: web_flask/7-states_list.py | license: none =====
#!/usr/bin/python3
''' starts a Flask web application '''
from flask import Flask, render_template
from models import storage
app = Flask(__name__)
@app.route('/states_list', strict_slashes=False)
def states_hbnb():
''' display “states HBNB!” '''
list_state = []
for i in storage.all("State").values():
list_state.append([i.id, i.name])
return render_template('7-states_list.html', list_state=list_state)
@app.teardown_appcontext
def state_close(error):
''' close the session '''
storage.close()
if __name__ == '__main__':
app.run(host='0.0.0.0')
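# Illustrative note (assumption about the data model): list_state holds
# [id, name] pairs, e.g. [["421a...", "Alabama"], ...], which the
# 7-states_list.html template loops over to render one item per State.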
# ===== repo: monsterzzz/pythonResult | path: myTest2/myApp/kuaidi.py | license: none =====
# -*- coding: utf-8 -*-
import urllib.request
import urllib.parse
import http.cookiejar
import json
import random
import time
# Look up the courier company code for a tracking number
def get_comCode(postid):
url_xhr="http://www.kuaidi100.com/autonumber/autoComNum?"
req = urllib.request.Request(url_xhr)
    # HTTP headers
ori_headers = {
'Host': 'www.kuaidi100.com',
'Proxy-Connection': 'keep-alive',
'Accept': 'application/json, text/javascript, */*; q=0.01',
'X-Requested-With': 'XMLHttpRequest',
'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36',
'DNT': '1',
'Referer': 'http://www.kuaidi100.com/',
        'Accept-Language': 'zh-CN,zh;q=0.8,en-US;q=0.6,en;q=0.4',
'Origin': 'http://www.kuaidi100.com',
'Content-Length': '203'
}
    # query-string parameters
    form_data = urllib.parse.urlencode({ # convert the dict into a URL-encoded query string
        'text': postid,
    }).encode() # encode to bytes
    # attach the HTTP headers to the request
for key,value in ori_headers.items():
req.add_header(key,value)
    # handle cookies
    cj = http.cookiejar.CookieJar() # cj stores the cookies
    pro = urllib.request.HTTPCookieProcessor(cj) # build a cookie handler from the HTTPCookieProcessor object
    opener = urllib.request.build_opener(pro)
    op = opener.open(req,form_data) # call open() to send the request and its parameters
    data_bytes = op.read() # read the response data
    data_str = bytes.decode(data_bytes) # decode the bytes into a string
    ori_content = json.loads(data_str) # parse the JSON into a dict
    inner_content = ori_content['auto'][0]['comCode'] # take comCode from the first dict in the 'auto' list, i.e. the courier company
time.sleep(1)
return inner_content
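# Illustrative (assumed) shape of the autoComNum response parsed above:
# {"auto": [{"comCode": "shunfeng", ...}, ...]} -- only the first candidate
# courier code is used.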
def get_content(postid):
url_xhr = "http://www.kuaidi100.com/query?"
    req = urllib.request.Request(url_xhr) # request object
    # HTTP headers
ori_headers = {
'Host' : 'www.kuaidi100.com',
'Connection' : 'keep-alive',
'Accept' : '*/*',
'Origin' : 'http://www.newrank.cn',
'X-Requested-With': 'XMLHttpRequest',
'User-Agent' : 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36',
'Content-Type':'application/x-www-form-urlencoded; charset=UTF-8',
'DNT':'1',
'Referer': 'http://www.kuaidi100.com/',
'Accept-Language': 'zh-CN,zh;q=0.8',
}
    # build the remaining query-string parameters
    temp = str(random.random())
    com_code = get_comCode(postid)  # renamed from 'type' to avoid shadowing the builtin
    form_data = urllib.parse.urlencode({ # convert into a URL-encoded query string
        'type' : com_code,
        'postid' : postid,
        'id':'1',
        'valicode':'',
        'temp':temp,
    }).encode() # encode to bytes
    # attach the HTTP headers to the request
    for key, value in ori_headers.items():
        req.add_header(key, value)
    # handle cookies
cj = http.cookiejar.CookieJar()
pro = urllib.request.HTTPCookieProcessor(cj)
opener = urllib.request.build_opener(pro)
op = opener.open(req, form_data)
data_bytes = op.read()
data_str = bytes.decode(data_bytes)
    ori_content = json.loads(data_str) # parse the JSON into a dict
inner_content = ori_content['data']
return inner_content,postid
def add_postid():
    postid = input("Enter the tracking number to query: ")  # renamed from 'id' to avoid shadowing the builtin
    if postid is not None:
        return postid
def main():
postid = add_postid()
    print('Loading... ...')
    time.sleep(5)
    print('About to query tracking number ' + postid)
    try:
        content, postid = get_content(postid)
        print('Tracking info for tracking number ' + postid + ':')
        for x in content:
            print(x['time'] + x['context'])
            print('')
    except Exception:
        print('Invalid tracking number')
if __name__ == '__main__':
    main()
# ===== repo: nathanfitz-coder/practicallearning | path: Project1/mymain_orig.py | license: none =====
import pandas as pd
import xgboost as xgb
import numpy as np
import time
from scipy.stats import skew
from scipy.special import boxcox1p
from sklearn.linear_model import ElasticNet
def test_train(j):
data = pd.read_csv('Ames_data.csv')
testIDs = pd.read_table("project1_testIDs.dat",header=None,sep = ' ').values
testidx = data.index.isin(testIDs[:,j])
train = data[~testidx]
test = data[testidx]
train.to_csv("train.csv",index=False)
test.drop(['Sale_Price'],axis=1).to_csv("test.csv",index=False)
def grid_elastic(train_x, train_y, test_x, test_y):
alphas = []
lambdas = []
scores = []
for i in range(1000):
a = np.random.random()/1000
l = np.random.random()
alphas.append(a)
lambdas.append(l)
regr = ElasticNet(alpha=a, l1_ratio=l, random_state=42,normalize=True)
#regr = make_pipeline(RobustScaler(), ElasticNet(alpha=a, l1_ratio=l, random_state=42))
regr.fit(train_x, train_y)
y_pred = regr.predict(test_x)
scores.append(np.sqrt(np.mean(np.square(y_pred - test_y))))
bestidx = np.argmin(np.array(scores))
best_a = alphas[bestidx]
best_l = lambdas[bestidx]
best_score = scores[bestidx]
return best_a, best_l, best_score
def all_test_splits(model='elastic'):
split_score = []
if model == 'elastic':
for split in range(0,10):
test_train(split)
a, l, returnf, best_score = main_elastic()
split_score.append(best_score)
else:
for split in range(0,10):
test_train(split)
returnf, best_score = main_xgb()
split_score.append(best_score)
return split_score
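# Example (illustrative): summarize performance across all ten test splits.
# scores = all_test_splits(model='elastic')
# print(np.mean(scores), np.min(scores), np.max(scores))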
def main_elastic(a = 0.00004, l = 0.91, write_pred=False):
train = pd.read_csv('train.csv',index_col='PID')
test = pd.read_csv('test.csv',index_col='PID')
alldata = pd.concat([train, test])
alldata.Garage_Yr_Blt.fillna(alldata.Year_Built, inplace=True)
#MSSubClass=The building class
alldata['MS_SubClass'] = alldata['MS_SubClass'].apply(str)
#Changing OverallCond into a categorical variable
alldata['Overall_Cond'] = alldata['Overall_Cond'].astype(str)
numeric_feats = alldata.dtypes[alldata.dtypes != "object"].index
# Check the skew of all numerical features
skewed_feats = alldata[numeric_feats].apply(lambda x: skew(x.dropna())).sort_values(ascending=False)
#print("\nSkew in numerical features: \n")
skewness = pd.DataFrame({'Skew' :skewed_feats})
skewness = skewness[abs(skewness) > 0.75]
#print("There are {} skewed numerical features to Box Cox transform".format(skewness.shape[0]))
skewed_features = skewness.index
lam = 0.15
for feat in skewed_features:
alldata[feat] = boxcox1p(alldata[feat], lam)
#Adding total square footage
alldata['Total_SF'] = alldata['Total_Bsmt_SF'] + alldata['First_Flr_SF'] + alldata['Second_Flr_SF']
drop_vars = ['Street', 'Utilities', 'Condition_2', 'Roof_Matl', 'Heating', 'Pool_QC', 'Misc_Feature', 'Low_Qual_Fin_SF', 'Pool_Area', 'Longitude','Latitude']
alldata = alldata.drop(columns=drop_vars)
quant_vars = ["Lot_Frontage","Total_SF", "Lot_Area", "Mas_Vnr_Area", "BsmtFin_SF_2", "Bsmt_Unf_SF", "Total_Bsmt_SF", "Second_Flr_SF", 'First_Flr_SF', "Gr_Liv_Area", "Garage_Area", "Wood_Deck_SF", "Open_Porch_SF", "Enclosed_Porch", "Three_season_porch", "Screen_Porch", "Misc_Val"]
for var in quant_vars:
q95 = np.quantile(alldata[var],0.95)
        alldata.loc[alldata[var] > q95, var] = q95  # cap outliers at the 95th percentile (avoids chained assignment)
alldata = pd.get_dummies(alldata,drop_first=True)
train_x = alldata[alldata.index.isin(train.index)]
test_x = alldata[alldata.index.isin(test.index)]
# train_y = train_x.Sale_Price
# test_y = test_x.Sale_Price
train_y = train.Sale_Price
test_y = test.Sale_Price
train_x = train_x.drop(['Sale_Price'],axis=1)
test_x = test_x.drop(['Sale_Price'],axis=1)
train_y = np.log(train_y)
test_y = np.log(test_y)
# best_a, best_l, best_score = grid_elastic(train_x, train_y, test_x, test_y)
# a = best_a
# l = best_l
regr = ElasticNet(alpha=a, l1_ratio=l, random_state=42,normalize=True)
regr.fit(train_x, train_y)
y_pred = regr.predict(test_x)
returnf = pd.DataFrame(data=np.matrix.transpose(np.array([test_x.index.values,np.exp(y_pred)])), columns=["PID", "Sale_Price"])
if write_pred:
#np.savetxt(fname='mysubmission1.txt',X=y_pred)
returnf.astype({'PID': 'int32'}).to_csv('mysubmission1.txt',index=False)
#return returnf
return a, l, returnf, np.sqrt(np.mean(np.square(y_pred - test_y)))
def main_xgb(write_pred=False):
train = pd.read_csv('train.csv',index_col='PID')
test = pd.read_csv('test.csv',index_col='PID')
alldata = pd.concat([train, test])
alldata.Garage_Yr_Blt.fillna(alldata.Year_Built, inplace=True)
#MSSubClass=The building class
#alldata['MS_SubClass'] = alldata['MS_SubClass'].apply(str)
#Changing OverallCond into a categorical variable
#alldata['Overall_Cond'] = alldata['Overall_Cond'].astype(str)
numeric_feats = alldata.dtypes[alldata.dtypes != "object"].index
# Check the skew of all numerical features
skewed_feats = alldata[numeric_feats].apply(lambda x: skew(x.dropna())).sort_values(ascending=False)
#print("\nSkew in numerical features: \n")
skewness = pd.DataFrame({'Skew' :skewed_feats})
skewness = skewness[abs(skewness) > 0.75]
#print("There are {} skewed numerical features to Box Cox transform".format(skewness.shape[0]))
skewed_features = skewness.index
lam = 0.15
for feat in skewed_features:
alldata[feat] = boxcox1p(alldata[feat], lam)
#Adding total square footage
alldata['Total_SF'] = alldata['Total_Bsmt_SF'] + alldata['First_Flr_SF'] + alldata['Second_Flr_SF']
# drop_vars = ['Street', 'Utilities', 'Condition_2', 'Roof_Matl', 'Heating', 'Pool_QC', 'Misc_Feature', 'Low_Qual_Fin_SF', 'Pool_Area', 'Longitude','Latitude']
# alldata = alldata.drop(columns=drop_vars)
object_cols = list(alldata.dtypes[alldata.dtypes == 'object'].index)
for col in object_cols:
codes, uniques = pd.factorize(alldata[col])
alldata[col]=codes
quant_vars = ["Lot_Frontage","Total_SF", "Lot_Area", "Mas_Vnr_Area", "BsmtFin_SF_2", "Bsmt_Unf_SF", "Total_Bsmt_SF", "Second_Flr_SF", 'First_Flr_SF', "Gr_Liv_Area", "Garage_Area", "Wood_Deck_SF", "Open_Porch_SF", "Enclosed_Porch", "Three_season_porch", "Screen_Porch", "Misc_Val"]
for var in quant_vars:
q95 = np.quantile(alldata[var],0.95)
        alldata.loc[alldata[var] > q95, var] = q95  # cap outliers at the 95th percentile (avoids chained assignment)
#alldata = pd.get_dummies(alldata,drop_first=True)
train_x = alldata[alldata.index.isin(train.index)]
test_x = alldata[alldata.index.isin(test.index)]
# train_y = train_x.Sale_Price
# test_y = test_x.Sale_Price
train_y = train.Sale_Price
test_y = test.Sale_Price
train_x = train_x.drop(['Sale_Price'],axis=1)
test_x = test_x.drop(['Sale_Price'],axis=1)
train_y = np.log(train_y)
test_y = np.log(test_y)
model_xgb = xgb.XGBRegressor(colsample_bytree=0.4603, gamma=0.0468,
learning_rate=0.05, max_depth=6,
min_child_weight=1.7817, n_estimators=6000,
reg_alpha=0.4640, reg_lambda=0.8571,
subsample=0.5213, silent=1,
random_state =42, nthread = -1)
model_xgb.fit(train_x, train_y, verbose=False)
y_pred = model_xgb.predict(test_x)
score=np.sqrt(np.mean(np.square(y_pred - test_y)))
returnf = pd.DataFrame(data=np.matrix.transpose(np.array([test_x.index.values.astype(int),np.exp(y_pred)])), columns=["PID", "Sale_Price"])
if write_pred:
#np.savetxt(fname='mysubmission2.txt',X=y_pred)
returnf.astype({'PID': 'int32'}).to_csv('mysubmission2.txt',index=False)
#split_score.append(np.sqrt(np.mean(np.square(y_pred - test_y))))
return returnf, score
test_train(2)
tic = time.time()
returnf, score = main_xgb(write_pred=True)
a, l, returnf, score = main_elastic(write_pred=True)
toc = time.time()
difference = int(toc - tic)
# all_test_splits(model='xgbm')
# ===== repo: alexandraback/datacollection | path: solutions_5753053697277952_0/Python/JustAMan/task1.py | license: none =====
import sys
A_NUM = ord('A')
def index2party(*idx):
return ''.join(chr(A_NUM + x) for x in idx)
def solve(p):
plan = []
while any(x > 0 for x in p):
idx1, _ = max(enumerate(p), key=lambda (i, v): v)
p[idx1] -= 1
idx2, _ = max(enumerate(p), key=lambda (i, v): v)
p[idx2] -= 1
if 2 * max(p) > sum(p):
# cannot take second step
p[idx2] += 1
plan.append(index2party(idx1))
else:
plan.append(index2party(idx1, idx2))
return ' '.join(plan)
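# Worked example (illustrative; note this is Python 2 code, e.g. the tuple
# parameter in the lambdas and xrange below): solve([2, 2]) releases one
# senator from each party per step, giving the plan "AB AB".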
def main(inFile):
with open(inFile) as inp, open(inFile.replace('.in', '.out'), 'w') as out:
T = int(inp.readline().strip())
for t in xrange(T):
N = int(inp.readline().strip())
P = [int(x) for x in inp.readline().strip().split()]
out.write('Case #%d: %s\n' % (t + 1, solve(P)))
if __name__ == '__main__':
if len(sys.argv) < 2:
sys.exit('Usage: %s input.in' % sys.argv[0])
main(sys.argv[1])
# ===== repo: status110/sonic-mgmt | path: tests/everflow/everflow_test_utilities.py | license: Apache-2.0 =====
"""Utilities for testing the Everflow feature in SONiC."""
import os
import logging
import random
import time
import ipaddr
import binascii
import pytest
import yaml
import ptf.testutils as testutils
import ptf.packet as packet
from abc import abstractmethod
from ptf.mask import Mask
from tests.common.helpers.assertions import pytest_assert
# TODO: Add support for CONFIGLET mode
CONFIG_MODE_CLI = "cli"
CONFIG_MODE_CONFIGLET = "configlet"
TEMPLATE_DIR = "everflow/templates"
EVERFLOW_RULE_CREATE_TEMPLATE = "acl-erspan.json.j2"
FILE_DIR = "everflow/files"
EVERFLOW_V4_RULES = "ipv4_test_rules.yaml"
EVERFLOW_DSCP_RULES = "dscp_test_rules.yaml"
DUT_RUN_DIR = "/tmp/everflow"
EVERFLOW_RULE_CREATE_FILE = "acl-erspan.json"
EVERFLOW_RULE_DELETE_FILE = "acl-remove.json"
@pytest.fixture(scope="module")
def setup_info(duthost, tbinfo):
"""
Gather all required test information.
Args:
duthost: DUT fixture
tbinfo: tbinfo fixture
Returns:
dict: Required test information
"""
tor_ports = []
spine_ports = []
# Gather test facts
mg_facts = duthost.minigraph_facts(host=duthost.hostname)["ansible_facts"]
switch_capability_facts = duthost.switch_capabilities_facts()["ansible_facts"]
# Get the list of T0/T2 ports
# TODO: The ACL tests do something really similar, I imagine we could refactor this bit.
for dut_port, neigh in mg_facts["minigraph_neighbors"].items():
if "T0" in neigh["name"]:
tor_ports.append(dut_port)
elif "T2" in neigh["name"]:
spine_ports.append(dut_port)
switch_capabilities = switch_capability_facts["switch_capabilities"]["switch"]
test_mirror_v4 = switch_capabilities["MIRROR"] == "true"
test_mirror_v6 = switch_capabilities["MIRRORV6"] == "true"
# NOTE: Older OS versions don't have the ACL_ACTIONS table, and those same devices
# do not support egress ACLs or egress mirroring. Once we branch out the sonic-mgmt
# repo we can remove this case.
if "201811" in duthost.os_version:
test_ingress_mirror_on_ingress_acl = True
test_ingress_mirror_on_egress_acl = False
test_egress_mirror_on_egress_acl = False
test_egress_mirror_on_ingress_acl = False
else:
test_ingress_mirror_on_ingress_acl = "MIRROR_INGRESS_ACTION" in switch_capabilities["ACL_ACTIONS|INGRESS"]
test_ingress_mirror_on_egress_acl = "MIRROR_INGRESS_ACTION" in switch_capabilities["ACL_ACTIONS|EGRESS"]
test_egress_mirror_on_egress_acl = "MIRROR_EGRESS_ACTION" in switch_capabilities["ACL_ACTIONS|EGRESS"]
test_egress_mirror_on_ingress_acl = "MIRROR_EGRESS_ACTION" in switch_capabilities["ACL_ACTIONS|INGRESS"]
# Collects a list of interfaces, their port number for PTF, and the LAGs they are members of,
# if applicable.
#
# TODO: Add a namedtuple to make the groupings more explicit
def get_port_info(in_port_list, out_port_list, out_port_ptf_id_list, out_port_lag_name):
out_port_exclude_list = []
for port in in_port_list:
if port not in out_port_list and port not in out_port_exclude_list and len(out_port_list) < 4:
ptf_port_id = str(mg_facts["minigraph_port_indices"][port])
out_port_list.append(port)
out_port_lag_name.append("Not Applicable")
for portchannelinfo in mg_facts["minigraph_portchannels"].items():
if port in portchannelinfo[1]["members"]:
out_port_lag_name[-1] = portchannelinfo[0]
for lag_member in portchannelinfo[1]["members"]:
if port == lag_member:
continue
ptf_port_id += "," + (str(mg_facts["minigraph_port_indices"][lag_member]))
out_port_exclude_list.append(lag_member)
out_port_ptf_id_list.append(ptf_port_id)
tor_dest_ports = []
tor_dest_ports_ptf_id = []
tor_dest_lag_name = []
get_port_info(tor_ports, tor_dest_ports, tor_dest_ports_ptf_id, tor_dest_lag_name)
spine_dest_ports = []
spine_dest_ports_ptf_id = []
spine_dest_lag_name = []
get_port_info(spine_ports, spine_dest_ports, spine_dest_ports_ptf_id, spine_dest_lag_name)
# TODO: Some of this can probably be tailored to the specific set of test cases (e.g.
# we don't need spine v. tor info to check match types).
#
# Also given how much info is here it probably makes sense to make a data object/named
# tuple to help with the typing.
setup_information = {
"router_mac": duthost.facts["router_mac"],
"tor_ports": tor_ports,
"spine_ports": spine_ports,
"test_mirror_v4": test_mirror_v4,
"test_mirror_v6": test_mirror_v6,
"ingress": {
"ingress": test_ingress_mirror_on_ingress_acl,
"egress": test_egress_mirror_on_ingress_acl
},
"egress": {
"ingress": test_ingress_mirror_on_egress_acl,
"egress": test_egress_mirror_on_egress_acl
},
"tor": {
"src_port": spine_ports[0],
"src_port_ptf_id": str(mg_facts["minigraph_port_indices"][spine_ports[0]]),
"dest_port": tor_dest_ports,
"dest_port_ptf_id": tor_dest_ports_ptf_id,
"dest_port_lag_name": tor_dest_lag_name
},
"spine": {
"src_port": tor_ports[0],
"src_port_ptf_id": str(mg_facts["minigraph_port_indices"][tor_ports[0]]),
"dest_port": spine_dest_ports,
"dest_port_ptf_id": spine_dest_ports_ptf_id,
"dest_port_lag_name": spine_dest_lag_name
},
"port_index_map": {
k: v
for k, v in mg_facts["minigraph_port_indices"].items()
if k in mg_facts["minigraph_ports"]
}
}
# NOTE: This is important to add since for the Policer test case regular packets
    #       and mirror packets can go to the same interface, which causes tail drop of
    #       policed packets and impacts the test case cir/cbs calculation.
#
# We are making sure regular traffic has a dedicated route and does not use
# the default route.
peer_ip, _ = get_neighbor_info(duthost, spine_dest_ports[3])
    # Disable recursive route resolution, as we have a test case which checks that
    # a better but unresolved route should not be picked up by the mirror state DB.
    # This change is triggered by SONiC PR https://github.com/Azure/sonic-buildimage/pull/5600
duthost.shell("vtysh -c \"configure terminal\" -c \"no ip nht resolve-via-default\"")
add_route(duthost, "30.0.0.1/24", peer_ip)
duthost.command("mkdir -p {}".format(DUT_RUN_DIR))
yield setup_information
duthost.command("rm -rf {}".format(DUT_RUN_DIR))
remove_route(duthost, "30.0.0.1/24", peer_ip)
duthost.shell("vtysh -c \"configure terminal\" -c \"ip nht resolve-via-default\"")
# TODO: This should be refactored to some common area of sonic-mgmt.
def add_route(duthost, prefix, nexthop):
"""
Add a route to the DUT.
Args:
duthost: DUT fixture
prefix: IP prefix for the route
nexthop: next hop for the route
"""
duthost.shell("vtysh -c \"configure terminal\" -c \"ip route {} {}\"".format(prefix, nexthop))
# TODO: This should be refactored to some common area of sonic-mgmt.
def remove_route(duthost, prefix, nexthop):
"""
Remove a route from the DUT.
Args:
duthost: DUT fixture
prefix: IP prefix to remove
nexthop: next hop to remove
"""
duthost.shell("vtysh -c \"configure terminal\" -c \"no ip route {} {}\"".format(prefix, nexthop))
# TODO: This should be refactored to some common area of sonic-mgmt.
def get_neighbor_info(duthost, dest_port, resolved=True):
"""
Get the IP and MAC of the neighbor on the specified destination port.
Args:
duthost: DUT fixture
dest_port: The port for which to gather the neighbor information
resolved: Whether to return a resolved route or not
"""
if not resolved:
return "20.20.20.100", None
mg_facts = duthost.minigraph_facts(host=duthost.hostname)["ansible_facts"]
for bgp_peer in mg_facts["minigraph_bgp"]:
if bgp_peer["name"] == mg_facts["minigraph_neighbors"][dest_port]["name"] and ipaddr.IPAddress(bgp_peer["addr"]).version == 4:
peer_ip = bgp_peer["addr"]
break
return peer_ip, duthost.shell("ip neigh show {} | awk -F\" \" \"{{print $5}}\"".format(peer_ip))["stdout"]
# TODO: This can probably be moved to a shared location in a later PR.
def load_acl_rules_config(table_name, rules_file):
with open(rules_file, "r") as f:
acl_rules = yaml.safe_load(f)
rules_config = {"acl_table_name": table_name, "rules": acl_rules}
return rules_config
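# Illustrative (assumed) shape of the returned config, matching the inputs the
# acl-erspan.json.j2 template expects:
# {"acl_table_name": "EVERFLOW", "rules": [...rules parsed from the YAML file...]}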
class BaseEverflowTest(object):
"""
Base class for setting up a set of Everflow tests.
Contains common methods for setting up the mirror session and describing the
mirror and ACL stage for the tests.
"""
OUTER_HEADER_SIZE = 38
@pytest.fixture(scope="class", params=[CONFIG_MODE_CLI])
def config_method(self, request):
"""Get the configuration method for this set of test cases.
There are multiple ways to configure Everflow on a SONiC device,
so we need to verify that Everflow functions properly for each method.
Returns:
The configuration method to use.
"""
return request.param
@pytest.fixture(scope="class")
def setup_mirror_session(self, duthost, config_method):
"""
Set up a mirror session for Everflow.
Args:
duthost: DUT fixture
Yields:
dict: Information about the mirror session configuration.
"""
session_info = self._mirror_session_info("test_session_1", duthost.facts["asic_type"])
self.apply_mirror_config(duthost, session_info, config_method)
yield session_info
self.remove_mirror_config(duthost, session_info["session_name"], config_method)
@pytest.fixture(scope="class")
def policer_mirror_session(self, duthost, config_method):
"""
Set up a mirror session with a policer for Everflow.
Args:
duthost: DUT fixture
Yields:
dict: Information about the mirror session configuration.
"""
policer = "TEST_POLICER"
# Create a policer that allows 100 packets/sec through
self.apply_policer_config(duthost, policer, config_method)
# Create a mirror session with the TEST_POLICER attached
session_info = self._mirror_session_info("TEST_POLICER_SESSION", duthost.facts["asic_type"])
self.apply_mirror_config(duthost, session_info, config_method, policer=policer)
yield session_info
# Clean up mirror session and policer
self.remove_mirror_config(duthost, session_info["session_name"], config_method)
self.remove_policer_config(duthost, policer, config_method)
def apply_mirror_config(self, duthost, session_info, config_method, policer=None):
if config_method == CONFIG_MODE_CLI:
command = "config mirror_session add {} {} {} {} {} {}" \
.format(session_info["session_name"],
session_info["session_src_ip"],
session_info["session_dst_ip"],
session_info["session_dscp"],
session_info["session_ttl"],
session_info["session_gre"])
if policer:
command += " --policer {}".format(policer)
elif config_method == CONFIG_MODE_CONFIGLET:
pass
duthost.command(command)
def remove_mirror_config(self, duthost, session_name, config_method):
if config_method == CONFIG_MODE_CLI:
command = "config mirror_session remove {}".format(session_name)
elif config_method == CONFIG_MODE_CONFIGLET:
pass
duthost.command(command)
def apply_policer_config(self, duthost, policer_name, config_method, rate_limit=100):
if config_method == CONFIG_MODE_CLI:
command = ("redis-cli -n 4 hmset \"POLICER|{}\" "
"meter_type packets mode sr_tcm cir {} cbs {} "
"red_packet_action drop").format(policer_name, rate_limit, rate_limit)
elif config_method == CONFIG_MODE_CONFIGLET:
pass
duthost.command(command)
def remove_policer_config(self, duthost, policer_name, config_method):
if config_method == CONFIG_MODE_CLI:
command = "redis-cli -n 4 del \"POLICER|{}\"".format(policer_name)
elif config_method == CONFIG_MODE_CONFIGLET:
pass
duthost.command(command)
@pytest.fixture(scope="class", autouse=True)
def setup_acl_table(self, duthost, setup_info, setup_mirror_session, config_method):
"""
Configure the ACL table for this set of test cases.
Args:
duthost: DUT fixture
setup_info: Fixture with info about the testbed setup
            setup_mirror_session: Fixture with info about the mirror session
"""
if not setup_info[self.acl_stage()][self.mirror_type()]:
pytest.skip("{} ACL w/ {} Mirroring not supported, skipping"
.format(self.acl_stage(), self.mirror_type()))
table_name = "EVERFLOW" if self.acl_stage() == "ingress" else "EVERFLOW_EGRESS"
# NOTE: We currently assume that the ingress MIRROR tables already exist.
if self.acl_stage() == "egress":
self.apply_acl_table_config(duthost, table_name, "MIRROR", config_method)
self.apply_acl_rule_config(duthost, table_name, setup_mirror_session["session_name"], config_method)
yield
self.remove_acl_rule_config(duthost, table_name, config_method)
if self.acl_stage() == "egress":
self.remove_acl_table_config(duthost, "EVERFLOW_EGRESS", config_method)
def apply_acl_table_config(self, duthost, table_name, table_type, config_method):
if config_method == CONFIG_MODE_CLI:
command = "config acl add table {} {}".format(table_name, table_type)
# NOTE: Until the repo branches, we're only applying the flag
# on egress tables to preserve backwards compatibility.
if self.acl_stage() == "egress":
command += " --stage {}".format(self.acl_stage())
elif config_method == CONFIG_MODE_CONFIGLET:
pass
duthost.command(command)
def remove_acl_table_config(self, duthost, table_name, config_method):
if config_method == CONFIG_MODE_CLI:
command = "config acl remove table {}".format(table_name)
elif config_method == CONFIG_MODE_CONFIGLET:
pass
duthost.command(command)
def apply_acl_rule_config(
self,
duthost,
table_name,
session_name,
config_method,
rules=EVERFLOW_V4_RULES
):
rules_config = load_acl_rules_config(table_name, os.path.join(FILE_DIR, rules))
duthost.host.options["variable_manager"].extra_vars.update(rules_config)
if config_method == CONFIG_MODE_CLI:
duthost.template(src=os.path.join(TEMPLATE_DIR, EVERFLOW_RULE_CREATE_TEMPLATE),
dest=os.path.join(DUT_RUN_DIR, EVERFLOW_RULE_CREATE_FILE))
command = "acl-loader update full {} --table_name {} --session_name {}" \
.format(os.path.join(DUT_RUN_DIR, EVERFLOW_RULE_CREATE_FILE),
table_name,
session_name)
# NOTE: Until the repo branches, we're only applying the flag
# on egress mirroring to preserve backwards compatibility.
if self.mirror_type() == "egress":
command += " --mirror_stage {}".format(self.mirror_type())
elif config_method == CONFIG_MODE_CONFIGLET:
pass
duthost.command(command)
time.sleep(2)
def remove_acl_rule_config(self, duthost, table_name, config_method):
if config_method == CONFIG_MODE_CLI:
duthost.copy(src=os.path.join(FILE_DIR, EVERFLOW_RULE_DELETE_FILE),
dest=DUT_RUN_DIR)
command = "acl-loader update full {} --table_name {}" \
.format(os.path.join(DUT_RUN_DIR, EVERFLOW_RULE_DELETE_FILE), table_name)
elif config_method == CONFIG_MODE_CONFIGLET:
pass
duthost.command(command)
@abstractmethod
def mirror_type(self):
"""
Get the mirror stage for this set of test cases.
Used to parametrize test cases based on the mirror stage.
"""
pass
@abstractmethod
def acl_stage(self):
"""
Get the ACL stage for this set of test cases.
Used to parametrize test cases based on the ACL stage.
"""
pass
def send_and_check_mirror_packets(self,
setup,
mirror_session,
ptfadapter,
duthost,
mirror_packet,
src_port=None,
dest_ports=None,
expect_recv=True):
expected_mirror_packet = self._get_expected_mirror_packet(mirror_session,
setup,
duthost,
mirror_packet)
if not src_port:
src_port = self._get_random_src_port(setup)
if not dest_ports:
dest_ports = [self._get_monitor_port(setup, mirror_session, duthost)]
ptfadapter.dataplane.flush()
testutils.send(ptfadapter, src_port, mirror_packet)
if expect_recv:
_, received_packet = testutils.verify_packet_any_port(
ptfadapter,
expected_mirror_packet,
ports=dest_ports
)
logging.info("Received packet: %s", packet.Ether(received_packet).summary())
inner_packet = self._extract_mirror_payload(received_packet, len(mirror_packet))
logging.info("Received inner packet: %s", inner_packet.summary())
inner_packet = Mask(inner_packet)
# For egress mirroring, we expect the DUT to have modified the packet
# before forwarding it. Specifically:
#
# - In L2 the SMAC and DMAC will change.
# - In L3 the TTL and checksum will change.
#
# We know what the TTL and SMAC should be after going through the pipeline,
# but DMAC and checksum are trickier. For now, update the TTL and SMAC, and
# mask off the DMAC and IP Checksum to verify the packet contents.
if self.mirror_type() == "egress":
mirror_packet[packet.IP].ttl -= 1
mirror_packet[packet.Ether].src = setup["router_mac"]
inner_packet.set_do_not_care_scapy(packet.Ether, "dst")
inner_packet.set_do_not_care_scapy(packet.IP, "chksum")
logging.info("Expected inner packet: %s", mirror_packet.summary())
pytest_assert(inner_packet.pkt_match(mirror_packet), "Mirror payload does not match received packet")
else:
testutils.verify_no_packet_any(ptfadapter, expected_mirror_packet, dest_ports)
def _get_expected_mirror_packet(self, mirror_session, setup, duthost, mirror_packet):
payload = mirror_packet.copy()
# Add vendor specific padding to the packet
if duthost.facts["asic_type"] in ["mellanox"]:
payload = binascii.unhexlify("0" * 44) + str(payload)
if duthost.facts["asic_type"] in ["barefoot"]:
payload = binascii.unhexlify("0" * 24) + str(payload)
expected_packet = testutils.simple_gre_packet(
eth_src=setup["router_mac"],
ip_src=mirror_session["session_src_ip"],
ip_dst=mirror_session["session_dst_ip"],
ip_dscp=int(mirror_session["session_dscp"]),
ip_id=0,
ip_ttl=int(mirror_session["session_ttl"]),
inner_frame=payload
)
expected_packet["GRE"].proto = mirror_session["session_gre"]
expected_packet = Mask(expected_packet)
expected_packet.set_do_not_care_scapy(packet.Ether, "dst")
expected_packet.set_do_not_care_scapy(packet.IP, "ihl")
expected_packet.set_do_not_care_scapy(packet.IP, "len")
expected_packet.set_do_not_care_scapy(packet.IP, "flags")
expected_packet.set_do_not_care_scapy(packet.IP, "chksum")
# The fanout switch may modify this value en route to the PTF so we should ignore it, even
# though the session does have a DSCP specified.
expected_packet.set_do_not_care_scapy(packet.IP, "tos")
# Mask off the payload (we check it later)
expected_packet.set_do_not_care(self.OUTER_HEADER_SIZE * 8, len(payload) * 8)
return expected_packet
def _extract_mirror_payload(self, encapsulated_packet, payload_size):
pytest_assert(len(encapsulated_packet) >= self.OUTER_HEADER_SIZE,
"Incomplete packet, expected at least {} header bytes".format(self.OUTER_HEADER_SIZE))
inner_frame = encapsulated_packet[-payload_size:]
return packet.Ether(inner_frame)
def _mirror_session_info(self, session_name, asic_type):
session_src_ip = "1.1.1.1"
session_dst_ip = "2.2.2.2"
session_dscp = "8"
session_ttl = "1"
if "mellanox" == asic_type:
session_gre = 0x8949
elif "barefoot" == asic_type:
session_gre = 0x22EB
else:
session_gre = 0x88BE
session_prefix_lens = ["24", "32"]
session_prefixes = []
for prefix_len in session_prefix_lens:
session_prefixes.append(str(ipaddr.IPNetwork(session_dst_ip + "/" + prefix_len).network) + "/" + prefix_len)
return {
"session_name": session_name,
"session_src_ip": session_src_ip,
"session_dst_ip": session_dst_ip,
"session_dscp": session_dscp,
"session_ttl": session_ttl,
"session_gre": session_gre,
"session_prefixes": session_prefixes
}
def _get_random_src_port(self, setup):
return setup["port_index_map"][random.choice(setup["port_index_map"].keys())]
def _get_monitor_port(self, setup, mirror_session, duthost):
mirror_output = duthost.command("show mirror_session")
logging.info("Running mirror session configuration:\n%s", mirror_output["stdout"])
matching_session = list(filter(lambda line: line.startswith(mirror_session["session_name"]),
mirror_output["stdout_lines"]))
pytest_assert(matching_session, "Test mirror session {} not found".format(mirror_session["session_name"]))
logging.info("Found mirror session:\n%s", matching_session[0])
monitor_port = matching_session[0].split()[-1]
pytest_assert(monitor_port in setup["port_index_map"],
"Invalid monitor port:\n{}".format(mirror_output["stdout"]))
logging.info("Selected monitor port %s (index=%s)", monitor_port, setup["port_index_map"][monitor_port])
return setup["port_index_map"][monitor_port]
| [
"[email protected]"
] | |
0dbbc1f7205fac49e6f4f2f77cca3b72b9c10ea3 | 0708d5e1159bf9d49066d4981b241e2549b1f643 | /boxes4.py | c5bc8a9460a1912871cd7b40e306963d5c089aab | [] | no_license | MatrixMike/Python | 1a6957ba387f83e975da7b92146928aa78ce64f2 | 9fee873124ea75afee46e1c40e9cb45c519de662 | refs/heads/master | 2022-12-29T10:08:43.187690 | 2022-12-21T11:07:05 | 2022-12-21T11:07:05 | 94,441,697 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 309 | py | import turtle
def tilted_square():
turtle.left(angle)
turtle.forward(50)
turtle.left(90)
turtle.forward(50)
turtle.left(90)
turtle.forward(50)
turtle.left(90)
turtle.forward(50)
turtle.left(90)
angle = 20 # <--
tilted_square()
tilted_square()
tilted_square()
| [
"[email protected]"
] | |
da453faaf1b6d9d6c8d858e614200a4b72ead8d3 | 4ae53803aabb0e3305631083a8a597c1b48c727b | /apps/ticket/admin.py | 85276858e1ad0b22559d2a2367a5f00940e8a224 | [] | no_license | Fentrys/PruebasNewdta | 273b4a9d93676ad32953e8a40a9e5b5e875d9e0e | bb122a76b1580e4b68c4c9fab8d91c0fa95f9667 | refs/heads/master | 2020-09-21T14:40:01.687014 | 2019-11-29T14:03:21 | 2019-11-29T14:03:21 | 224,819,561 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 128 | py | from django.contrib import admin
from apps.ticket.models import Ticket
# Register your models here.
admin.site.register(Ticket) | [
"[email protected]"
] | |
cbd6dfa66b388986b851c34127c9fcf1a81cb5d2 | 9446ef8ce5b3f83d4f2acf8fb73deb536632eb5b | /Assignments/Ass4/test.py | 2be0ae0da643270ad3c5a5bb7274cf929c5cc0b4 | [] | no_license | harshkasyap/ml-class | 3512413c84f35708e76ce28dae8bef13d1c32526 | 83a11e35f634c17edc4f6b4a6b68bb475a221b5d | refs/heads/master | 2022-09-25T13:28:25.425644 | 2019-12-07T05:23:48 | 2019-12-07T05:23:48 | 195,331,577 | 0 | 1 | null | 2022-09-08T11:28:52 | 2019-07-05T03:19:15 | MATLAB | UTF-8 | Python | false | false | 229 | py | import nltk
nltk.download('punkt')
nltk.download('averaged_perceptron_tagger')
sentence = "They refuse to permit us to obtain the refuse permit."
tokens = nltk.word_tokenize(sentence)
print (tokens)
print (nltk.pos_tag(tokens)) | [
"[email protected]"
] | |
748d01cc73380329e8a671b250e78f01670fbea5 | 25cab45bf9ad0b2a4d81345f0af9189bd3d1987c | /setup.py | c232273aa84fb5b3a4a0364faa27b347ed9c5528 | [
"Apache-2.0"
] | permissive | opentelekomcloud-infra/zuul-project-config | 319e10946a6ecca0ad8e4da5ba804afb5cd56468 | f75672f88515049f163e76eb1c5030dc4dc90d0e | refs/heads/main | 2023-08-30T00:58:48.763852 | 2023-08-28T07:24:29 | 2023-08-28T07:24:29 | 133,349,600 | 2 | 0 | Apache-2.0 | 2023-09-05T19:58:47 | 2018-05-14T11:19:14 | Python | UTF-8 | Python | false | false | 564 | py | # you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import setuptools
setuptools.setup(
setup_requires=['pbr>=2.0'],
pbr=True)
| [
"[email protected]"
] | |
dc86dd132dbfa443f8cb21e99cb878f6224232a5 | 0207812e037649fe4d916397fcaa1b4c1ac46c7c | /venv/bin/easy_install | 6a7ed833ac201b57189d5cdb7295d62b98e182d9 | [] | no_license | s777610/530_Project_LA_Crime | 047c96dc40b10f58dea45e6496d160a48fe2f7c5 | 19593578515e0be1ed082a5e5d005eb37b04ab2f | refs/heads/master | 2020-03-22T17:07:49.526017 | 2018-07-19T23:13:04 | 2018-07-19T23:13:04 | 140,374,838 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 457 | #!/Users/hungweicheng/PycharmProjects/LA_crime_analysis/venv/bin/python
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==39.1.0','console_scripts','easy_install'
__requires__ = 'setuptools==39.1.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('setuptools==39.1.0', 'console_scripts', 'easy_install')()
)
| [
"[email protected]"
] | ||
c61bba04ba90b902317654197e8dd0546a386380 | 2734b77a68f6d7e22e8b823418ad1c59fe1a34af | /opengever/tabbedview/upgrades/to2202.py | 7dd61b11749007b632aaebc5820297a180e31a60 | [] | no_license | 4teamwork/opengever.core | 5963660f5f131bc12fd0a5898f1d7c8f24a5e2b1 | a01bec6c00d203c21a1b0449f8d489d0033c02b7 | refs/heads/master | 2023-08-30T23:11:27.914905 | 2023-08-25T14:27:15 | 2023-08-25T14:27:15 | 9,788,097 | 19 | 8 | null | 2023-09-14T13:28:56 | 2013-05-01T08:28:16 | Python | UTF-8 | Python | false | false | 349 | py | from ftw.upgrade import UpgradeStep
class RenameStatefilterJS(UpgradeStep):
def __call__(self):
js_registry = self.getToolByName('portal_javascripts')
js_registry.renameResource(
'++resource++opengever.tabbedview-resources/tasklisting.js',
'++resource++opengever.tabbedview-resources/statefilter.js')
| [
"[email protected]"
] | |
2dd90f9d0a8bf5c956add88a4854c64195e7a328 | 6284d7a282f6508c34a74ade286344d3124007c5 | /server.py | 487468d28b6b560a49bf9da69999e291c1386ddb | [] | no_license | k-hub/movie-ratings | cd118ba5ad22486d6642cfc36e6c4ddf64cd92d3 | 593893abffbaf7296337415ed917980530739a17 | refs/heads/master | 2021-01-01T05:16:01.630156 | 2016-05-06T01:25:48 | 2016-05-06T01:25:48 | 57,918,115 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,670 | py | """Movie Ratings."""
from jinja2 import StrictUndefined
from flask import Flask, render_template, redirect, request, flash, session
from flask_debugtoolbar import DebugToolbarExtension
from model import User, Rating, Movie,connect_to_db, db
app = Flask(__name__)
# Required to use Flask sessions and the debug toolbar
app.secret_key = "ABC"
# Normally, if you use an undefined variable in Jinja2, it fails silently.
# This is horrible. Fix this so that, instead, it raises an error.
app.jinja_env.undefined = StrictUndefined
@app.route('/')
def index():
"""Homepage."""
return render_template("homepage.html")
@app.route("/users")
def user_list():
"""Show list of users."""
users = User.query.all()
return render_template("user_list.html", users=users)
@app.route("/user-profile/<int:user_id>")
def user_profile(user_id):
"""Shows user profile"""
user = db.session.query(User).filter(User.user_id==user_id).first()
print user
return render_template("profile.html", display_profile=user)
@app.route("/login-form", methods=["GET"])
def login_form():
"""User login"""
return render_template("login.html")
@app.route("/login", methods=["POST"])
def login_process():
"""Process login"""
email = request.form.get("email")
pw = request.form.get("password")
# error = None
# Returns a single User object querying by email.
user = db.session.query(User).filter(User.email==email).first()
# input_email_pw = (request.args.get("inputEmail"), request.args.get("inputPassword"))
# if user_email:
# user_pw = db.session.query(User).filter(User.email==email, User.password==pw).one()
# flash('You were successfully logged in')
# return redirect("/")
# else:
# error = 'Invalid credentials'
# return render_template('login.html', error=error)
##Ask why rows 62-73 didn't work...
# session["user_id"] = user.user_id
# if user:
# if pw == user.password:
# flash("Successfully logged in!")
# return redirect("/users/%s" % user.user_id)
# else:
# flash("Invalid login/password.")
# return render_template("login.html")
# elif user is None:
# flash("Please sign up for an account")
# return render_template("registration.html")
if not user:
flash("No such user")
return redirect("/register")
if user.password != pw:
flash("Incorrect password")
return redirect("/login")
session["user_id"] = user.user_id
flash("Logged in")
return redirect("/users/%s" % user.user_id)
@app.route('/logout')
def logout():
"""Log out."""
del session["user_id"]
flash("Logged Out.")
return redirect("/")
@app.route("/register", methods=["GET"])
def registration_form():
"""Register new user"""
return render_template("registration.html")
@app.route('/register', methods=['POST'])
def register_process():
"""Process registration."""
# Get form variables
email = request.form["email"]
password = request.form["password"]
age = int(request.form["age"])
zipcode = request.form["zipcode"]
new_user = User(email=email, password=password, age=age, zipcode=zipcode)
db.session.add(new_user)
db.session.commit()
flash("User %s added." % email)
return redirect("/")
if __name__ == "__main__":
# We have to set debug=True here, since it has to be True at the point
# that we invoke the DebugToolbarExtension
app.debug = True
connect_to_db(app)
# Use the DebugToolbar
DebugToolbarExtension(app)
app.run()
| [
"[email protected]"
] | |
dccaf3c89b50deb57b08827086f56ea05456e8e7 | e4cdf194f8ff2384c58e2af2327c966eb7e28e18 | /source/config.py | ab45c99162a57b3e25adeb3d7401dc620cd3f2b8 | [] | no_license | rymmx/YuBlog | 6a9af5942683227a2e53d2c09c1de790f5481585 | 473d873ec59eb479d3a5cdebd875921818c397af | refs/heads/master | 2020-04-12T13:16:47.038323 | 2018-12-09T14:13:06 | 2018-12-09T14:13:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,687 | py | import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config(object):
CSRF_ENABLED = True
SECRET_KEY = 'you-guess'
SQLALCHEMY_TRACK_MODIFICATIONS = False
SQLALCHEMY_COMMIT_ON_TEARDOWN = True
POSTS_PER_PAGE = 10
ADMIN_POSTS_PER_PAGE = 20
ACHIVES_POSTS_PER_PAGE = 20
SEARCH_POSTS_PER_PAGE = 15
COMMENTS_PER_PAGE = 10
ADMIN_COMMENTS_PER_PAGE = 50
UPLOAD_PATH = './app/static/upload/'
# 博客信息
# 管理员姓名
ADMIN_NAME = '俞坤'
# 管理员登录信息
ADMIN_LOGIN_NAME = 'yukun'
# 登录密码
ADMIN_PASSWORD = os.getenv('ADMIN_PASSWORD') or 'password'
# 博客名
SITE_NAME = '意外'
# 博客标题
SITE_TITLE = '俞坤的博客'
# 管理员简介
ADMIN_PROFILE = '克制力,执行力'
# RSS站点信息
# 站点协议
WEB_PROTOCOL = 'http'
# 站点域名
WEB_URL = 'www.yukunweb.com'
# 站点创建时间
WEB_START_TIME = '2017-05-25'
# 显示条数
RSS_COUNTS = 10
# 发送邮件用户登录
MAIL_USERNAME = os.getenv('MAIL_USERNAME')
# 客户端登录密码非正常登录密码
MAIL_PASSWORD = os.getenv('MAIL_PASSWORD')
MAIL_SERVER = os.getenv('MAIL_SERVER') or 'smtp.qq.com'
MAIL_PORT = os.getenv('MAIL_PORT') or '465'
ADMIN_MAIL_SUBJECT_PREFIX = 'blog'
ADMIN_MAIL_SENDER = 'admin email'
# 接收邮件通知的邮箱
ADMIN_MAIL = os.getenv('ADMIN_MAIL')
# 搜索最小字节
WHOOSHEE_MIN_STRING_LEN = 1
# cache 使用 Redis 数据库缓存配置
CACHE_TYPE = 'redis'
CACHE_REDIS_HOST = '127.0.0.1'
CACHE_REDIS_PORT = 6379
CACHE_REDIS_DB = os.getenv('CACHE_REDIS_DB') or ''
CHCHE_REDIS_PASSWORD = os.getenv('CHCHE_REDIS_PASSWORD') or ''
# 七牛云存储配置
NEED_PIC_BED = False
QN_ACCESS_KEY = os.getenv('QN_ACCESS_KEY') or ''
QN_SECRET_KEY = os.getenv('QN_SECRET_KEY') or ''
# 七牛空间名
QN_PIC_BUCKET = 'bucket-name'
# 七牛外链域名
QN_PIC_DOMAIN = 'domain-url'
@staticmethod
def init_app(app):
pass
class DevelopmentConfig(Config):
SQLALCHEMY_DATABASE_URI = 'mysql+pymysql://root:password@localhost:3306/mydb'
DEBUG = True
class TestingConfig(Config):
SQLALCHEMY_DATABASE_URI = 'mysql+pymysql://root:password@localhost:3306/testdb'
TESTING = True
class ProductionConfig(Config):
SQLALCHEMY_DATABASE_URI = 'mysql+pymysql://root:password@localhost:3306/mydb'
DEBUG = False
@classmethod
def init_app(cls, app):
Config.init_app(app)
# 把错误发给管理
import logging
from logging.handlers import SMTPHandler
credentials = None
secure = None
if getattr(cls, 'MAIL_USERNAME', None) is not None:
credentials = (cls.MAIL_USERNAME, cls.MAIL_PASSWORD)
if getattr(cls, 'MAIL_USE_TLS', None):
secure = ()
mail_handler = SMTPHandler(
mailhost=(cls.MAIL_SERVER, cls.MAIL_PORT),
fromaddr=cls.ADMIN_MAIL_SENDER,
toaddrs=[cls.ADMIN_MAIL],
subject=cls.ADMIN_MAIL_SUBJECT_PREFIX + ' Application Error',
credentials=credentials,
secure=secure)
mail_handler.setLevel(logging.ERROR)
app.logger.addHandler(mail_handler)
class DockerConfig(ProductionConfig):
SQLALCHEMY_DATABASE_URI = 'mysql+pymysql://root:password@db:3306/mydb'
DEBUG = False
CACHE_REDIS_HOST = 'cache'
config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'production': ProductionConfig,
'docker': DockerConfig,
'default': DevelopmentConfig
}
| [
"[email protected]"
] | |
f42e995e23dc2681516d47492fd67d14669e9f96 | fbbcae1df6c989b87a74debe8d654c9ff0ecf575 | /backend/framework/qlf/dashboard/bokeh/globalfocus/main.py | c3d12f98f8d57fee74f479208e3a1250fccd41c7 | [] | no_license | jorgemachucav/qlf | 05c142582d3e5a6a13e6325acdbc4fbc02c1ad9b | ade1b80a40c4f05cbee987d3e48c0c088e77247c | refs/heads/master | 2020-04-21T10:04:54.043686 | 2019-02-26T02:00:53 | 2019-02-26T02:00:53 | 169,475,402 | 5 | 0 | null | 2019-02-06T20:55:59 | 2019-02-06T20:55:58 | null | UTF-8 | Python | false | false | 7,276 | py | from bokeh.plotting import Figure
from bokeh.layouts import row, column
from bokeh.models import HoverTool, ColumnDataSource, Range1d
from bokeh.models import LinearColorMapper, ColorBar
from qlf_models import QLFModels
from dashboard.bokeh.helper import get_palette
import numpy as np
import logging
from bokeh.resources import CDN
from bokeh.embed import file_html
import os
from dashboard.models import Job, Process, Fibermap
spectro_data = os.environ.get('DESI_SPECTRO_DATA')
logger = logging.getLogger(__name__)
class GlobalFocus:
def __init__(self, process_id, arm):
self.selected_process_id = process_id
self.selected_arm = arm
def data_source(self, fmap):
""" Creating data source for plots
"""
data_model = {
'x': [],
'w': [],
'cam': [],
'OBJ_TYPE': [],
'ra': [],
'dec': [],
}
process_id = self.selected_process_id
joblist = [entry.camera.camera for entry in Job.objects.filter(
process_id=process_id)]
ra_tile = fmap.fiber_ra
dec_tile = fmap.fiber_dec
otype_tile = fmap.objtype
y = []
w = []
cam_inst = []
for spec in list(range(10)):
cam = self.selected_arm+str(spec)
if cam in joblist:
mergedqa = QLFModels().get_output(
self.selected_process_id, cam)
xwsig = mergedqa['TASKS']['CHECK_CCDs']['METRICS']['XWSIGMA_FIB']
y = y + xwsig[0]
w = w + xwsig[1]
else:
y = y + 500*[np.nan]
w = w + 500*[np.nan]
cam_inst = cam_inst + [cam]*500
data_model['x'] = y
data_model['w'] = w
data_model['cam'] = cam_inst
data_model['OBJ_TYPE'] = otype_tile
data_model['ra'] = ra_tile
data_model['dec'] = dec_tile
source = ColumnDataSource(data=data_model)
return source
def wedge_plot(self, wedge_arm, fmap, common_source=None, sigma_kind='x'):
ra_center = fmap.exposure.telra
dec_center = fmap.exposure.teldec
fiber_tooltip = """
<div>
<div>
<span style="font-size: 12px; font-weight: bold; color: #303030;">SIGMA: </span>
<span style="font-size: 13px; color: #515151">@y</span>
</div>
<div>
<span style="font-size: 12px; font-weight: bold; color: #303030;">RA: </span>
<span style="font-size: 13px; color: #515151;">@ra</span>
</div>
<div>
<span style="font-size: 12px; font-weight: bold; color: #303030;">DEC: </span>
<span style="font-size: 13px; color: #515151;">@dec</span>
</div>
<div>
<span style="font-size: 12px; font-weight: bold; color: #303030;">Obj Type: </span>
<span style="font-size: 13px; color: #515151;">@OBJ_TYPE</span>
</div>
<div>
<span style="font-size: 12px; font-weight: bold; color: #303030;">CAM: </span>
<span style="font-size: 13px; color: #515151;">@cam</span>
</div>
"""
fiber_tooltip = fiber_tooltip.replace(
'SIGMA:', '%sSIGMA:' % sigma_kind.upper())
hover = HoverTool(tooltips=fiber_tooltip)
my_palette = get_palette("bwr")
source = common_source
process_id = self.selected_process_id
joblist = [entry.camera.camera for entry in Job.objects.filter(
process_id=process_id)]
if len(joblist) > 0:
cam = joblist[0]
mergedqa = QLFModels().get_output(
self.selected_process_id, cam)
warn_range = mergedqa['TASKS']['CHECK_CCDs']['PARAMS']['XWSIGMA_WARN_RANGE']
arg_kind = {'x': 0, 'w': 1}
refvalue = mergedqa['TASKS']['CHECK_CCDs']['PARAMS']['XWSIGMA_REF'][arg_kind[sigma_kind]]
rng_warn_min, rng_warn_max = warn_range[0] + \
refvalue, warn_range[1] + refvalue
sigma = source.data['{}'.format(sigma_kind)]
rng_min, rng_max = np.nanmin(sigma), np.nanmax(sigma)
rng = rng_max-rng_min
if np.isnan(rng_min) or np.isnan(rng_max):
fill_color = 'lightgray'
else:
mapper = LinearColorMapper(palette=my_palette, nan_color='lightgray',
low=rng_warn_min,
high=rng_warn_max)
fill_color = {'field': '%s' % (sigma_kind), 'transform': mapper}
radius = 0.017
radius_hover = 0.018
xrange = Range1d(start=ra_center + 2, end=ra_center-2)
yrange = Range1d(start=dec_center+1.8, end=dec_center-1.8)
p = Figure(title='FOCUS %s (ARM %s)' % (sigma_kind.upper(), wedge_arm), x_axis_label='RA', y_axis_label='DEC', plot_width=600, plot_height=600, tools=[hover, "box_zoom,pan,wheel_zoom,reset,lasso_select,crosshair"], active_drag="box_zoom", x_range=xrange, y_range=yrange
)
p.title.align = 'center'
p.circle('ra', 'dec', source=source, name="data", radius=radius,
fill_color=fill_color,
line_color='black', line_width=0.4,
hover_line_color='red')
p.circle('ra', 'dec', source=source, name="data", radius=radius_hover,
hover_fill_color=fill_color,
fill_color=None,
line_color=None, line_width=3, hover_line_color='orange')
if 'mapper' in locals():
cbar = Figure(height=p.plot_height,
width=120,
toolbar_location=None,
min_border=0,
outline_line_color=None,
)
color_bar = ColorBar(color_mapper=mapper, label_standoff=14,
major_label_text_font_style="bold", padding=26,
major_label_text_align='right',
major_label_text_font_size="10pt",
location=(0, 0))
cbar.title.align = 'center'
cbar.title.text_font_size = '10pt'
cbar.add_layout(color_bar, 'left')
p_list = [cbar, p]
else:
p_list = [p]
return p_list
def load_qa(self):
process_id = self.selected_process_id
process = Process.objects.get(pk=process_id)
exposure = process.exposure
fmap = Fibermap.objects.filter(exposure=exposure)[0]
src = self.data_source(fmap)
p = self.wedge_plot(self.selected_arm, fmap,
common_source=src, sigma_kind='x')
pw = self.wedge_plot(self.selected_arm, fmap,
common_source=src, sigma_kind='w')
layout = row(column(row(p), row(pw)),)
return file_html(layout, CDN, "Global Focus")
if __name__ == '__main__':
    print('debugging instance')
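    # Minimal usage sketch (hypothetical ids; assumes a configured Django
    # environment and a populated QLF database):
    # html = GlobalFocus(process_id=1, arm='b').load_qa()
    # open('global_focus.html', 'w').write(html)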
| [
"[email protected]"
] | |
d0d9d5da07bbbe7a4f0b738545d40f6f7ca87f57 | e2b1219048ad05b742ba62df23ab95c5fac8e105 | /pyduino.py | 1cd6df14a30a9ad5fe38b5a0d85ae93fdff33751 | [] | no_license | kojino/Room-In-Use | a86331185d6b8cc9027c1f35ce7e854652e96c11 | f9a185e918051a27e166391a468984889856e74c | refs/heads/master | 2020-06-10T09:36:02.654190 | 2016-12-13T20:22:23 | 2016-12-13T20:22:23 | 75,973,506 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,427 | py | """
A library to interface with an Arduino over a serial connection
"""
import serial
class Arduino():
"""
Models an Arduino connection
"""
def __init__(self, serial_port='/dev/tty.usbmodem1421', baud_rate=9600,
read_timeout=5):
"""
Initializes the serial connection to the Arduino board
"""
self.conn = serial.Serial(serial_port, baud_rate)
self.conn.timeout = read_timeout # Timeout for readline()
def set_pin_mode(self, pin_number, mode):
"""
Performs a pinMode() operation on pin_number
        Internally sends b'M{mode}{pin_number}' where mode can be:
        - I for INPUT
        - O for OUTPUT
        - P for INPUT_PULLUP
        e.g. b'MO13' sets pin 13 as OUTPUT
"""
command = (''.join(('M',mode,str(pin_number)))).encode()
#print 'set_pin_mode =',command,(''.join(('M',mode,str(pin_number))))
self.conn.write(command)
def digital_read(self, pin_number):
"""
Performs a digital read on pin_number and returns the value (1 or 0)
Internally sends b'RD{pin_number}' over the serial connection
"""
command = (''.join(('RD', str(pin_number)))).encode()
self.conn.write(command)
line_received = self.conn.readline().decode().strip()
header, value = line_received.split(':') # e.g. D13:1
if header == ('D'+ str(pin_number)):
# If header matches
return int(value)
def digital_write(self, pin_number, digital_value):
"""
Writes the digital_value on pin_number
Internally sends b'WD{pin_number}:{digital_value}' over the serial
connection
"""
command = (''.join(('WD', str(pin_number), ':',
str(digital_value)))).encode()
self.conn.write(command)
def analog_read(self, pin_number):
"""
Performs an analog read on pin_number and returns the value (0 to 1023)
Internally sends b'RA{pin_number}' over the serial connection
"""
command = (''.join(('RA', str(pin_number)))).encode()
self.conn.write(command)
line_received = self.conn.readline().decode().strip()
header, value = line_received.split(':') # e.g. A4:1
if header == ('A'+ str(pin_number)):
# If header matches
return int(value)
def room_read(self):
"""
        Reads the room status from the board and returns the raw response line
        Internally sends b'HA1' over the serial connection
"""
command = (''.join(('HA', str(1)))).encode()
self.conn.write(command)
line_received = self.conn.readline().decode().strip()
return line_received
def analog_write(self, pin_number, analog_value):
"""
Writes the analog value (0 to 255) on pin_number
Internally sends b'WA{pin_number}:{analog_value}' over the serial
connection
"""
command = (''.join(('WA', str(pin_number), ':',
str(analog_value)))).encode()
self.conn.write(command)
def emergency(self):
"""
"""
self.conn.write("E1")
def normal(self):
"""
"""
self.conn.write("E0")
def close(self):
"""
To ensure we are properly closing our connection to the
Arduino device.
"""
self.conn.close()
        print('Connection to Arduino closed')
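
# Minimal usage sketch (not part of the original library; assumes an Arduino
# running the matching serial sketch is attached on the default port):
if __name__ == '__main__':
    board = Arduino()
    board.set_pin_mode(13, 'O')    # pinMode(13, OUTPUT)
    board.digital_write(13, 1)     # digitalWrite(13, HIGH)
    print(board.digital_read(13))  # the sketch should reply with a line like D13:1
    board.close()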
| [
"[email protected]"
] | |
8ddecc8c839bd9e0e461683ea3660a975e26f939 | 458a624482f50e9148869dfda843e64e0ad3d0a1 | /confusion-character-replacement/confusion-replacer.py | b6e23c056be2661857bd93275ab667560d496d2a | [] | no_license | Sangeerththan/OCRSinhala | 3e5b3e53d7c0342ab77155505a1154aa984d9695 | e6c0d5f18889efed2b62bec5193c28ed0a7e37c9 | refs/heads/master | 2020-05-02T12:40:43.926063 | 2019-12-08T09:02:37 | 2019-12-08T09:02:37 | 177,964,297 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,010 | py | import codecs
g = codecs.open("dic.txt", encoding="utf-8")
dic_words = g.read().split()
g.close
confuse_groups=[]
with codecs.open("confusion groups.txt", encoding="utf-8") as f:
confuse_groups = f.readlines()
f.close()
confuse_list=[]
for i in range(0,len(confuse_groups)):
confuse_list.append(confuse_groups[i].split())
g = codecs.open("text.txt", encoding="utf-8")
word = g.read()
g.close
s = codecs.open("corrected.txt","w+",encoding="utf-8" )
if (word in dic_words):
print("true")
s.write(word)
else:
print("false")
correct_word=word
found_correct_word = False
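    # Try every single-character substitution drawn from that character's
    # confusion group; accept the first one that yields a dictionary word.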
for j in range(0,len(word)):
for k in range(0,len(confuse_list)):
if(word[j] in confuse_list[k]):
for m in range(0,len(confuse_list[k])):
if(word[0:j]+confuse_list[k][m]+word[j+1:] in dic_words):
correct_word=word[0:j]+confuse_list[k][m]+word[j+1:]
found_correct_word = True
break
if(found_correct_word):
break
if(found_correct_word):
break
s.write(correct_word)
s.close()
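
# Example (hypothetical): if "text.txt" holds a word whose second character was
# misread, and swapping it for another member of its confusion group yields a
# word found in "dic.txt", the dictionary form is written to "corrected.txt";
# otherwise the word is written back unchanged.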
| [
"[email protected]"
] |