blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
sequencelengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
sequencelengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
3ca771e19dc6b23d14b4a8164764a44e5830a529 | 03195a6f98396fd27aedc3c06d81f1553fb1d16b | /pandas/core/_numba/executor.py | 0b59d0717a476b949054b145952a0c044d5e15b9 | [
"BSD-3-Clause"
] | permissive | huaxz1986/pandas | a08d80d27726fe141d449835b9a09265bca5b5e0 | ba2473834fedcf571d3f8245b4b24796873f2736 | refs/heads/master | 2023-06-11T02:20:14.544220 | 2022-01-12T04:40:06 | 2022-01-12T04:40:06 | 131,370,494 | 3 | 4 | BSD-3-Clause | 2018-04-28T03:51:05 | 2018-04-28T03:51:05 | null | UTF-8 | Python | false | false | 1,726 | py | from __future__ import annotations
from typing import (
TYPE_CHECKING,
Callable,
)
import numpy as np
from pandas._typing import Scalar
from pandas.compat._optional import import_optional_dependency
from pandas.core.util.numba_ import (
NUMBA_FUNC_CACHE,
get_jit_arguments,
)
def generate_shared_aggregator(
    func: Callable[..., Scalar],
    engine_kwargs: dict[str, bool] | None,
    cache_key_str: str,
):
    """
    Generate a Numba function that loops over the columns of a 2D object
    and applies a 1D numba kernel over each column.

    Parameters
    ----------
    func : function
        aggregation function to be applied to each column
    engine_kwargs : dict
        dictionary of arguments to be passed into numba.jit
    cache_key_str : str
        string to access the compiled function of the form
        <caller_type>_<aggregation_type> e.g. rolling_mean, groupby_mean

    Returns
    -------
    Numba function
    """
    nopython, nogil, parallel = get_jit_arguments(engine_kwargs, None)
    # Numba compilation is expensive: reuse a previously JIT-compiled
    # looper for the same (kernel, caller) pair when one exists.
    cache_key = (func, cache_key_str)
    if cache_key in NUMBA_FUNC_CACHE:
        return NUMBA_FUNC_CACHE[cache_key]
    # At type-check time numba is imported normally so annotations resolve;
    # at runtime it is a soft dependency resolved through pandas' helper.
    if TYPE_CHECKING:
        import numba
    else:
        numba = import_optional_dependency("numba")
    @numba.jit(nopython=nopython, nogil=nogil, parallel=parallel)
    def column_looper(
        values: np.ndarray,
        start: np.ndarray,
        end: np.ndarray,
        min_periods: int,
        *args,
    ):
        # One output row per window (len(start)), one column per input column.
        result = np.empty((len(start), values.shape[1]), dtype=np.float64)
        # prange lets numba parallelize across columns when parallel=True.
        for i in numba.prange(values.shape[1]):
            result[:, i] = func(values[:, i], start, end, min_periods, *args)
        return result
    return column_looper
| [
"[email protected]"
] | |
8baf39710e255504a040b81bb6999e6e90b09408 | e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f | /indices/nnn998.py | 6856125ce675d837a7794fd06121ad13f12b0ccf | [] | no_license | psdh/WhatsintheVector | e8aabacc054a88b4cb25303548980af9a10c12a8 | a24168d068d9c69dc7a0fd13f606c080ae82e2a6 | refs/heads/master | 2021-01-25T10:34:22.651619 | 2015-09-23T11:54:06 | 2015-09-23T11:54:06 | 42,749,205 | 2 | 3 | null | 2015-09-23T11:54:07 | 2015-09-18T22:06:38 | Python | UTF-8 | Python | false | false | 120 | py | ii = [('UnitAI.py', 2), ('WadeJEB.py', 1), ('MereHHB3.py', 4), ('StorJCC.py', 2), ('SomeMMH.py', 2), ('MereHHB2.py', 1)] | [
"[email protected]"
] | |
fc668b0f4beb102abcf466f2f54e0323dd94b77f | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /k9usvZ8wfty4HwqX2_2.py | 6df3da8982061b94fd50d4d07581a39b1c4e148e | [] | no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,045 | py | """
Create a function to check whether a given number is **Cuban Prime**. A cuban
prime is a prime number that is a solution to one of two different specific
equations involving third powers of x and y. For this challenge we are only
concerned with the cuban numbers from the **first equation**. We **ignore**
the cuban numbers from the **second equation**.
### Equation Form
p = (x^3 - y^3)/(x - y), x = y + 1, y > 0
... and the first few cuban primes from this equation are 7, 19, 37, 61, 127,
271.
### Examples
cuban_prime(7) ➞ "7 is cuban prime"
cuban_prime(9) ➞ "9 is not cuban prime"
cuban_prime(331) ➞ "331 is cuban prime"
cuban_prime(40) ➞ "40 is not cuban prime"
### Notes
* The inputs are positive integers only.
* Check the **Resources** for help.
"""
def is_prime(p):
    """Return True when *p* is prime (trial division up to sqrt(p)).

    Rewritten from the original lambda assignment (PEP 8 E731).
    """
    return p > 1 and all(p % i for i in range(2, int(p ** 0.5 + 1)))


def cuban_prime(n):
    """Classify *n* as a cuban prime of the first equation.

    A first-equation cuban prime satisfies p = (x^3 - y^3)/(x - y) with
    x = y + 1 and y > 0, which simplifies to p = 3*y^2 + 3*y + 1
    (7, 19, 37, 61, 127, 271, ...).

    Returns "<n> is cuban prime" or "<n> is not cuban prime".
    """
    # Walk the cuban sequence and stop as soon as it reaches or passes n:
    # O(sqrt(n)) instead of the original O(n) scan over range(n).
    y = 1
    while True:
        candidate = 3 * y * y + 3 * y + 1
        if candidate >= n:
            if candidate == n and is_prime(n):
                return str(n) + ' is cuban prime'
            return str(n) + ' is not cuban prime'
        y += 1
| [
"[email protected]"
] | |
f4f5aba0f8f2e294996ec623c74604d180bfc276 | 52b5773617a1b972a905de4d692540d26ff74926 | /.history/2D_20200722181027.py | ea6e6f492e2b93ebfeedfabbb4e5edb694f6f6ce | [] | no_license | MaryanneNjeri/pythonModules | 56f54bf098ae58ea069bf33f11ae94fa8eedcabc | f4e56b1e4dda2349267af634a46f6b9df6686020 | refs/heads/master | 2022-12-16T02:59:19.896129 | 2020-09-11T12:05:22 | 2020-09-11T12:05:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,348 | py | def array(n,m):
# where n is row size and m is column size
array = [[0 for x in range(n)] for x in range(m)]
print(array)
a = [[2, 4, 6, 8, 10], [3, 6, 9, 12, 15], [4, 8, 12, 16, 20]]
# where the first arguement reps the row and second arguement reps the column
print(a[0][3])
# NOTE: the original `from sys import maxint` was removed — sys.maxint
# does not exist in Python 3 and importing it raises ImportError.
# float("-inf") below is the idiomatic seed for a running maximum.
def hourGlass(arr):
    """Return the maximum hourglass sum in a 2D array.

    An hourglass is the seven-cell pattern::

        a b c
          d
        e f g

    Returns 0 for an empty input (preserving the original contract).
    Bug fixes vs. the original: the result is *returned* instead of
    printed (the original returned None, so the driver printed None),
    and the per-iteration debug prints were removed.
    """
    if not arr:
        return 0
    best = float("-inf")
    for i in range(len(arr) - 2):
        # Row lengths are read per-row, so ragged inputs are tolerated
        # the same way the original handled them.
        for j in range(len(arr[i]) - 2):
            total = (arr[i][j] + arr[i][j + 1] + arr[i][j + 2]
                     + arr[i + 1][j + 1]
                     + arr[i + 2][j] + arr[i + 2][j + 1] + arr[i + 2][j + 2])
            if total > best:
                best = total
    return best
print(hourGlass([[-1,-1,0,-9,-2,-2],[-2,-1,-6,-8,-2,-5],[-1,-1,-1,-2,-3,-4],[-1,-9,2,-4,-4,-5],[-7,-3,-3,-2,-9,-9],[-1,-3,-1,-2,-4,-5]]))
"[email protected]"
] | |
ef456e67563e978d78cbc6a2c22cf101e2d80c1b | e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f | /indices/adjur.py | b95db66e7aeb7eacf64d904654b1562db5591749 | [] | no_license | psdh/WhatsintheVector | e8aabacc054a88b4cb25303548980af9a10c12a8 | a24168d068d9c69dc7a0fd13f606c080ae82e2a6 | refs/heads/master | 2021-01-25T10:34:22.651619 | 2015-09-23T11:54:06 | 2015-09-23T11:54:06 | 42,749,205 | 2 | 3 | null | 2015-09-23T11:54:07 | 2015-09-18T22:06:38 | Python | UTF-8 | Python | false | false | 370 | py | ii = [('GodwWSL2.py', 4), ('FerrSDO3.py', 1), ('CarlTFR.py', 2), ('LyttELD.py', 1), ('TalfTAC.py', 2), ('KiddJAE.py', 1), ('BailJD1.py', 1), ('ClarGE.py', 1), ('LandWPA.py', 1), ('AinsWRR.py', 1), ('LandWPA2.py', 2), ('TalfTIT.py', 1), ('NewmJLP.py', 1), ('SoutRD.py', 1), ('HowiWRL2.py', 1), ('BailJD3.py', 1), ('HogaGMM.py', 1), ('AinsWRR2.py', 3), ('HogaGMM2.py', 1)] | [
"[email protected]"
] | |
99f8c1a49641c470c778fea08467ebaf332d4693 | 8997a0bf1e3b6efe5dd9d5f307e1459f15501f5a | /graph__networkx__d3__dot_graphviz/graphviz__examples/generate__as__bytes.py | cad13511e6c2200cf6958416c256790986119d81 | [
"CC-BY-4.0"
] | permissive | stepik/SimplePyScripts | 01092eb1b2c1c33756427abb2debbd0c0abf533f | 3259d88cb58b650549080d6f63b15910ae7e4779 | refs/heads/master | 2023-05-15T17:35:55.743164 | 2021-06-11T22:59:07 | 2021-06-11T22:59:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 300 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'ipetrash'
# pip install graphviz
from graphviz import Digraph
g = Digraph('G', format='svg')
g.edge('Hello', 'World')
# Get bytes
# pipe() renders in memory and returns the raw output bytes instead of
# writing a file; with no argument it uses the constructor's format (svg).
print(g.pipe())
print(g.pipe('png'))  # per-call format override
# OR:
# g.format = 'png'
# print(g.pipe())
print(g.pipe('pdf'))
| [
"[email protected]"
] | |
00b1a11eae7b2cec07120665c6de7285c8bbdae3 | 7ce479cac0a14d924159db9c784e3325b8f0bce7 | /schemaorgschemas/Thing/Intangible/Enumeration/MedicalImagingTechnique/Ultrasound/__init__.py | f0e44756fdb174fb8619176fe9fda3fa72543f5a | [] | no_license | EvelineAndreea/AGRe | 1f0c27237eb047a60bbcfb8d73e3157035406409 | b952125896a82741f6617c259dd4060954583180 | refs/heads/master | 2020-04-08T16:08:11.517166 | 2018-11-28T07:15:56 | 2018-11-28T07:15:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 465 | py | # -*- coding: utf-8 -*-
from schemaorgschemas.djangoschema import SchemaObject, SchemaProperty, SchemaEnumProperty, SCHEMA_ORG
from django.conf import settings
class UltrasoundSchema(SchemaObject):
    """Schema Mixin for Ultrasound

    Usage: place after django model in class definition, schema will
    return the schema.org url for the object.
    Ultrasound imaging.
    """
    def __init__(self):
        # schema.org type name used by SchemaObject to build the URL.
        # NOTE(review): super().__init__() is not called — confirm that
        # SchemaObject requires no initialisation of its own.
        self.schema = 'Ultrasound'
        # schema.org version 2.0
| [
"[email protected]"
] | |
0e647dd279872f9ca98db25c23550b1a1e7e5fb4 | df83f97ed2c6dd199005e96bc7c494cfb3b49f8c | /GeeksForGeeks/String Rotations.py | 42ed217509cdfcaf23e1e662e437f71bfb0dfa7b | [] | no_license | poojan14/Python-Practice | 45f0b68b0ad2f92bbf0b92286602d64f3b1ae992 | ed98acc788ba4a1b53bec3d0757108abb5274c0f | refs/heads/master | 2022-03-27T18:24:18.130598 | 2019-12-25T07:26:09 | 2019-12-25T07:26:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 469 | py | '''
Given strings s1 and s2, you need to find if s2 is a rotated version of the string s1. The strings are lowercase.
'''
if __name__ == '__main__':
    # Reads T test cases from stdin; each case is two lowercase strings
    # on consecutive lines.  Prints 1 when s2 is a rotation of s1, else 0.
    T = int(input())
    for _ in range(T):
        s1 = input()
        s2 = input()
        if len(s1)==len(s2):
            tmp = s1+s1 # s1+s1 contains every rotation of s1 as a substring
            if s2 in tmp : print(1) # so a substring hit means s2 is a rotation
            else : print(0)
        else:
            # Different lengths can never be rotations of each other.
            print(0)
| [
"[email protected]"
] | |
e172c4d221cb93b78fdf15d990b35e7e7e7fd500 | 48894ae68f0234e263d325470178d67ab313c73e | /scripts/noc-wf.py | 9a461df838cfb1119d145697b6241de9a1a2e87f | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | DreamerDDL/noc | 7f949f55bb2c02c15ac2cc46bc62d957aee43a86 | 2ab0ab7718bb7116da2c3953efd466757e11d9ce | refs/heads/master | 2021-05-10T18:22:53.678588 | 2015-06-29T12:28:20 | 2015-06-29T12:28:20 | 118,628,133 | 0 | 0 | null | 2018-01-23T15:19:51 | 2018-01-23T15:19:51 | null | UTF-8 | Python | false | false | 663 | py | #!./bin/python
# -*- coding: utf-8 -*-
##----------------------------------------------------------------------
## noc-wf daemon
##----------------------------------------------------------------------
## Copyright (C) 2007-2011 The NOC Project
## See LICENSE for details
##----------------------------------------------------------------------
if __name__ == "__main__":
    # Imports are deferred to runtime so that merely importing this
    # script has no side effects.
    from noc.wf.wf.daemon import WFDaemon
    from noc.lib.debug import error_report
    from noc.main.models import CustomField
    # Ensure custom DB fields are installed before the daemon starts.
    CustomField.install_fields()
    try:
        WFDaemon().process_command()
    except SystemExit:
        # Normal termination path (e.g. clean exit from CLI handling).
        pass
    except Exception:
        # Any other failure is rendered through NOC's error reporter.
        error_report()
| [
"[email protected]"
] | |
d3905ca9265658e5bf4b7a91a378ed0ea340b520 | ac5e52a3fc52dde58d208746cddabef2e378119e | /exps-gsn-edf/gsn-edf_ut=3.0_rd=1_rw=0.04_rn=4_u=0.075-0.35_p=harmonic-2/sched=RUN_trial=82/sched.py | 304905f0cc9f12230fa3ed58eca351b59ad910a9 | [] | no_license | ricardobtxr/experiment-scripts | 1e2abfcd94fb0ef5a56c5d7dffddfe814752eef1 | 7bcebff7ac2f2822423f211f1162cd017a18babb | refs/heads/master | 2023-04-09T02:37:41.466794 | 2021-04-25T03:27:16 | 2021-04-25T03:27:16 | 358,926,457 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 337 | py | -X FMLP -Q 0 -L 2 105 400
-X FMLP -Q 0 -L 2 85 250
-X FMLP -Q 0 -L 2 70 250
-X FMLP -Q 1 -L 2 66 200
-X FMLP -Q 1 -L 2 64 250
-X FMLP -Q 1 -L 2 50 200
-X FMLP -Q 2 -L 1 41 150
-X FMLP -Q 2 -L 1 40 125
-X FMLP -Q 2 -L 1 34 100
-X FMLP -Q 3 -L 1 33 200
-X FMLP -Q 3 -L 1 20 250
-X FMLP -Q 3 -L 1 10 100
| [
"[email protected]"
] | |
9c726b92873e564d1807d53aeb25eb416f88fba3 | e6c65e2e354336a4bea5b6a4ccbccd3682915fe2 | /out-bin/py/google/fhir/seqex/bundle_to_seqex_test.runfiles/pypi__apache_beam_2_9_0/apache_beam/runners/worker/sideinputs_test.py | 57d59bfa69ad81880b5237c6baf3ea3f0406a320 | [
"Apache-2.0"
] | permissive | rasalt/fhir-datalab | c30ab773d84983dd04a37e9d0ddec8bf2824b8a4 | 3e329fc8b4226d3e3a4a7c23c306a86e7a9ea0de | refs/heads/master | 2021-10-09T05:51:04.593416 | 2018-12-21T18:11:03 | 2018-12-22T05:38:32 | 162,744,237 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 155 | py | /home/rkharwar/.cache/bazel/_bazel_rkharwar/0ddaa3627472ad9d1367a008236ce2f5/external/pypi__apache_beam_2_9_0/apache_beam/runners/worker/sideinputs_test.py | [
"[email protected]"
] | |
51c188fc3582d89f30984fe761bd4de74c07d286 | 711756b796d68035dc6a39060515200d1d37a274 | /output_cog/optimized_24247.py | f41dd9eb54effc2fae8b2b76ddc93da38babc1a1 | [] | no_license | batxes/exocyst_scripts | 8b109c279c93dd68c1d55ed64ad3cca93e3c95ca | a6c487d5053b9b67db22c59865e4ef2417e53030 | refs/heads/master | 2020-06-16T20:16:24.840725 | 2016-11-30T16:23:16 | 2016-11-30T16:23:16 | 75,075,164 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,840 | py | import _surface
import chimera
try:
import chimera.runCommand
except:
pass
from VolumePath import markerset as ms
try:
from VolumePath import Marker_Set, Link
new_marker_set=Marker_Set
except:
from VolumePath import volume_path_dialog
d= volume_path_dialog(True)
new_marker_set= d.new_marker_set
marker_sets={}
surf_sets={}
if "Cog2_GFPN" not in marker_sets:
s=new_marker_set('Cog2_GFPN')
marker_sets["Cog2_GFPN"]=s
s= marker_sets["Cog2_GFPN"]
mark=s.place_marker((591.127, 550.172, 433.724), (0.89, 0.1, 0.1), 18.4716)
if "Cog2_0" not in marker_sets:
s=new_marker_set('Cog2_0')
marker_sets["Cog2_0"]=s
s= marker_sets["Cog2_0"]
mark=s.place_marker((558.151, 528.977, 490.027), (0.89, 0.1, 0.1), 17.1475)
if "Cog2_1" not in marker_sets:
s=new_marker_set('Cog2_1')
marker_sets["Cog2_1"]=s
s= marker_sets["Cog2_1"]
mark=s.place_marker((514.189, 493.443, 549.935), (0.89, 0.1, 0.1), 17.1475)
if "Cog2_GFPC" not in marker_sets:
s=new_marker_set('Cog2_GFPC')
marker_sets["Cog2_GFPC"]=s
s= marker_sets["Cog2_GFPC"]
mark=s.place_marker((541.078, 422.008, 433.053), (0.89, 0.1, 0.1), 18.4716)
if "Cog2_Anch" not in marker_sets:
s=new_marker_set('Cog2_Anch')
marker_sets["Cog2_Anch"]=s
s= marker_sets["Cog2_Anch"]
mark=s.place_marker((416.095, 453.45, 712.259), (0.89, 0.1, 0.1), 18.4716)
if "Cog3_GFPN" not in marker_sets:
s=new_marker_set('Cog3_GFPN')
marker_sets["Cog3_GFPN"]=s
s= marker_sets["Cog3_GFPN"]
mark=s.place_marker((560.441, 539.013, 466.666), (1, 1, 0), 18.4716)
if "Cog3_0" not in marker_sets:
s=new_marker_set('Cog3_0')
marker_sets["Cog3_0"]=s
s= marker_sets["Cog3_0"]
mark=s.place_marker((560.484, 539.715, 465.403), (1, 1, 0.2), 17.1475)
if "Cog3_1" not in marker_sets:
s=new_marker_set('Cog3_1')
marker_sets["Cog3_1"]=s
s= marker_sets["Cog3_1"]
mark=s.place_marker((552.11, 550.537, 440.768), (1, 1, 0.2), 17.1475)
if "Cog3_2" not in marker_sets:
s=new_marker_set('Cog3_2')
marker_sets["Cog3_2"]=s
s= marker_sets["Cog3_2"]
mark=s.place_marker((528.693, 538.373, 431.649), (1, 1, 0.2), 17.1475)
if "Cog3_3" not in marker_sets:
s=new_marker_set('Cog3_3')
marker_sets["Cog3_3"]=s
s= marker_sets["Cog3_3"]
mark=s.place_marker((511.774, 558.115, 441.881), (1, 1, 0.2), 17.1475)
if "Cog3_4" not in marker_sets:
s=new_marker_set('Cog3_4')
marker_sets["Cog3_4"]=s
s= marker_sets["Cog3_4"]
mark=s.place_marker((491.711, 546.71, 425.922), (1, 1, 0.2), 17.1475)
if "Cog3_5" not in marker_sets:
s=new_marker_set('Cog3_5')
marker_sets["Cog3_5"]=s
s= marker_sets["Cog3_5"]
mark=s.place_marker((490.69, 571.213, 412.926), (1, 1, 0.2), 17.1475)
if "Cog3_GFPC" not in marker_sets:
s=new_marker_set('Cog3_GFPC')
marker_sets["Cog3_GFPC"]=s
s= marker_sets["Cog3_GFPC"]
mark=s.place_marker((585.097, 551.597, 459.839), (1, 1, 0.4), 18.4716)
if "Cog3_Anch" not in marker_sets:
s=new_marker_set('Cog3_Anch')
marker_sets["Cog3_Anch"]=s
s= marker_sets["Cog3_Anch"]
mark=s.place_marker((395.497, 593.978, 372.046), (1, 1, 0.4), 18.4716)
if "Cog4_GFPN" not in marker_sets:
s=new_marker_set('Cog4_GFPN')
marker_sets["Cog4_GFPN"]=s
s= marker_sets["Cog4_GFPN"]
mark=s.place_marker((354.731, 547.327, 564.438), (0, 0, 0.8), 18.4716)
if "Cog4_0" not in marker_sets:
s=new_marker_set('Cog4_0')
marker_sets["Cog4_0"]=s
s= marker_sets["Cog4_0"]
mark=s.place_marker((354.731, 547.327, 564.438), (0, 0, 0.8), 17.1475)
if "Cog4_1" not in marker_sets:
s=new_marker_set('Cog4_1')
marker_sets["Cog4_1"]=s
s= marker_sets["Cog4_1"]
mark=s.place_marker((381.51, 546.932, 552.901), (0, 0, 0.8), 17.1475)
if "Cog4_2" not in marker_sets:
s=new_marker_set('Cog4_2')
marker_sets["Cog4_2"]=s
s= marker_sets["Cog4_2"]
mark=s.place_marker((408.798, 544.379, 541.772), (0, 0, 0.8), 17.1475)
if "Cog4_3" not in marker_sets:
s=new_marker_set('Cog4_3')
marker_sets["Cog4_3"]=s
s= marker_sets["Cog4_3"]
mark=s.place_marker((436.698, 538.561, 532.225), (0, 0, 0.8), 17.1475)
if "Cog4_4" not in marker_sets:
s=new_marker_set('Cog4_4')
marker_sets["Cog4_4"]=s
s= marker_sets["Cog4_4"]
mark=s.place_marker((464.967, 534.516, 524.764), (0, 0, 0.8), 17.1475)
if "Cog4_5" not in marker_sets:
s=new_marker_set('Cog4_5')
marker_sets["Cog4_5"]=s
s= marker_sets["Cog4_5"]
mark=s.place_marker((492.885, 537.772, 518.397), (0, 0, 0.8), 17.1475)
if "Cog4_6" not in marker_sets:
s=new_marker_set('Cog4_6')
marker_sets["Cog4_6"]=s
s= marker_sets["Cog4_6"]
mark=s.place_marker((513.156, 551.557, 503.757), (0, 0, 0.8), 17.1475)
if "Cog4_GFPC" not in marker_sets:
s=new_marker_set('Cog4_GFPC')
marker_sets["Cog4_GFPC"]=s
s= marker_sets["Cog4_GFPC"]
mark=s.place_marker((263.588, 534.134, 438.154), (0, 0, 0.8), 18.4716)
if "Cog4_Anch" not in marker_sets:
s=new_marker_set('Cog4_Anch')
marker_sets["Cog4_Anch"]=s
s= marker_sets["Cog4_Anch"]
mark=s.place_marker((762.458, 564.868, 573.574), (0, 0, 0.8), 18.4716)
if "Cog5_GFPN" not in marker_sets:
s=new_marker_set('Cog5_GFPN')
marker_sets["Cog5_GFPN"]=s
s= marker_sets["Cog5_GFPN"]
mark=s.place_marker((516.971, 541.55, 550.287), (0.3, 0.3, 0.3), 18.4716)
if "Cog5_0" not in marker_sets:
s=new_marker_set('Cog5_0')
marker_sets["Cog5_0"]=s
s= marker_sets["Cog5_0"]
mark=s.place_marker((516.971, 541.55, 550.287), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_1" not in marker_sets:
s=new_marker_set('Cog5_1')
marker_sets["Cog5_1"]=s
s= marker_sets["Cog5_1"]
mark=s.place_marker((524.701, 520.043, 532.301), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_2" not in marker_sets:
s=new_marker_set('Cog5_2')
marker_sets["Cog5_2"]=s
s= marker_sets["Cog5_2"]
mark=s.place_marker((518.156, 496.095, 517.283), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_3" not in marker_sets:
s=new_marker_set('Cog5_3')
marker_sets["Cog5_3"]=s
s= marker_sets["Cog5_3"]
mark=s.place_marker((522.974, 469.313, 527.494), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_GFPC" not in marker_sets:
s=new_marker_set('Cog5_GFPC')
marker_sets["Cog5_GFPC"]=s
s= marker_sets["Cog5_GFPC"]
mark=s.place_marker((597.329, 492.17, 431.012), (0.3, 0.3, 0.3), 18.4716)
if "Cog5_Anch" not in marker_sets:
s=new_marker_set('Cog5_Anch')
marker_sets["Cog5_Anch"]=s
s= marker_sets["Cog5_Anch"]
mark=s.place_marker((450.148, 440.633, 626.042), (0.3, 0.3, 0.3), 18.4716)
if "Cog6_GFPN" not in marker_sets:
s=new_marker_set('Cog6_GFPN')
marker_sets["Cog6_GFPN"]=s
s= marker_sets["Cog6_GFPN"]
mark=s.place_marker((567.05, 508.111, 472.181), (0.21, 0.49, 0.72), 18.4716)
if "Cog6_0" not in marker_sets:
s=new_marker_set('Cog6_0')
marker_sets["Cog6_0"]=s
s= marker_sets["Cog6_0"]
mark=s.place_marker((567.18, 507.83, 472.003), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_1" not in marker_sets:
s=new_marker_set('Cog6_1')
marker_sets["Cog6_1"]=s
s= marker_sets["Cog6_1"]
mark=s.place_marker((539.941, 505.099, 467.782), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_2" not in marker_sets:
s=new_marker_set('Cog6_2')
marker_sets["Cog6_2"]=s
s= marker_sets["Cog6_2"]
mark=s.place_marker((516.22, 513.736, 455.831), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_3" not in marker_sets:
s=new_marker_set('Cog6_3')
marker_sets["Cog6_3"]=s
s= marker_sets["Cog6_3"]
mark=s.place_marker((495.42, 532.128, 457.926), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_4" not in marker_sets:
s=new_marker_set('Cog6_4')
marker_sets["Cog6_4"]=s
s= marker_sets["Cog6_4"]
mark=s.place_marker((478.453, 554.246, 457.003), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_5" not in marker_sets:
s=new_marker_set('Cog6_5')
marker_sets["Cog6_5"]=s
s= marker_sets["Cog6_5"]
mark=s.place_marker((483.011, 579.139, 445.049), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_6" not in marker_sets:
s=new_marker_set('Cog6_6')
marker_sets["Cog6_6"]=s
s= marker_sets["Cog6_6"]
mark=s.place_marker((500.654, 596.305, 432.034), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_GFPC" not in marker_sets:
s=new_marker_set('Cog6_GFPC')
marker_sets["Cog6_GFPC"]=s
s= marker_sets["Cog6_GFPC"]
mark=s.place_marker((545.229, 600.839, 505.286), (0.21, 0.49, 0.72), 18.4716)
if "Cog6_Anch" not in marker_sets:
s=new_marker_set('Cog6_Anch')
marker_sets["Cog6_Anch"]=s
s= marker_sets["Cog6_Anch"]
mark=s.place_marker((453.3, 585.077, 359.443), (0.21, 0.49, 0.72), 18.4716)
if "Cog7_GFPN" not in marker_sets:
s=new_marker_set('Cog7_GFPN')
marker_sets["Cog7_GFPN"]=s
s= marker_sets["Cog7_GFPN"]
mark=s.place_marker((569.881, 572.648, 532.303), (0.7, 0.7, 0.7), 18.4716)
if "Cog7_0" not in marker_sets:
s=new_marker_set('Cog7_0')
marker_sets["Cog7_0"]=s
s= marker_sets["Cog7_0"]
mark=s.place_marker((564.77, 547.607, 527.181), (0.7, 0.7, 0.7), 17.1475)
if "Cog7_1" not in marker_sets:
s=new_marker_set('Cog7_1')
marker_sets["Cog7_1"]=s
s= marker_sets["Cog7_1"]
mark=s.place_marker((551.494, 493.359, 514.045), (0.7, 0.7, 0.7), 17.1475)
if "Cog7_2" not in marker_sets:
s=new_marker_set('Cog7_2')
marker_sets["Cog7_2"]=s
s= marker_sets["Cog7_2"]
mark=s.place_marker((536.082, 439.216, 501.45), (0.7, 0.7, 0.7), 17.1475)
if "Cog7_GFPC" not in marker_sets:
s=new_marker_set('Cog7_GFPC')
marker_sets["Cog7_GFPC"]=s
s= marker_sets["Cog7_GFPC"]
mark=s.place_marker((608.487, 438.957, 465.359), (0.7, 0.7, 0.7), 18.4716)
if "Cog7_Anch" not in marker_sets:
s=new_marker_set('Cog7_Anch')
marker_sets["Cog7_Anch"]=s
s= marker_sets["Cog7_Anch"]
mark=s.place_marker((461.56, 366.071, 509.789), (0.7, 0.7, 0.7), 18.4716)
if "Cog8_0" not in marker_sets:
s=new_marker_set('Cog8_0')
marker_sets["Cog8_0"]=s
s= marker_sets["Cog8_0"]
mark=s.place_marker((575.251, 474.626, 465.989), (1, 0.5, 0), 17.1475)
if "Cog8_1" not in marker_sets:
s=new_marker_set('Cog8_1')
marker_sets["Cog8_1"]=s
s= marker_sets["Cog8_1"]
mark=s.place_marker((595.039, 471.362, 485.911), (1, 0.5, 0), 17.1475)
if "Cog8_2" not in marker_sets:
s=new_marker_set('Cog8_2')
marker_sets["Cog8_2"]=s
s= marker_sets["Cog8_2"]
mark=s.place_marker((591.024, 491.415, 505.418), (1, 0.5, 0), 17.1475)
if "Cog8_3" not in marker_sets:
s=new_marker_set('Cog8_3')
marker_sets["Cog8_3"]=s
s= marker_sets["Cog8_3"]
mark=s.place_marker((580.611, 493.353, 531.521), (1, 0.5, 0), 17.1475)
if "Cog8_4" not in marker_sets:
s=new_marker_set('Cog8_4')
marker_sets["Cog8_4"]=s
s= marker_sets["Cog8_4"]
mark=s.place_marker((564.708, 490.26, 555.028), (1, 0.5, 0), 17.1475)
if "Cog8_5" not in marker_sets:
s=new_marker_set('Cog8_5')
marker_sets["Cog8_5"]=s
s= marker_sets["Cog8_5"]
mark=s.place_marker((553.024, 482.707, 580.407), (1, 0.5, 0), 17.1475)
if "Cog8_GFPC" not in marker_sets:
s=new_marker_set('Cog8_GFPC')
marker_sets["Cog8_GFPC"]=s
s= marker_sets["Cog8_GFPC"]
mark=s.place_marker((570.894, 525.42, 514.688), (1, 0.6, 0.1), 18.4716)
if "Cog8_Anch" not in marker_sets:
s=new_marker_set('Cog8_Anch')
marker_sets["Cog8_Anch"]=s
s= marker_sets["Cog8_Anch"]
mark=s.place_marker((533.826, 440.217, 648.202), (1, 0.6, 0.1), 18.4716)
# Register every accumulated surface model with Chimera.  Idiom fix:
# iterate the dict's values directly instead of looping over keys()
# and re-indexing (same behavior, one lookup fewer per item).
for surf_set in surf_sets.values():
    chimera.openModels.add([surf_set])
| [
"[email protected]"
] | |
537cc1b377a1a29fe985de13d1284703ca373594 | ebcc40516adba151e6a1c772223b0726899a26eb | /slicedimage/url/__init__.py | 903fa8c5d102018aed1a5b5cd312397b50a9e499 | [
"MIT"
] | permissive | spacetx/slicedimage | acf4a767f87b6ab78e657d85efad22ee241939f4 | eb8e1d3899628db66cffed1370f2a7e6dd729c4f | refs/heads/master | 2021-04-09T10:53:15.057821 | 2020-05-26T17:40:11 | 2020-05-26T17:40:11 | 125,316,414 | 7 | 4 | MIT | 2020-05-26T17:40:15 | 2018-03-15T05:24:24 | Python | UTF-8 | Python | false | false | 19 | py | from . import path
| [
"[email protected]"
] | |
ade4325ffae0867072eb07d5294917e637b30a23 | de4d26a724b966ca8d0b95ec3063b5b784129028 | /UserData/UserApp/migrations/0002_auto_20190402_0505.py | cc02790701761a7d0486f6803b359929ae666412 | [] | no_license | ChetanKoranga/UserRESTapi | 88904a326a093842ad68628eed98ea5ca2a95de0 | 11342bef21be163c4faf79744e90e9848e3a89bf | refs/heads/master | 2020-05-04T00:01:22.998117 | 2019-04-02T05:51:18 | 2019-04-02T05:51:18 | 178,876,580 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 373 | py | # Generated by Django 2.2 on 2019-04-02 05:05
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: alter UserModel.zip field."""
    dependencies = [
        ('UserApp', '0001_initial'),
    ]
    operations = [
        migrations.AlterField(
            model_name='usermodel',
            name='zip',
            # Redefines zip as a 10-character string column.
            # NOTE(review): the previous field definition is not visible
            # here — confirm against 0001_initial before assuming widening.
            field=models.CharField(max_length=10),
        ),
    ]
| [
"[email protected]"
] | |
a003a04f25ae531bcff5fcc6b77658dab1d893f8 | ca82e3c6084e697ecbdbf32d96c08293c5540287 | /courses/python_data_structures_linked_lists/Exercise Files/Ch05/05_01/End/dll.py | 50cac5b09fdcbb4bdfd6e43e8d6640dcd496bb4e | [] | no_license | bheki-maenetja/small-projects-py | 8c8b35444ff2ecef7ad77e709392a9c860967ecc | 18504d2e1f1ea48b612a4e469828682f426c9704 | refs/heads/master | 2023-08-17T00:38:06.208787 | 2023-08-16T16:25:22 | 2023-08-16T16:25:22 | 131,871,876 | 1 | 0 | null | 2023-08-14T23:44:23 | 2018-05-02T15:37:58 | Python | UTF-8 | Python | false | false | 1,390 | py | class DLLNode:
    def __init__(self, data):
        """Create an unlinked node holding *data*."""
        self.data = data
        # Forward/backward links; both None until the node is linked in.
        self.next = None
        self.previous = None
def __repr__(self):
return "DLLNode object: data={}".format(self.data)
    def get_data(self):
        """Return the node's payload (the self.data attribute)."""
        return self.data
    def set_data(self, new_data):
        """Replace the node's payload with the new_data
        parameter."""
        self.data = new_data
    def get_next(self):
        """Return the following node, or None at the tail."""
        return self.next
    def set_next(self, new_next):
        """Point this node's forward link at the new_next
        parameter."""
        self.next = new_next
    def get_previous(self):
        """Return the preceding node, or None at the head."""
        return self.previous
    def set_previous(self, new_previous):
        """Point this node's backward link at the
        new_previous parameter."""
        self.previous = new_previous
class DLL:
    """A doubly linked list of DLLNode objects.

    Bug fix: the original __repr__ format string had no ``{}``
    placeholder, so the head was never interpolated.  The previously
    stubbed-out methods (size/search/add_front/remove) are implemented;
    they were ``pass`` and returned None, so no caller could depend on
    their old behavior.
    """
    def __init__(self):
        self.head = None

    def __repr__(self):
        return "<DLL object: head={}>".format(self.head)

    def is_empty(self):
        """Return True when the list has no nodes."""
        return self.head is None

    def size(self):
        """Return the number of nodes in the list (O(n))."""
        count = 0
        node = self.head
        while node is not None:
            count += 1
            node = node.get_next()
        return count

    def search(self, data):
        """Return True if any node holds *data*, else False."""
        node = self.head
        while node is not None:
            if node.get_data() == data:
                return True
            node = node.get_next()
        return False

    def add_front(self, data):
        """Insert a new node holding *data* at the head of the list."""
        node = DLLNode(data)
        node.set_next(self.head)
        if self.head is not None:
            self.head.set_previous(node)
        self.head = node

    def remove(self, data):
        """Unlink the first node holding *data*; no-op when absent."""
        node = self.head
        while node is not None:
            if node.get_data() == data:
                prev, nxt = node.get_previous(), node.get_next()
                if prev is not None:
                    prev.set_next(nxt)
                else:
                    self.head = nxt
                if nxt is not None:
                    nxt.set_previous(prev)
                return
            node = node.get_next()
| [
"[email protected]"
] | |
f3c5d20d29dd9b88627ce522e66785298e8855f1 | 498fcf34fa4482be5c9fefc488666e60edcf46c7 | /supervised_learning/0x08-deep_cnns/6-transition_layer.py | b1f56c159fcbde725fe51e00dbf6f594f96be8dd | [] | no_license | MansourKef/holbertonschool-machine_learning | 7dbc465def04c311c1afb0e8b8903cbe34c72ad3 | 19f78fc09f0ebeb9f27f3f76b98e7a0e9212fd22 | refs/heads/main | 2023-03-12T16:18:08.919099 | 2021-03-05T09:42:09 | 2021-03-05T09:42:09 | 317,303,125 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 659 | py | #!/usr/bin/env python3
"""module"""
import tensorflow.keras as K
def transition_layer(X, nb_filters, compression):
    """Build a DenseNet transition layer: BN -> ReLU -> 1x1 conv -> avg-pool.

    Returns the pooled output tensor and the compressed filter count
    int(compression * nb_filters).
    """
    out_filters = int(compression * nb_filters)
    normed = K.layers.BatchNormalization(axis=3)(X)
    activated = K.layers.Activation("relu")(normed)
    squeezed = K.layers.Conv2D(out_filters,
                               kernel_size=(1, 1),
                               padding="same",
                               kernel_initializer="he_normal",
                               strides=(1, 1))(activated)
    pooled = K.layers.AveragePooling2D(pool_size=(2, 2),
                                       strides=(2, 2))(squeezed)
    return pooled, out_filters
| [
"[email protected]"
] | |
270b750136f37b35a8ec6301de7546fe80dc514e | 8186514b510a801863229e3f9711c0c657e727e5 | /assembly/qtable/qlist_q.py | c4d46f59661410f1d3c06c6df3d6c2b23370a997 | [] | no_license | masknugget/mypyqt | 274b2cbbf66c04927453815248f9c1bc5e65ca17 | b86a49e4b8c7c8c3d8546ce1b49f8f3bb6332307 | refs/heads/main | 2023-08-17T13:30:11.451066 | 2021-09-27T14:14:54 | 2021-09-27T14:14:54 | 355,904,935 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,241 | py | # 自定义控件--实现了一个带全选功能的复选框
import sys
from PyQt5.QtWidgets import QApplication, QListWidget, QCheckBox, QListWidgetItem
from PyQt5.QtCore import Qt
class FilteredList(QListWidget):
    # Custom widget: a QListWidget of checkboxes whose first row is a
    # tri-state "select all" box kept in sync with the other entries.
    # (original comment: "inherits from the list-widget control")
    def __init__(self, textList, parent=None):
        """Build one select-all row plus one checkbox row per string in textList."""
        super().__init__(parent)
        self.selectAll_ch = QCheckBox("全选(selectAll)")
        self.selectAll_ch.setCheckState(Qt.Checked)
        self.selectAll_ch.stateChanged[int].connect(self.on_selectAll) # toggle-all handler
        item = QListWidgetItem(self)
        self.setItemWidget(item, self.selectAll_ch) # first list item hosts the select-all box
        self.dict = dict()   # index -> QCheckBox
        self.boxes = set()   # per-entry checkboxes (excludes the select-all box)
        for index, text in enumerate(textList):
            ch = QCheckBox(text)
            ch.setCheckState(Qt.Unchecked)
            ch.stateChanged[int].connect(self.on_stateChanged)
            # item.setCheckState(Qt.Unchecked)#
            item = QListWidgetItem(self)
            self.setItemWidget(item, ch)
            self.boxes.add(ch)
            self.dict[index] = ch
    def on_selectAll(self, state):
        """Propagate the select-all box: 2 checks every entry, 0 clears every entry."""
        if state == 2:
            for ch in self.boxes:
                ch.setCheckState(2)
        if state == 0:
            for ch in self.boxes:
                ch.setCheckState(0)
    def on_stateChanged(self, state):
        """Keep the select-all box in sync with the individual entries."""
        ch = self.sender()
        if state:
            # count() includes the select-all row itself, hence "- 1".
            if len([ch for ch in self.boxes if ch.checkState()]) == self.count() - 1:
                # Check-state codes: 0 unchecked, 1 partially checked, 2 checked
                # (Qt.Unchecked / Qt.PartiallyChecked / Qt.Checked)
                self.selectAll_ch.setCheckState(2)
            else:
                self.selectAll_ch.setCheckState(1)
        else:
            if len([k for k in self.boxes if k.checkState()]):
                self.selectAll_ch.setCheckState(1)
            else:
                self.selectAll_ch.setCheckState(0)
    def keyPressEvent(self, event):
        # Ctrl+A checks everything by driving the select-all box.
        if event.modifiers() & Qt.ControlModifier and event.key() == Qt.Key_A:
            self.selectAll_ch.setCheckState(2)
if __name__ == '__main__':
    # Demo: show the checkbox list with four sample entries.
    app = QApplication(sys.argv)
    myList = FilteredList(textList=["a", "b", "c", "d"])
    myList.show()
sys.exit(app.exec_()) | [
"[email protected]"
] | |
2282495d9f9f1ac8079c3e9d8dbe84bc6f9a1e8d | edbf80fb7ae7f411aaa1bdc58c1c5ed96c7aeec5 | /app/gateways/OwlveyGateway.py | cd8c37ae458588cbdbea35a4e823f9290733298f | [
"Apache-2.0"
] | permissive | owlvey/power_tools | 3eff4339855e6731b600915732f2a0a011688de8 | cec83fb13a21ebd0592f8d203cc3705101c109b8 | refs/heads/master | 2022-07-18T07:55:17.259885 | 2020-05-15T14:21:20 | 2020-05-15T14:21:20 | 263,683,971 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,664 | py | import datetime
import requests
from app.components.ConfigurationComponent import ConfigurationComponent
class OwlveyGateway:
    def __init__(self, configuration_component: ConfigurationComponent):
        """Capture API endpoints and OAuth client credentials from config."""
        self.host = configuration_component.owlvey_url  # REST API base URL
        self.identity = configuration_component.owlvey_identity  # identity-server base URL
        self.token = None  # decoded token payload; populated by generate_token()
        self.token_on = None  # datetime when the current token was acquired
        self.client_id = configuration_component.owlvey_client
        self.client_secret = configuration_component.owlvey_secret
@staticmethod
def __validate_status_code(response):
if response.status_code > 299:
raise ValueError(response.text)
def generate_token(self):
payload = {
"grant_type": "client_credentials",
"scope": "api",
"client_id": self.client_id,
"client_secret": self.client_secret
}
response = requests.post(self.identity + "/connect/token",
data=payload)
OwlveyGateway.__validate_status_code(response)
self.token_on = datetime.datetime.now()
self.token = response.json()
def __build_authorization_header(self):
if self.token:
expires_in = self.token["expires_in"]
if (self.token_on + datetime.timedelta(seconds=expires_in + 30)) > datetime.datetime.now():
self.generate_token()
else:
self.generate_token()
return {
"Authorization": "Bearer " + self.token["access_token"]
}
def __internal_get(self, url):
response = requests.get(url,
headers=self.__build_authorization_header(),
verify=False)
OwlveyGateway.__validate_status_code(response)
return response.json()
def __internal_put(self, url, payload):
response = requests.put(url, json=payload,
headers=self.__build_authorization_header(),
verify=False)
OwlveyGateway.__validate_status_code(response)
def __internal_delete(self, url, payload):
response = requests.delete(url, json=payload,
headers=self.__build_authorization_header(),
verify=False)
OwlveyGateway.__validate_status_code(response)
def __internal_post(self, url, payload):
response = requests.post(url, json=payload,
headers=self.__build_authorization_header(),
verify=False)
OwlveyGateway.__validate_status_code(response)
return response.json()
def get_customers(self):
return self.__internal_get(self.host + "/customers")
def get_customer(self, name):
customers = self.get_customers()
for cus in customers:
if cus['name'] == name:
return cus
return None
def get_products(self, organization_id):
return self.__internal_get(self.host + '/products?customerId={}'.format(organization_id))
def get_product(self, organization_id, name):
products = self.get_products(organization_id)
for product in products:
if product['name'] == name:
return product
return None
def get_syncs(self, product_id):
return self.__internal_get(self.host + "/products/{}/sync".format(product_id))
def post_sync(self, product_id, name):
return self.__internal_post(self.host + "/products/{}/sync/{}".format(product_id, name), {})
def put_last_anchor(self, product_id, name, target):
self.__internal_put(self.host + "/products/{}/sync/{}".format(product_id, name),
{"target": target.isoformat()})
def get_features(self, product_id):
return self.__internal_get(self.host + "/features?productId={}".format(product_id))
def get_feature_detail(self, feature_id):
return self.__internal_get(self.host + "/features/{}".format(feature_id))
def create_customer(self, name):
return self.__internal_post(self.host + "/customers", {"name": name})
def create_product(self, customer_id, name):
return self.__internal_post(self.host + "/products", {"customerId": customer_id, "name": name})
def create_service(self, product_id, name, slo):
service = self.__internal_post(self.host + "/services", {"productId": product_id, "name": name})
service_id = service["id"]
service["slo"] = slo
self.__internal_put(self.host + "/services/" + str(service_id), service)
return service
def assign_indicator(self, feature_id, source_id):
return self.__internal_put(self.host + "/features/{}/indicators/{}".format(feature_id, source_id), {})
def un_assign_indicator(self, feature_id, source_id):
return self.__internal_delete(self.host + "/features/{}/indicators/{}".format(feature_id, source_id), {})
def create_feature(self, product_id, name):
return self.__internal_post(self.host + "/features", {"productId": product_id, "name": name})
def create_incident(self, product_id, key, title, resolution_on: datetime, ttd, tte, ttf, url):
response = requests.post(self.host + "/incidents", json={"productId": product_id,
"key": key,
"title": title
},
verify=False)
OwlveyGateway.__validate_status_code(response)
incident_id = response.json()["id"]
response = requests.put(self.host + "/incidents/{}".format(incident_id),
json={"title": title, "ttd": ttd, "tte": tte, "ttf": ttf, "url": url,
"affected": 1,
"end": resolution_on.isoformat()},
verify=False)
OwlveyGateway.__validate_status_code(response)
return response.json()
def assign_incident_feature(self, incident_id, feature_id):
response = requests.put(self.host + "/incidents/{}/features/{}".format(incident_id, feature_id),
verify=False)
OwlveyGateway.__validate_status_code(response)
def get_sources(self, product_id):
return self.__internal_get(self.host + "/sources?productId={}".format(product_id))
def create_source(self, product_id, name, kind, group,
good_definition: str = "", total_definition: str = ""):
result = self.__internal_post(self.host + "/sources",
{
"productId": product_id,
"name": name,
"kind": kind,
"group": group
})
result["goodDefinition"] = good_definition
result["totalDefinition"] = total_definition
self.__internal_put(self.host + "/sources/{}".format(result["id"]), result)
return result
def create_sli(self, feature_id, source_id):
self.__internal_put(self.host + "/features/{}/indicators/{}".format(feature_id, source_id), {})
def search_feature(self, product_id, name):
return self.__internal_get(self.host + "/features/search?productId={}&name={}".format(product_id, name))
def create_source_item(self, source_id, start, total, good):
return self.__internal_post(self.host + "/sourceItems",
{
"sourceId": source_id,
"start": start.isoformat(),
"end": start.isoformat(),
"total": int(total),
"good": int(good)
})
def create_source_item_proportion(self, source_id, start, percent):
result = self.__internal_post(self.host + "/sourceItems/proportion",
{
"sourceId": source_id,
"start": start.isoformat(),
"end": start.isoformat(),
"proportion": percent,
})
return result
| [
"[email protected]"
] | |
12655a75caf61802783410d883ae5ec5680cefe5 | b77cc1448ae2c68589c5ee24e1a0b1e53499e606 | /asset/migrations/0005_auto_20171026_1532.py | eb4e2ea65956f0a359a6c7516eb7dbb444b94e2a | [] | no_license | PregTech-c/Hrp_system | a5514cf6b4c778bf7cc58e8a6e8120ac7048a0a7 | 11d8dd3221497c536dd7df9028b9991632055b21 | refs/heads/master | 2022-10-09T07:54:49.538270 | 2018-08-21T11:12:04 | 2018-08-21T11:12:04 | 145,424,954 | 1 | 1 | null | 2022-10-01T09:48:53 | 2018-08-20T13:58:31 | JavaScript | UTF-8 | Python | false | false | 664 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2017-10-26 12:32
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the required ``model`` column to Asset and allow NULL in
    ``description`` (auto-generated by Django 1.10)."""
    dependencies = [
        ('asset', '0004_auto_20171022_1404'),
    ]
    operations = [
        # New non-null CharField; 'test' is a one-off default used only to
        # back-fill existing rows (preserve_default=False drops it afterwards).
        migrations.AddField(
            model_name='asset',
            name='model',
            field=models.CharField(default='test', max_length=32),
            preserve_default=False,
        ),
        # Relax description so it may be NULL.
        migrations.AlterField(
            model_name='asset',
            name='description',
            field=models.CharField(max_length=256, null=True),
        ),
    ]
| [
"[email protected]"
] | |
33282c89da89f060278ed17e50013ffdb1f88707 | 455c1cec4101254a0b7f50349e915411033a0af1 | /supervised_learning/0x00-binary_classification/9-neural_network.py | 5f65dc0fea7fe410b59fbce3194f1ddcd97e815b | [] | no_license | Daransoto/holbertonschool-machine_learning | 30c9f2753463d57cac87f245b77c8d6655351e75 | 1e7cd1589e6e4896ee48a24b9ca85595e16e929d | refs/heads/master | 2021-03-10T14:32:09.419389 | 2020-10-23T19:47:31 | 2020-10-23T19:47:31 | 246,461,514 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,290 | py | #!/usr/bin/env python3
""" Creates a neural network. """
import numpy as np
class NeuralNetwork:
    """Neural network with one hidden layer for binary classification."""

    def __init__(self, nx, nodes):
        """Initialize weights, biases and activations.

        nx: number of input features.
        nodes: number of neurons in the hidden layer.
        Raises TypeError when an argument is not an int, ValueError when
        it is not positive.
        """
        # Validate each argument in declaration order: type first, then range.
        for label, value in (('nx', nx), ('nodes', nodes)):
            if type(value) != int:
                raise TypeError('{} must be an integer'.format(label))
            if value < 1:
                raise ValueError('{} must be a positive integer'.format(label))
        # Hidden layer: random weights, zero biases, zero activation.
        self.__W1 = np.random.randn(nodes, nx)
        self.__b1 = np.zeros((nodes, 1))
        self.__A1 = 0
        # Output layer (single neuron).
        self.__W2 = np.random.randn(1, nodes)
        self.__b2 = 0
        self.__A2 = 0

    @property
    def W1(self):
        """Hidden-layer weight matrix (nodes x nx)."""
        return self.__W1

    @property
    def b1(self):
        """Hidden-layer bias column vector (nodes x 1)."""
        return self.__b1

    @property
    def A1(self):
        """Hidden-layer activation (0 until forward propagation runs)."""
        return self.__A1

    @property
    def W2(self):
        """Output-layer weight matrix (1 x nodes)."""
        return self.__W2

    @property
    def b2(self):
        """Output-layer bias scalar."""
        return self.__b2

    @property
    def A2(self):
        """Output-layer activation (0 until forward propagation runs)."""
        return self.__A2
| [
"[email protected]"
] | |
f10d585c637387ccc269aab61ce295e13ab11663 | 321e58ab3e6b2385bb3549aaaefd56a58c2a51e7 | /python/atpic/perf_postgres.py | 3c2b1312c886a38a2fa3d9e62deeb883a4697fb5 | [] | no_license | alexmadon/atpic_photosharing | 7829118d032344bd9a67818cd50e2c27a228d028 | 9fdddeb78548dadf946b1951aea0d0632e979156 | refs/heads/master | 2020-06-02T15:00:29.282979 | 2017-06-12T17:09:52 | 2017-06-12T17:09:52 | 94,095,494 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,408 | py | import atpic.database
import time
import pycurl
import StringIO
import cStringIO
# NOTE: Python 2 script (print statements, StringIO/cStringIO modules).
# Micro-benchmark comparing four query strategies; prints elapsed times.
# Benchmark 1: open and close a new Postgres connection for every query.
time1=time.time()
for i in range(1,100):
    print i
    con=atpic.database.connect()
    listofdict=atpic.database.query("select 1",con)
    con.close()
time2=time.time()
print "=========="
# Benchmark 2: reuse one Postgres connection for all queries.
con=atpic.database.connect()
for i in range(1,100):
    print i
    query="select id from artist_pic where id='%i'" % i
    listofdict=atpic.database.query(query,con)
con.close()
time3=time.time()
# using Solr + curl new curl handle each time (new socket)
#fp=open("/dev/null","w")
fp=cStringIO.StringIO()
for i in range(1,100):
    print i
    url="http://localhost:8983/solr/select/?q=pid:%i&fl=pid" % i
    c=pycurl.Curl()
    # c.setopt(c.WRITEDATA,fp);
    c.setopt(c.WRITEFUNCTION, fp.write)
    c.setopt(c.URL, url);
    c.perform()
    c.close()
    # print data
fp.close()
time4=time.time()
# using Solr + curl same curl handle
c=pycurl.Curl()
fp=cStringIO.StringIO()
for i in range(1,100):
    print i
    #c.setopt(c.WRITEDATA,fp);
    url="http://localhost:8983/solr/select/?q=pid:%i&fl=pid" % i
    c.setopt(c.WRITEFUNCTION, fp.write)
    c.setopt(c.URL, url);
    c.perform()
c.close()
fp.close()
time5=time.time()
# Report per-strategy elapsed times and the reconnect-vs-reuse ratio.
print "Time1 %s" % (time2-time1)
print "Time2 %s" % (time3-time2)
print "Ratio=%f" % ((time2-time1)/(time3-time2))
print "Time3 %s" % (time4-time3)
print "Time4 %s" % (time5-time4)
| [
"[email protected]"
] | |
06f952c695c3533ca0dd029f3e93895af5b02c59 | 5c8139f1e57e06c7eaf603bd8fe74d9f22620513 | /PartB/py删除链表的倒数第n个节点的位置的值2.py | ab9093a8ca2755b9b1f62111641d210996e07d4a | [] | no_license | madeibao/PythonAlgorithm | c8a11d298617d1abb12a72461665583c6a44f9d2 | b4c8a75e724a674812b8a38c0202485776445d89 | refs/heads/master | 2023-04-03T07:18:49.842063 | 2021-04-11T12:02:40 | 2021-04-11T12:02:40 | 325,269,130 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 915 | py |
# Delete the n-th node from the end of a singly linked list.
class ListNode(object):
    """One node of a singly linked list."""

    def __init__(self, x):
        # Payload value and pointer to the successor node (None = tail).
        self.val = x
        self.next = None
class Solution(object):
    """Removes the n-th node from the end of a singly linked list."""

    def remove(self, head, n):
        """Return the list head after deleting the n-th node from the end.

        Uses two pointers kept n nodes apart; assumes 1 <= n <= length.
        """
        # Sentinel in front of the head simplifies deleting the first node.
        sentinel = ListNode(-1)
        sentinel.next = head
        # Advance the lead pointer n steps to open a gap of n.
        lead = sentinel
        for _ in range(n):
            lead = lead.next
        # Walk both pointers until lead sits on the last node; trail then
        # points at the node just before the one to delete.
        trail = sentinel
        while lead and lead.next:
            lead = lead.next
            trail = trail.next
        trail.next = trail.next.next
        return sentinel.next
if __name__ == "__main__":
s = Solution()
n1 = ListNode(1)
n2 = ListNode(2)
n3 = ListNode(3)
n4 = ListNode(4)
n5 = ListNode(5)
n6 = ListNode(6)
n1.next = n2
n2.next = n3
n3.next = n4
n4.next = n5
n5.next = n6
n6.next = None
k = 2
res = s.remove(n1, k)
while res:
print(res.val, end="->")
res = res.next
| [
"[email protected]"
] | |
9ac60f6dc3755d4c8f3c20fd4d1cd54718994a90 | 2faf152deabb0476ac43d4754f3b529fd678a36d | /ch_18.py | 3d923149df97df02941390334db1bf1ff1f74392 | [] | no_license | Sakartu/matasano | 46cba1325a01c41f6272f80b9fa698c6338c2e50 | b42e5a2ce5daa2fcc6691873e995a4b0d05e03d2 | refs/heads/master | 2021-01-23T09:51:50.305296 | 2015-08-10T15:37:59 | 2015-08-10T15:37:59 | 32,535,769 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 542 | py | #!/usr/bin/env python3
# -*- coding: utf8 -*-
"""
Usage:
test_ctr.py
"""
import base64
import util
__author__ = 'peter'
def main():
test = base64.b64decode('L77na/nrFsKvynd6HzOoG7GHTLXsTVu9qvY/2syLXzhPweyyMTJULu/6/kXX0KSvoOLSFQ==')
assert util.aes_ctr_decrypt(test, b"YELLOW SUBMARINE") == b"Yo, VIP Let's kick it Ice, Ice, baby Ice, Ice, baby "
k = util.get_random_bytes(16)
m = b'This is an interesting message'
assert util.aes_ctr_decrypt(util.aes_ctr_encrypt(m, k), k) == m
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
7c851f6cf3c45e4effa984c2a42fc8551f5c800e | a40950330ea44c2721f35aeeab8f3a0a11846b68 | /INTERACTIONS_V1/INTERACTION2/AppSBC/UI/UI.py | d3fdd88cbfb7142e29190f9222894fe2a9977d87 | [] | no_license | huang443765159/kai | 7726bcad4e204629edb453aeabcc97242af7132b | 0d66ae4da5a6973e24e1e512fd0df32335e710c5 | refs/heads/master | 2023-03-06T23:13:59.600011 | 2023-03-04T06:14:12 | 2023-03-04T06:14:12 | 233,500,005 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 35,377 | py | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'UI.ui'
#
# Created by: PyQt5 UI code generator 5.15.0
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_SBC(object):
def setupUi(self, SBC):
SBC.setObjectName("SBC")
SBC.resize(395, 602)
self.SBC_2 = QtWidgets.QWidget(SBC)
self.SBC_2.setObjectName("SBC_2")
self.tab_device = QtWidgets.QTabWidget(self.SBC_2)
self.tab_device.setGeometry(QtCore.QRect(10, 20, 371, 91))
self.tab_device.setTabPosition(QtWidgets.QTabWidget.West)
self.tab_device.setTabShape(QtWidgets.QTabWidget.Triangular)
self.tab_device.setElideMode(QtCore.Qt.ElideLeft)
self.tab_device.setObjectName("tab_device")
self.device = QtWidgets.QWidget()
self.device.setObjectName("device")
self.label_pump_station = QtWidgets.QLabel(self.device)
self.label_pump_station.setGeometry(QtCore.QRect(0, 20, 91, 14))
self.label_pump_station.setMinimumSize(QtCore.QSize(0, 14))
self.label_pump_station.setMaximumSize(QtCore.QSize(16777215, 14))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(False)
font.setWeight(50)
self.label_pump_station.setFont(font)
self.label_pump_station.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_pump_station.setObjectName("label_pump_station")
self.ip_local = QtWidgets.QLabel(self.device)
self.ip_local.setGeometry(QtCore.QRect(180, 20, 150, 14))
self.ip_local.setMinimumSize(QtCore.QSize(75, 14))
self.ip_local.setMaximumSize(QtCore.QSize(150, 14))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(False)
font.setWeight(50)
self.ip_local.setFont(font)
self.ip_local.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.ip_local.setObjectName("ip_local")
self.ip_nuc = QtWidgets.QLabel(self.device)
self.ip_nuc.setGeometry(QtCore.QRect(180, 50, 160, 14))
self.ip_nuc.setMinimumSize(QtCore.QSize(160, 14))
self.ip_nuc.setMaximumSize(QtCore.QSize(170, 14))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(False)
font.setWeight(50)
self.ip_nuc.setFont(font)
self.ip_nuc.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.ip_nuc.setObjectName("ip_nuc")
self.led_pump_station = QtWidgets.QToolButton(self.device)
self.led_pump_station.setGeometry(QtCore.QRect(100, 20, 50, 14))
self.led_pump_station.setMinimumSize(QtCore.QSize(50, 0))
self.led_pump_station.setMaximumSize(QtCore.QSize(50, 14))
font = QtGui.QFont()
font.setPointSize(8)
self.led_pump_station.setFont(font)
self.led_pump_station.setToolTip("")
self.led_pump_station.setToolTipDuration(-1)
self.led_pump_station.setObjectName("led_pump_station")
self.label_guides = QtWidgets.QLabel(self.device)
self.label_guides.setGeometry(QtCore.QRect(0, 50, 91, 14))
self.label_guides.setMinimumSize(QtCore.QSize(0, 14))
self.label_guides.setMaximumSize(QtCore.QSize(16777215, 14))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(False)
font.setWeight(50)
self.label_guides.setFont(font)
self.label_guides.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_guides.setObjectName("label_guides")
self.led_guides = QtWidgets.QToolButton(self.device)
self.led_guides.setGeometry(QtCore.QRect(100, 50, 50, 14))
self.led_guides.setMinimumSize(QtCore.QSize(50, 0))
self.led_guides.setMaximumSize(QtCore.QSize(50, 14))
font = QtGui.QFont()
font.setPointSize(8)
self.led_guides.setFont(font)
self.led_guides.setToolTip("")
self.led_guides.setToolTipDuration(-1)
self.led_guides.setObjectName("led_guides")
self.tab_device.addTab(self.device, "")
self.tab_device_2 = QtWidgets.QTabWidget(self.SBC_2)
self.tab_device_2.setGeometry(QtCore.QRect(10, 120, 371, 111))
self.tab_device_2.setTabPosition(QtWidgets.QTabWidget.West)
self.tab_device_2.setTabShape(QtWidgets.QTabWidget.Triangular)
self.tab_device_2.setElideMode(QtCore.Qt.ElideLeft)
self.tab_device_2.setObjectName("tab_device_2")
self.device_2 = QtWidgets.QWidget()
self.device_2.setObjectName("device_2")
self.gridLayoutWidget_4 = QtWidgets.QWidget(self.device_2)
self.gridLayoutWidget_4.setGeometry(QtCore.QRect(-10, 20, 361, 40))
self.gridLayoutWidget_4.setObjectName("gridLayoutWidget_4")
self.gridLayout_4 = QtWidgets.QGridLayout(self.gridLayoutWidget_4)
self.gridLayout_4.setContentsMargins(0, 0, 0, 0)
self.gridLayout_4.setObjectName("gridLayout_4")
self.ui_stage_show = QtWidgets.QLineEdit(self.gridLayoutWidget_4)
self.ui_stage_show.setMaximumSize(QtCore.QSize(250, 14))
font = QtGui.QFont()
font.setPointSize(9)
self.ui_stage_show.setFont(font)
self.ui_stage_show.setObjectName("ui_stage_show")
self.gridLayout_4.addWidget(self.ui_stage_show, 0, 1, 1, 1)
self.label_stage_show = QtWidgets.QLabel(self.gridLayoutWidget_4)
self.label_stage_show.setMinimumSize(QtCore.QSize(0, 14))
self.label_stage_show.setMaximumSize(QtCore.QSize(70, 14))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(False)
font.setWeight(50)
self.label_stage_show.setFont(font)
self.label_stage_show.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_stage_show.setObjectName("label_stage_show")
self.gridLayout_4.addWidget(self.label_stage_show, 0, 0, 1, 1)
self.label_stage_show_btn = QtWidgets.QLabel(self.gridLayoutWidget_4)
self.label_stage_show_btn.setMinimumSize(QtCore.QSize(0, 14))
self.label_stage_show_btn.setMaximumSize(QtCore.QSize(70, 14))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(False)
font.setWeight(50)
self.label_stage_show_btn.setFont(font)
self.label_stage_show_btn.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_stage_show_btn.setObjectName("label_stage_show_btn")
self.gridLayout_4.addWidget(self.label_stage_show_btn, 1, 0, 1, 1)
self.btn_welcome = QtWidgets.QPushButton(self.device_2)
self.btn_welcome.setGeometry(QtCore.QRect(10, 60, 80, 20))
self.btn_welcome.setMaximumSize(QtCore.QSize(80, 25))
font = QtGui.QFont()
font.setPointSize(9)
self.btn_welcome.setFont(font)
self.btn_welcome.setObjectName("btn_welcome")
self.btn_forward = QtWidgets.QPushButton(self.device_2)
self.btn_forward.setGeometry(QtCore.QRect(120, 60, 80, 20))
self.btn_forward.setMaximumSize(QtCore.QSize(80, 25))
font = QtGui.QFont()
font.setPointSize(9)
self.btn_forward.setFont(font)
self.btn_forward.setObjectName("btn_forward")
self.btn_stop_forward = QtWidgets.QPushButton(self.device_2)
self.btn_stop_forward.setGeometry(QtCore.QRect(230, 60, 80, 20))
self.btn_stop_forward.setMaximumSize(QtCore.QSize(80, 25))
font = QtGui.QFont()
font.setPointSize(9)
self.btn_stop_forward.setFont(font)
self.btn_stop_forward.setObjectName("btn_stop_forward")
self.btn_back_driving = QtWidgets.QPushButton(self.device_2)
self.btn_back_driving.setGeometry(QtCore.QRect(10, 80, 80, 20))
self.btn_back_driving.setMaximumSize(QtCore.QSize(80, 25))
font = QtGui.QFont()
font.setPointSize(9)
self.btn_back_driving.setFont(font)
self.btn_back_driving.setObjectName("btn_back_driving")
self.btn_washing = QtWidgets.QPushButton(self.device_2)
self.btn_washing.setGeometry(QtCore.QRect(120, 80, 80, 20))
self.btn_washing.setMaximumSize(QtCore.QSize(80, 25))
font = QtGui.QFont()
font.setPointSize(9)
self.btn_washing.setFont(font)
self.btn_washing.setObjectName("btn_washing")
self.btn_washing_end = QtWidgets.QPushButton(self.device_2)
self.btn_washing_end.setGeometry(QtCore.QRect(230, 80, 80, 20))
self.btn_washing_end.setMaximumSize(QtCore.QSize(80, 25))
font = QtGui.QFont()
font.setPointSize(9)
self.btn_washing_end.setFont(font)
self.btn_washing_end.setObjectName("btn_washing_end")
self.gridLayoutWidget_2 = QtWidgets.QWidget(self.device_2)
self.gridLayoutWidget_2.setGeometry(QtCore.QRect(0, 0, 341, 17))
self.gridLayoutWidget_2.setObjectName("gridLayoutWidget_2")
self.gridLayout_2 = QtWidgets.QGridLayout(self.gridLayoutWidget_2)
self.gridLayout_2.setContentsMargins(0, 0, 0, 0)
self.gridLayout_2.setObjectName("gridLayout_2")
self.ui_guides_data1 = QtWidgets.QLineEdit(self.gridLayoutWidget_2)
self.ui_guides_data1.setMaximumSize(QtCore.QSize(16777215, 15))
font = QtGui.QFont()
font.setPointSize(9)
self.ui_guides_data1.setFont(font)
self.ui_guides_data1.setObjectName("ui_guides_data1")
self.gridLayout_2.addWidget(self.ui_guides_data1, 0, 1, 1, 1)
self.label_guides_2 = QtWidgets.QLabel(self.gridLayoutWidget_2)
self.label_guides_2.setMinimumSize(QtCore.QSize(0, 14))
self.label_guides_2.setMaximumSize(QtCore.QSize(16777215, 14))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(False)
font.setWeight(50)
self.label_guides_2.setFont(font)
self.label_guides_2.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_guides_2.setObjectName("label_guides_2")
self.gridLayout_2.addWidget(self.label_guides_2, 0, 0, 1, 1)
self.ui_guides_data2 = QtWidgets.QLineEdit(self.gridLayoutWidget_2)
self.ui_guides_data2.setMaximumSize(QtCore.QSize(16777215, 15))
font = QtGui.QFont()
font.setPointSize(9)
self.ui_guides_data2.setFont(font)
self.ui_guides_data2.setObjectName("ui_guides_data2")
self.gridLayout_2.addWidget(self.ui_guides_data2, 0, 2, 1, 1)
self.tab_device_2.addTab(self.device_2, "")
self.tab_pumps_station = QtWidgets.QTabWidget(self.SBC_2)
self.tab_pumps_station.setGeometry(QtCore.QRect(10, 370, 371, 221))
self.tab_pumps_station.setTabPosition(QtWidgets.QTabWidget.West)
self.tab_pumps_station.setTabShape(QtWidgets.QTabWidget.Triangular)
self.tab_pumps_station.setElideMode(QtCore.Qt.ElideLeft)
self.tab_pumps_station.setObjectName("tab_pumps_station")
self.device_3 = QtWidgets.QWidget()
self.device_3.setObjectName("device_3")
self.gridLayoutWidget = QtWidgets.QWidget(self.device_3)
self.gridLayoutWidget.setGeometry(QtCore.QRect(10, 10, 321, 17))
self.gridLayoutWidget.setObjectName("gridLayoutWidget")
self.gridLayout = QtWidgets.QGridLayout(self.gridLayoutWidget)
self.gridLayout.setContentsMargins(0, 0, 0, 0)
self.gridLayout.setObjectName("gridLayout")
self.ui_drain_data1 = QtWidgets.QLineEdit(self.gridLayoutWidget)
self.ui_drain_data1.setMaximumSize(QtCore.QSize(16777215, 15))
font = QtGui.QFont()
font.setPointSize(9)
self.ui_drain_data1.setFont(font)
self.ui_drain_data1.setObjectName("ui_drain_data1")
self.gridLayout.addWidget(self.ui_drain_data1, 0, 1, 1, 1)
self.DRAIN = QtWidgets.QLabel(self.gridLayoutWidget)
self.DRAIN.setMinimumSize(QtCore.QSize(0, 14))
self.DRAIN.setMaximumSize(QtCore.QSize(16777215, 14))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(False)
font.setWeight(50)
self.DRAIN.setFont(font)
self.DRAIN.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.DRAIN.setObjectName("DRAIN")
self.gridLayout.addWidget(self.DRAIN, 0, 0, 1, 1)
self.ui_drain_data2 = QtWidgets.QLineEdit(self.gridLayoutWidget)
self.ui_drain_data2.setMaximumSize(QtCore.QSize(16777215, 15))
font = QtGui.QFont()
font.setPointSize(9)
self.ui_drain_data2.setFont(font)
self.ui_drain_data2.setObjectName("ui_drain_data2")
self.gridLayout.addWidget(self.ui_drain_data2, 0, 2, 1, 1)
self.gridLayoutWidget_3 = QtWidgets.QWidget(self.device_3)
self.gridLayoutWidget_3.setGeometry(QtCore.QRect(10, 40, 321, 173))
self.gridLayoutWidget_3.setObjectName("gridLayoutWidget_3")
self.gridLayout_3 = QtWidgets.QGridLayout(self.gridLayoutWidget_3)
self.gridLayout_3.setContentsMargins(0, 0, 0, 0)
self.gridLayout_3.setObjectName("gridLayout_3")
self.ui_wheel_data = QtWidgets.QLineEdit(self.gridLayoutWidget_3)
self.ui_wheel_data.setMaximumSize(QtCore.QSize(35, 15))
font = QtGui.QFont()
font.setPointSize(9)
self.ui_wheel_data.setFont(font)
self.ui_wheel_data.setObjectName("ui_wheel_data")
self.gridLayout_3.addWidget(self.ui_wheel_data, 4, 1, 1, 1)
self.DRAIN_6 = QtWidgets.QLabel(self.gridLayoutWidget_3)
self.DRAIN_6.setMinimumSize(QtCore.QSize(0, 14))
self.DRAIN_6.setMaximumSize(QtCore.QSize(25, 14))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(False)
font.setWeight(50)
self.DRAIN_6.setFont(font)
self.DRAIN_6.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.DRAIN_6.setObjectName("DRAIN_6")
self.gridLayout_3.addWidget(self.DRAIN_6, 2, 2, 1, 1)
self.DRAIN_10 = QtWidgets.QLabel(self.gridLayoutWidget_3)
self.DRAIN_10.setMinimumSize(QtCore.QSize(0, 14))
self.DRAIN_10.setMaximumSize(QtCore.QSize(25, 14))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(False)
font.setWeight(50)
self.DRAIN_10.setFont(font)
self.DRAIN_10.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.DRAIN_10.setObjectName("DRAIN_10")
self.gridLayout_3.addWidget(self.DRAIN_10, 4, 2, 1, 1)
self.ui_acid_data = QtWidgets.QLineEdit(self.gridLayoutWidget_3)
self.ui_acid_data.setMaximumSize(QtCore.QSize(35, 15))
font = QtGui.QFont()
font.setPointSize(9)
self.ui_acid_data.setFont(font)
self.ui_acid_data.setObjectName("ui_acid_data")
self.gridLayout_3.addWidget(self.ui_acid_data, 3, 1, 1, 1)
self.ui_alkali_data = QtWidgets.QLineEdit(self.gridLayoutWidget_3)
self.ui_alkali_data.setMaximumSize(QtCore.QSize(35, 15))
font = QtGui.QFont()
font.setPointSize(9)
self.ui_alkali_data.setFont(font)
self.ui_alkali_data.setObjectName("ui_alkali_data")
self.gridLayout_3.addWidget(self.ui_alkali_data, 2, 1, 1, 1)
self.DRAIN_4 = QtWidgets.QLabel(self.gridLayoutWidget_3)
self.DRAIN_4.setMinimumSize(QtCore.QSize(0, 14))
self.DRAIN_4.setMaximumSize(QtCore.QSize(25, 14))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(False)
font.setWeight(50)
self.DRAIN_4.setFont(font)
self.DRAIN_4.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.DRAIN_4.setObjectName("DRAIN_4")
self.gridLayout_3.addWidget(self.DRAIN_4, 1, 2, 1, 1)
self.DRAIN_8 = QtWidgets.QLabel(self.gridLayoutWidget_3)
self.DRAIN_8.setMinimumSize(QtCore.QSize(0, 14))
self.DRAIN_8.setMaximumSize(QtCore.QSize(25, 14))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(False)
font.setWeight(50)
self.DRAIN_8.setFont(font)
self.DRAIN_8.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.DRAIN_8.setObjectName("DRAIN_8")
self.gridLayout_3.addWidget(self.DRAIN_8, 3, 2, 1, 1)
self.label_chem = QtWidgets.QLabel(self.gridLayoutWidget_3)
self.label_chem.setMinimumSize(QtCore.QSize(0, 14))
self.label_chem.setMaximumSize(QtCore.QSize(40, 14))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(False)
font.setWeight(50)
self.label_chem.setFont(font)
self.label_chem.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_chem.setObjectName("label_chem")
self.gridLayout_3.addWidget(self.label_chem, 0, 0, 1, 1)
self.ui_wax_data = QtWidgets.QLineEdit(self.gridLayoutWidget_3)
self.ui_wax_data.setMaximumSize(QtCore.QSize(35, 15))
font = QtGui.QFont()
font.setPointSize(9)
self.ui_wax_data.setFont(font)
self.ui_wax_data.setObjectName("ui_wax_data")
self.gridLayout_3.addWidget(self.ui_wax_data, 5, 1, 1, 1)
self.label_wheel_data = QtWidgets.QLabel(self.gridLayoutWidget_3)
self.label_wheel_data.setMinimumSize(QtCore.QSize(0, 14))
self.label_wheel_data.setMaximumSize(QtCore.QSize(40, 14))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(False)
font.setWeight(50)
self.label_wheel_data.setFont(font)
self.label_wheel_data.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_wheel_data.setObjectName("label_wheel_data")
self.gridLayout_3.addWidget(self.label_wheel_data, 4, 0, 1, 1)
self.label_wax_data = QtWidgets.QLabel(self.gridLayoutWidget_3)
self.label_wax_data.setMinimumSize(QtCore.QSize(0, 14))
self.label_wax_data.setMaximumSize(QtCore.QSize(40, 14))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(False)
font.setWeight(50)
self.label_wax_data.setFont(font)
self.label_wax_data.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_wax_data.setObjectName("label_wax_data")
self.gridLayout_3.addWidget(self.label_wax_data, 5, 0, 1, 1)
self.label_acid_data = QtWidgets.QLabel(self.gridLayoutWidget_3)
self.label_acid_data.setMinimumSize(QtCore.QSize(0, 14))
self.label_acid_data.setMaximumSize(QtCore.QSize(40, 14))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(False)
font.setWeight(50)
self.label_acid_data.setFont(font)
self.label_acid_data.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_acid_data.setObjectName("label_acid_data")
self.gridLayout_3.addWidget(self.label_acid_data, 3, 0, 1, 1)
self.label_water_data = QtWidgets.QLabel(self.gridLayoutWidget_3)
self.label_water_data.setMinimumSize(QtCore.QSize(0, 14))
self.label_water_data.setMaximumSize(QtCore.QSize(40, 14))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(False)
font.setWeight(50)
self.label_water_data.setFont(font)
self.label_water_data.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_water_data.setObjectName("label_water_data")
self.gridLayout_3.addWidget(self.label_water_data, 1, 0, 1, 1)
self.label_alkali_data = QtWidgets.QLabel(self.gridLayoutWidget_3)
self.label_alkali_data.setMinimumSize(QtCore.QSize(0, 14))
self.label_alkali_data.setMaximumSize(QtCore.QSize(40, 14))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(False)
font.setWeight(50)
self.label_alkali_data.setFont(font)
self.label_alkali_data.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_alkali_data.setObjectName("label_alkali_data")
self.gridLayout_3.addWidget(self.label_alkali_data, 2, 0, 1, 1)
self.ui_water_data = QtWidgets.QLineEdit(self.gridLayoutWidget_3)
self.ui_water_data.setMaximumSize(QtCore.QSize(35, 15))
font = QtGui.QFont()
font.setPointSize(9)
self.ui_water_data.setFont(font)
self.ui_water_data.setObjectName("ui_water_data")
self.gridLayout_3.addWidget(self.ui_water_data, 1, 1, 1, 1)
self.DRAIN_12 = QtWidgets.QLabel(self.gridLayoutWidget_3)
self.DRAIN_12.setMinimumSize(QtCore.QSize(0, 14))
self.DRAIN_12.setMaximumSize(QtCore.QSize(25, 14))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(False)
font.setWeight(50)
self.DRAIN_12.setFont(font)
self.DRAIN_12.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.DRAIN_12.setObjectName("DRAIN_12")
self.gridLayout_3.addWidget(self.DRAIN_12, 5, 2, 1, 1)
self.led_water = QtWidgets.QToolButton(self.gridLayoutWidget_3)
self.led_water.setMaximumSize(QtCore.QSize(150, 15))
font = QtGui.QFont()
font.setPointSize(9)
self.led_water.setFont(font)
self.led_water.setObjectName("led_water")
self.gridLayout_3.addWidget(self.led_water, 1, 3, 1, 1)
self.led_alkali = QtWidgets.QToolButton(self.gridLayoutWidget_3)
self.led_alkali.setMaximumSize(QtCore.QSize(150, 15))
font = QtGui.QFont()
font.setPointSize(9)
self.led_alkali.setFont(font)
self.led_alkali.setObjectName("led_alkali")
self.gridLayout_3.addWidget(self.led_alkali, 2, 3, 1, 1)
self.led_acid = QtWidgets.QToolButton(self.gridLayoutWidget_3)
self.led_acid.setMaximumSize(QtCore.QSize(150, 15))
font = QtGui.QFont()
font.setPointSize(9)
self.led_acid.setFont(font)
self.led_acid.setObjectName("led_acid")
self.gridLayout_3.addWidget(self.led_acid, 3, 3, 1, 1)
self.led_wheel = QtWidgets.QToolButton(self.gridLayoutWidget_3)
self.led_wheel.setMaximumSize(QtCore.QSize(150, 15))
font = QtGui.QFont()
font.setPointSize(9)
self.led_wheel.setFont(font)
self.led_wheel.setObjectName("led_wheel")
self.gridLayout_3.addWidget(self.led_wheel, 4, 3, 1, 1)
self.led_wax = QtWidgets.QToolButton(self.gridLayoutWidget_3)
self.led_wax.setMaximumSize(QtCore.QSize(150, 15))
font = QtGui.QFont()
font.setPointSize(9)
self.led_wax.setFont(font)
self.led_wax.setObjectName("led_wax")
self.gridLayout_3.addWidget(self.led_wax, 5, 3, 1, 1)
self.tab_pumps_station.addTab(self.device_3, "")
self.tab_device_3 = QtWidgets.QTabWidget(self.SBC_2)
self.tab_device_3.setGeometry(QtCore.QRect(10, 230, 371, 141))
self.tab_device_3.setTabPosition(QtWidgets.QTabWidget.West)
self.tab_device_3.setTabShape(QtWidgets.QTabWidget.Triangular)
self.tab_device_3.setElideMode(QtCore.Qt.ElideLeft)
self.tab_device_3.setObjectName("tab_device_3")
self.pumpswitch = QtWidgets.QWidget()
self.pumpswitch.setObjectName("pumpswitch")
self.btn_all_stop = QtWidgets.QCheckBox(self.pumpswitch)
self.btn_all_stop.setGeometry(QtCore.QRect(0, 60, 91, 16))
font = QtGui.QFont()
font.setPointSize(10)
self.btn_all_stop.setFont(font)
self.btn_all_stop.setObjectName("btn_all_stop")
self.btn_high_water = QtWidgets.QCheckBox(self.pumpswitch)
self.btn_high_water.setGeometry(QtCore.QRect(70, 60, 91, 16))
font = QtGui.QFont()
font.setPointSize(10)
self.btn_high_water.setFont(font)
self.btn_high_water.setObjectName("btn_high_water")
self.btn_wheel = QtWidgets.QCheckBox(self.pumpswitch)
self.btn_wheel.setGeometry(QtCore.QRect(170, 60, 71, 16))
font = QtGui.QFont()
font.setPointSize(10)
self.btn_wheel.setFont(font)
self.btn_wheel.setObjectName("btn_wheel")
self.btn_alkali = QtWidgets.QCheckBox(self.pumpswitch)
self.btn_alkali.setGeometry(QtCore.QRect(240, 60, 71, 16))
font = QtGui.QFont()
font.setPointSize(10)
self.btn_alkali.setFont(font)
self.btn_alkali.setObjectName("btn_alkali")
self.btn_acid = QtWidgets.QCheckBox(self.pumpswitch)
self.btn_acid.setGeometry(QtCore.QRect(0, 80, 71, 16))
font = QtGui.QFont()
font.setPointSize(10)
self.btn_acid.setFont(font)
self.btn_acid.setObjectName("btn_acid")
self.btn_water_wax = QtWidgets.QCheckBox(self.pumpswitch)
self.btn_water_wax.setGeometry(QtCore.QRect(70, 80, 91, 16))
font = QtGui.QFont()
font.setPointSize(10)
self.btn_water_wax.setFont(font)
self.btn_water_wax.setObjectName("btn_water_wax")
self.btn_drain = QtWidgets.QCheckBox(self.pumpswitch)
self.btn_drain.setGeometry(QtCore.QRect(170, 80, 91, 16))
font = QtGui.QFont()
font.setPointSize(10)
self.btn_drain.setFont(font)
self.btn_drain.setObjectName("btn_drain")
self.btn_water_inflow = QtWidgets.QCheckBox(self.pumpswitch)
self.btn_water_inflow.setGeometry(QtCore.QRect(240, 80, 101, 16))
font = QtGui.QFont()
font.setPointSize(10)
self.btn_water_inflow.setFont(font)
self.btn_water_inflow.setObjectName("btn_water_inflow")
self.label_pump_1 = QtWidgets.QLabel(self.pumpswitch)
self.label_pump_1.setGeometry(QtCore.QRect(0, 10, 51, 14))
self.label_pump_1.setMinimumSize(QtCore.QSize(0, 14))
self.label_pump_1.setMaximumSize(QtCore.QSize(16777215, 14))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(False)
font.setWeight(50)
self.label_pump_1.setFont(font)
self.label_pump_1.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_pump_1.setObjectName("label_pump_1")
self.ui_log_pump = QtWidgets.QLineEdit(self.pumpswitch)
self.ui_log_pump.setGeometry(QtCore.QRect(40, 10, 251, 15))
self.ui_log_pump.setMaximumSize(QtCore.QSize(16777215, 15))
font = QtGui.QFont()
font.setPointSize(9)
self.ui_log_pump.setFont(font)
self.ui_log_pump.setText("")
self.ui_log_pump.setObjectName("ui_log_pump")
self.led_high_water = QtWidgets.QToolButton(self.pumpswitch)
self.led_high_water.setGeometry(QtCore.QRect(40, 30, 50, 14))
self.led_high_water.setMinimumSize(QtCore.QSize(50, 0))
self.led_high_water.setMaximumSize(QtCore.QSize(55, 14))
font = QtGui.QFont()
font.setPointSize(8)
self.led_high_water.setFont(font)
self.led_high_water.setToolTip("")
self.led_high_water.setToolTipDuration(-1)
self.led_high_water.setObjectName("led_high_water")
self.led_ch_alkali = QtWidgets.QToolButton(self.pumpswitch)
self.led_ch_alkali.setGeometry(QtCore.QRect(90, 30, 50, 14))
self.led_ch_alkali.setMinimumSize(QtCore.QSize(50, 0))
self.led_ch_alkali.setMaximumSize(QtCore.QSize(55, 14))
font = QtGui.QFont()
font.setPointSize(8)
self.led_ch_alkali.setFont(font)
self.led_ch_alkali.setToolTip("")
self.led_ch_alkali.setToolTipDuration(-1)
self.led_ch_alkali.setObjectName("led_ch_alkali")
self.led_ch_acid = QtWidgets.QToolButton(self.pumpswitch)
self.led_ch_acid.setGeometry(QtCore.QRect(140, 30, 50, 14))
self.led_ch_acid.setMinimumSize(QtCore.QSize(50, 0))
self.led_ch_acid.setMaximumSize(QtCore.QSize(55, 14))
font = QtGui.QFont()
font.setPointSize(8)
self.led_ch_acid.setFont(font)
self.led_ch_acid.setToolTip("")
self.led_ch_acid.setToolTipDuration(-1)
self.led_ch_acid.setObjectName("led_ch_acid")
self.led_ch1_wheel = QtWidgets.QToolButton(self.pumpswitch)
self.led_ch1_wheel.setGeometry(QtCore.QRect(190, 30, 50, 14))
self.led_ch1_wheel.setMinimumSize(QtCore.QSize(50, 0))
self.led_ch1_wheel.setMaximumSize(QtCore.QSize(55, 14))
font = QtGui.QFont()
font.setPointSize(8)
self.led_ch1_wheel.setFont(font)
self.led_ch1_wheel.setToolTip("")
self.led_ch1_wheel.setToolTipDuration(-1)
self.led_ch1_wheel.setObjectName("led_ch1_wheel")
self.led_ch1_wax = QtWidgets.QToolButton(self.pumpswitch)
self.led_ch1_wax.setGeometry(QtCore.QRect(240, 30, 50, 14))
self.led_ch1_wax.setMinimumSize(QtCore.QSize(50, 0))
self.led_ch1_wax.setMaximumSize(QtCore.QSize(55, 14))
font = QtGui.QFont()
font.setPointSize(8)
self.led_ch1_wax.setFont(font)
self.led_ch1_wax.setToolTip("")
self.led_ch1_wax.setToolTipDuration(-1)
self.led_ch1_wax.setObjectName("led_ch1_wax")
self.label_pump_2 = QtWidgets.QLabel(self.pumpswitch)
self.label_pump_2.setGeometry(QtCore.QRect(10, 110, 51, 14))
self.label_pump_2.setMinimumSize(QtCore.QSize(0, 14))
self.label_pump_2.setMaximumSize(QtCore.QSize(16777215, 14))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(False)
font.setWeight(50)
self.label_pump_2.setFont(font)
self.label_pump_2.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_pump_2.setObjectName("label_pump_2")
self.ui_log_pump_countdown = QtWidgets.QLineEdit(self.pumpswitch)
self.ui_log_pump_countdown.setGeometry(QtCore.QRect(50, 110, 121, 15))
self.ui_log_pump_countdown.setMaximumSize(QtCore.QSize(16777215, 15))
font = QtGui.QFont()
font.setPointSize(9)
self.ui_log_pump_countdown.setFont(font)
self.ui_log_pump_countdown.setText("")
self.ui_log_pump_countdown.setObjectName("ui_log_pump_countdown")
self.label_pump_3 = QtWidgets.QLabel(self.pumpswitch)
self.label_pump_3.setGeometry(QtCore.QRect(190, 110, 71, 14))
self.label_pump_3.setMinimumSize(QtCore.QSize(0, 14))
self.label_pump_3.setMaximumSize(QtCore.QSize(16777215, 14))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(False)
font.setWeight(50)
self.label_pump_3.setFont(font)
self.label_pump_3.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_pump_3.setObjectName("label_pump_3")
self.pump_countdown_box = QtWidgets.QSpinBox(self.pumpswitch)
self.pump_countdown_box.setGeometry(QtCore.QRect(260, 110, 48, 16))
font = QtGui.QFont()
font.setPointSize(10)
self.pump_countdown_box.setFont(font)
self.pump_countdown_box.setObjectName("pump_countdown_box")
self.tab_device_3.addTab(self.pumpswitch, "")
SBC.setCentralWidget(self.SBC_2)
self.retranslateUi(SBC)
self.tab_device.setCurrentIndex(0)
self.tab_device_2.setCurrentIndex(0)
self.tab_pumps_station.setCurrentIndex(0)
self.tab_device_3.setCurrentIndex(0)
QtCore.QMetaObject.connectSlotsByName(SBC)
    def retranslateUi(self, SBC):
        """Install all user-visible strings on the SBC window's widgets.

        Auto-generated by pyuic from the Qt Designer .ui file -- regenerate
        from the .ui file rather than hand-editing strings here.  Every text
        is wrapped in QCoreApplication.translate so Qt Linguist can localize.
        """
        _translate = QtCore.QCoreApplication.translate
        # Main window title and pump-station / guides status widgets.
        SBC.setWindowTitle(_translate("SBC", "SBC"))
        self.label_pump_station.setText(_translate("SBC", "PUMP STATION"))
        self.ip_local.setText(_translate("SBC", "LocalIP : 0.0.0.0"))
        self.ip_nuc.setText(_translate("SBC", "NucIP : 0.0.0.0"))
        self.led_pump_station.setText(_translate("SBC", "OFF"))
        self.label_guides.setText(_translate("SBC", "GUIDES"))
        self.led_guides.setText(_translate("SBC", "OFF"))
        self.tab_device.setTabText(self.tab_device.indexOf(self.device), _translate("SBC", "DEVICE"))
        # Stage-show buttons; the button labels are Chinese (translations in
        # the trailing comments).
        self.label_stage_show.setText(_translate("SBC", "STAGE SHOW"))
        self.label_stage_show_btn.setText(_translate("SBC", "SHOW BTN"))
        self.btn_welcome.setText(_translate("SBC", "欢迎光临"))  # "Welcome"
        self.btn_forward.setText(_translate("SBC", "向前行驶"))  # "Drive forward"
        self.btn_stop_forward.setText(_translate("SBC", "停止行驶"))  # "Stop driving"
        self.btn_back_driving.setText(_translate("SBC", "向后行驶"))  # "Drive backward"
        self.btn_washing.setText(_translate("SBC", "正在清洗"))  # "Washing in progress"
        self.btn_washing_end.setText(_translate("SBC", "清洗结束"))  # "Washing finished"
        self.label_guides_2.setText(_translate("SBC", "GUIDES"))
        self.tab_device_2.setTabText(self.tab_device_2.indexOf(self.device_2), _translate("SBC", "GUIDES"))
        # Liquid-level readouts ("mm" units) on the pump-station tab.
        self.DRAIN.setText(_translate("SBC", "DRAIN"))
        self.DRAIN_6.setText(_translate("SBC", "mm"))
        self.DRAIN_10.setText(_translate("SBC", "mm"))
        self.DRAIN_4.setText(_translate("SBC", "mm"))
        self.DRAIN_8.setText(_translate("SBC", "mm"))
        self.label_chem.setText(_translate("SBC", "LIQUID"))
        self.label_wheel_data.setText(_translate("SBC", "WHEEL"))
        self.label_wax_data.setText(_translate("SBC", "WAX"))
        self.label_acid_data.setText(_translate("SBC", "ACID"))
        self.label_water_data.setText(_translate("SBC", "WATER"))
        self.label_alkali_data.setText(_translate("SBC", "ALKALI"))
        self.DRAIN_12.setText(_translate("SBC", "mm"))
        self.led_water.setText(_translate("SBC", "full"))
        self.led_alkali.setText(_translate("SBC", "full"))
        self.led_acid.setText(_translate("SBC", "full"))
        self.led_wheel.setText(_translate("SBC", "full"))
        self.led_wax.setText(_translate("SBC", "full"))
        self.tab_pumps_station.setTabText(self.tab_pumps_station.indexOf(self.device_3), _translate("SBC", "PUMPS STATION"))
        # Pump-switch checkboxes and indicator LEDs.
        self.btn_all_stop.setText(_translate("SBC", "ALL STOP"))
        self.btn_high_water.setText(_translate("SBC", "HIGH WATER"))
        self.btn_wheel.setText(_translate("SBC", "WHEEL"))
        self.btn_alkali.setText(_translate("SBC", "ALKALI "))
        self.btn_acid.setText(_translate("SBC", "ACID"))
        self.btn_water_wax.setText(_translate("SBC", "WATER WAX"))
        self.btn_drain.setText(_translate("SBC", "DRAIN"))
        self.btn_water_inflow.setText(_translate("SBC", "WATER INFLOW"))
        self.label_pump_1.setText(_translate("SBC", "PUMP"))
        self.led_high_water.setText(_translate("SBC", "P"))
        self.led_ch_alkali.setText(_translate("SBC", "C1"))
        self.led_ch_acid.setText(_translate("SBC", "C2"))
        self.led_ch1_wheel.setText(_translate("SBC", "WE"))
        self.led_ch1_wax.setText(_translate("SBC", "WX"))
        self.label_pump_2.setText(_translate("SBC", "PUMP"))
        self.label_pump_3.setText(_translate("SBC", "剩余延迟时间"))  # "Remaining delay time"
        self.tab_device_3.setTabText(self.tab_device_3.indexOf(self.pumpswitch), _translate("SBC", "PUMPSWITCH"))
| [
"[email protected]"
] | |
8332e30937e9e1b5e5122db696b4431f00c38374 | 6223dc2e5de7921696cb34fb62142fd4a4efe361 | /.metadata/.plugins/org.eclipse.core.resources/.history/51/40e6c6177739001412b5c17ef71e72e3 | 6db0fb731998676d3ddb05dbce7d5249db6922c6 | [] | no_license | Mushirahmed/python_workspace | 5ef477b2688e8c25b1372f546752501ee53d93e5 | 46e2ed783b17450aba29e4e2df7b656522b2b03b | refs/heads/master | 2021-03-12T19:24:50.598982 | 2015-05-25T10:23:54 | 2015-05-25T10:23:54 | 24,671,376 | 0 | 1 | null | 2015-02-06T09:27:40 | 2014-10-01T08:40:33 | Python | UTF-8 | Python | false | false | 1,442 | #!/usr/bin/env python
import wx
def slider(parent, min, max, callback):
    """
    Return a labelled horizontal wx.Slider spanning [min, max].

    The control starts at the midpoint of the range; *callback* is invoked
    with the slider's integer value every time the slider moves.

    @param min: minimum slider value
    @type min: float
    @param max: maximum slider value
    @type max: float
    @param callback: function of one arg invoked when slider moves.
    @rtype: wx.Slider
    """
    slider_id = wx.NewId()
    midpoint = (max + min) / 2
    ctrl = wx.Slider(parent, slider_id, midpoint, min, max,
                     wx.DefaultPosition, wx.Size(250, -1),
                     wx.SL_HORIZONTAL | wx.SL_LABELS)

    def _on_scroll(evt):
        callback(evt.GetInt())

    wx.EVT_COMMAND_SCROLL(parent, slider_id, _on_scroll)
    return ctrl
# ----------------------------------------------------------------
# Demo app
# ----------------------------------------------------------------
if __name__ == '__main__':
from gnuradio.wxgui import stdgui2
class demo_graph(stdgui.gui_flow_graph):
def __init__(self, frame, panel, vbox, argv):
stdgui.gui_flow_graph.__init__ (self, frame, panel, vbox, argv)
vbox.Add(slider(panel, 23, 47, self.my_callback1), 1, wx.ALIGN_CENTER)
vbox.Add(slider(panel, -100, 100, self.my_callback2), 1, wx.ALIGN_CENTER)
def my_callback1(self, val):
print "cb1 = ", val
def my_callback2(self, val):
print "cb2 = ", val
def main ():
app = stdgui.stdapp (demo_graph, "Slider Demo")
app.MainLoop ()
main ()
| [
"[email protected]"
] | ||
532a4c353a1544432b498ed028eb0f65b6b9fc4d | e2860eb874db045fb8d0279566a935af907e5bdf | /ml/ml07_1_boston.py | b245a54bef04d78667e33b52f33e63088f0a8179 | [] | no_license | MinseokCHAE/Bitcamp2_new | dda7990907cb136c2e709a345eec634dfdb6ac02 | 849adb5a330b621f1c681f0b5e92005d1281a44d | refs/heads/main | 2023-08-31T03:28:18.068561 | 2021-10-05T00:48:52 | 2021-10-05T00:48:52 | 390,228,262 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,847 | py | import numpy as np
import time
from sklearn.metrics import r2_score
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import MinMaxScaler, StandardScaler, RobustScaler, QuantileTransformer, OneHotEncoder
from sklearn.datasets import load_boston
from tensorflow.keras.models import Sequential, Model
from tensorflow.keras.layers import Dense, Input, Conv1D, Flatten, MaxPooling1D, GlobalAveragePooling1D, Dropout
from tensorflow.keras.callbacks import EarlyStopping
from tensorflow.keras.utils import to_categorical
#1. data preprocessing
# Load the Boston housing data, hold out 20% as a test set, then scale the
# features into [0, 1] using statistics from the training split only.
boston_data = load_boston()
x_train, x_test, y_train, y_test = train_test_split(
    boston_data.data, boston_data.target, test_size=0.2, random_state=21)

scaler = MinMaxScaler()
scaler.fit(x_train)
x_train = scaler.transform(x_train)
x_test = scaler.transform(x_test)

from sklearn.model_selection import KFold, cross_val_score, GridSearchCV
from sklearn.ensemble import RandomForestRegressor
from sklearn.metrics import accuracy_score

#2. cross-validated grid search over RandomForestRegressor hyper-parameters.
# Each dict below is its own sub-grid, so a single hyper-parameter is varied
# at a time (all others stay at their defaults).
kfold = KFold(n_splits=5, shuffle=True, random_state=21)
parameter = [
    {'n_estimators': [100, 200]},
    {'max_depth': [6, 8, 10, 12]},
    {'min_samples_leaf': [3, 5, 7, 10]},
    {'min_samples_split': [2, 3, 5, 10]},
    {'n_jobs': [-1, 2, 4]},
]

grid = GridSearchCV(RandomForestRegressor(), parameter, cv=kfold)
grid.fit(x_train, y_train)

#3. report the winner: best CV score vs. R^2 on the held-out test set.
best_estimator = grid.best_estimator_
best_score = grid.best_score_
grid_score = grid.score(x_test, y_test)

print('best parameter = ', best_estimator)
print('best score = ', best_score)
print('grid score = ', grid_score)

# Sample run:
#   best parameter = RandomForestRegressor(min_samples_split=5)
#   best score = 0.830591307770115
#   grid score = 0.8783616408326427
| [
"[email protected]"
] | |
a161266ee413fb7f3bb8b94466c9d03314de7ee9 | 633b695a03e789f6aa644c7bec7280367a9252a8 | /lmfit_gallery/documentation/fitting_withreport.py | 412f4c07159b2a6fb06c2af10b0d239b29d68e3f | [] | no_license | tnakaicode/PlotGallery | 3d831d3245a4a51e87f48bd2053b5ef82cf66b87 | 5c01e5d6e2425dbd17593cb5ecc973982f491732 | refs/heads/master | 2023-08-16T22:54:38.416509 | 2023-08-03T04:23:21 | 2023-08-03T04:23:21 | 238,610,688 | 5 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,206 | py | """
doc_fitting_withreport.py
=========================
"""
# <examples/doc_fitting_withreport.py>
from numpy import exp, linspace, pi, random, sign, sin
from lmfit import Parameters, fit_report, minimize
# Ground-truth parameters of the synthetic decaying sine wave; the fit
# below is expected to recover values close to these.
p_true = Parameters()
p_true.add('amp', value=14.0)
p_true.add('period', value=5.46)
p_true.add('shift', value=0.123)
p_true.add('decay', value=0.032)
def residual(pars, x, data=None):
    """Model a decaying sine wave and subtract data.

    With ``data=None`` the raw model is returned; otherwise the residual
    (model - data) is returned, which is what lmfit's minimize() expects.
    Phase shifts outside (-pi/2, pi/2] are wrapped back by pi.
    """
    p = pars.valuesdict()
    phase = p['shift']
    if abs(phase) > pi/2:
        phase -= sign(phase) * pi
    model = p['amp'] * sin(phase + x / p['period']) * exp(-x * x * p['decay'] * p['decay'])
    if data is None:
        return model
    return model - data
# Synthesize noisy observations from the true parameters; the seed makes
# the example (and its printed fit report) reproducible.
random.seed(0)
x = linspace(0.0, 250., 1001)
noise = random.normal(scale=0.7215, size=x.size)
data = residual(p_true, x) + noise
# Starting guesses for the fit, deliberately offset from the true values.
fit_params = Parameters()
fit_params.add('amp', value=13.0)
fit_params.add('period', value=2)
fit_params.add('shift', value=0.0)
fit_params.add('decay', value=0.02)
# Least-squares fit; `data` is forwarded to residual() through kws so the
# objective returns (model - data).
out = minimize(residual, fit_params, args=(x,), kws={'data': data})
print(fit_report(out))
# <end examples/doc_fitting_withreport.py>
| [
"[email protected]"
] | |
78b580625bf05f9a4e3f617d22606d8993dc1471 | 07c27cbba56ffb1f2e391d2aaceefba039f68667 | /bin/svg.py | 0f7e900113122f37f95eb346261053c090c4287c | [] | no_license | rheiland/tool4nanobio | beb3914ad23638bb856454832c83ab3c6535ae86 | e872ae02e7df784bcde0481b30c6d97a0ae3a517 | refs/heads/master | 2020-04-25T17:11:52.995649 | 2020-01-16T11:32:47 | 2020-01-16T11:32:47 | 172,938,698 | 3 | 3 | null | null | null | null | UTF-8 | Python | false | false | 13,242 | py | # SVG (Cell Plot) Tab
import os
from ipywidgets import Layout, Label, Text, Checkbox, Button, HBox, VBox, Box, \
FloatText, BoundedIntText, BoundedFloatText, HTMLMath, Dropdown, interactive, Output
from collections import deque
import xml.etree.ElementTree as ET
import matplotlib.pyplot as plt
import matplotlib.colors as mplc
import numpy as np
import zipfile
import glob
import platform
from debug import debug_view
# True when the hublib download widget is usable: hublib.ui is not available
# on Windows, and may simply not be installed elsewhere.  The flag gates the
# optional "Download" button in SVGTab below.
hublib_flag = True
if platform.system() != 'Windows':
    try:
        from hublib.ui import Download
    # FIX: was a bare `except:`, which also swallowed KeyboardInterrupt /
    # SystemExit.  Any failure to import still just disables the feature.
    except Exception:
        hublib_flag = False
else:
    hublib_flag = False
class SVGTab(object):
    """Jupyter "Cell Plot" tab that renders snapshot*.svg output files.

    Builds an ipywidgets layout containing a frame slider (an ``interactive``
    wrapping :meth:`plot_svg`), checkboxes toggling nucleus/edge rendering,
    and -- when hublib is available -- a button that zips up all plots for
    download.  The composed widget tree is exposed as ``self.tab``.
    """

    def __init__(self):
        self.output_dir = '.'      # directory scanned for snapshot*.svg
        constWidth = '180px'

        max_frames = 1             # grows once output files exist; see update()
        # Slider-driven plot; continuous_update=False avoids re-parsing the
        # SVG for every intermediate slider position while dragging.
        self.svg_plot = interactive(self.plot_svg, frame=(0, max_frames),
                                    continuous_update=False)
        plot_size = '700px'
        self.svg_plot.layout.width = plot_size
        self.svg_plot.layout.height = plot_size

        self.use_defaults = True   # let plot_svg() take axes_max from the SVG
        self.show_nucleus = 0      # 0->False, 1->True (mirrors its checkbox)
        self.show_edge = 1         # 0->False, 1->True (mirrors its checkbox)
        self.scale_radius = 1.0
        self.axes_min = 0.0
        self.axes_max = 2000       # may be overwritten by the SVG's 'width'

        self.max_frames = BoundedIntText(
            min=0, max=99999, value=max_frames,
            description='Max',
            layout=Layout(width='160px'),
        )
        self.max_frames.observe(self.update_max_frames)

        self.show_nucleus_checkbox = Checkbox(
            description='nucleus', value=False, disabled=False,
            layout=Layout(width=constWidth),
        )
        self.show_nucleus_checkbox.observe(self.show_nucleus_cb)

        self.show_edge_checkbox = Checkbox(
            description='edge', value=True, disabled=False,
            layout=Layout(width=constWidth),
        )
        self.show_edge_checkbox.observe(self.show_edge_cb)

        items_auto = [
            Label('select slider: drag or left/right arrows'),
            self.max_frames,
            self.show_nucleus_checkbox,
            self.show_edge_checkbox,
        ]
        box_layout = Layout(display='flex',
                            flex_flow='row',
                            align_items='stretch',
                            width='70%')
        row1 = Box(children=items_auto, layout=box_layout)

        if (hublib_flag):
            self.download_button = Download('svg.zip', style='warning', icon='cloud-download',
                                            tooltip='You need to allow pop-ups in your browser', cb=self.download_cb)
            download_row = HBox([self.download_button.w, Label("Download all cell plots (browser must allow pop-ups).")])
            self.tab = VBox([row1, self.svg_plot, download_row])
        else:
            self.tab = VBox([row1, self.svg_plot])

    def update(self, rdir=''):
        """Point the tab at a (new) results directory and sync the max frame.

        Scans for snapshot*.svg and derives the highest frame number from the
        last filename (assumes the "snapshot%08d.svg" naming scheme).
        """
        if rdir:
            self.output_dir = rdir

        all_files = sorted(glob.glob(os.path.join(self.output_dir, 'snapshot*.svg')))
        if len(all_files) > 0:
            last_file = all_files[-1]
            self.max_frames.value = int(last_file[-12:-4])

    def download_cb(self):
        """Zip every .svg in the output dir into svg.zip (Download button cb)."""
        file_str = os.path.join(self.output_dir, '*.svg')
        with zipfile.ZipFile('svg.zip', 'w') as myzip:
            for f in glob.glob(file_str):
                # 2nd arg avoids storing the full path inside the archive.
                myzip.write(f, os.path.basename(f))

    def show_nucleus_cb(self, b):
        """Checkbox observer: toggle nucleus rendering and redraw."""
        if (self.show_nucleus_checkbox.value):
            self.show_nucleus = 1
        else:
            self.show_nucleus = 0
        self.svg_plot.update()

    def show_edge_cb(self, b):
        """Checkbox observer: toggle cell-edge rendering and redraw."""
        if (self.show_edge_checkbox.value):
            self.show_edge = 1
        else:
            self.show_edge = 0
        self.svg_plot.update()

    def update_max_frames(self, _b):
        """Propagate the BoundedIntText value to the frame slider's maximum."""
        self.svg_plot.children[0].max = self.max_frames.value

    def plot_svg(self, frame):
        """Parse snapshot<frame>.svg and scatter-plot its cells.

        Reads cell positions, radii and fill colors out of the SVG produced
        by the simulator and renders them with matplotlib.  Returns None;
        prints a hint and bails out early if the file does not exist yet.
        """
        global current_frame
        current_frame = frame
        fname = "snapshot%08d.svg" % frame
        full_fname = os.path.join(self.output_dir, fname)
        if not os.path.isfile(full_fname):
            print("Once output files are generated, click the slider.")
            return

        xlist = deque()
        ylist = deque()
        rlist = deque()
        rgb_list = deque()

        tree = ET.parse(full_fname)
        root = tree.getroot()

        # FIX: initialize title_str so a snapshot without a "Current time"
        # text node no longer raises NameError below.
        title_str = ""
        numChildren = 0
        for child in root:
            if self.use_defaults and ('width' in child.attrib.keys()):
                self.axes_max = float(child.attrib['width'])
            if child.text and "Current time" in child.text:
                svals = child.text.split()
                title_str = svals[2] + "d, " + svals[4] + "h, " + svals[7] + "m"
            # First element carrying an 'id' is taken as the tissue group.
            if ('id' in child.attrib.keys()):
                tissue_parent = child
                break

        cells_parent = None
        for child in tissue_parent:
            if (child.attrib['id'] == 'cells'):
                cells_parent = child
                break
            numChildren += 1

        num_cells = 0
        for child in cells_parent:
            # Each cell holds up to two circles: the outer membrane first,
            # then (optionally) the nucleus.
            for circle in child:
                xval = float(circle.attrib['cx'])

                s = circle.attrib['fill']
                if (s[0:3] == "rgb"):  # e.g. "rgb(175,175,80)"
                    rgb = list(map(int, s[4:-1].split(",")))
                    rgb[:] = [x / 255. for x in rgb]
                else:  # otherwise a named color
                    rgb_tuple = mplc.to_rgb(mplc.cnames[s])
                    rgb = [x for x in rgb_tuple]

                # Skip cells with bogus coordinates.
                # NOTE(review): 10000 is a guess at a sane bound -- ideally
                # this should come from the domain size.
                too_large_val = 10000.
                if (np.fabs(xval) > too_large_val):
                    print("bogus xval=", xval)
                    break
                yval = float(circle.attrib['cy'])
                if (np.fabs(yval) > too_large_val):
                    # FIX: was a copy-paste of the xval message/value.
                    print("bogus yval=", yval)
                    break
                rval = float(circle.attrib['r'])

                xlist.append(xval)
                ylist.append(yval)
                rlist.append(rval)
                rgb_list.append(rgb)

                # Without nuclei we only want the first (outer) circle.
                if (self.show_nucleus == 0):
                    break
            num_cells += 1

        xvals = np.array(xlist)
        yvals = np.array(ylist)
        rvals = np.array(rlist)
        rgbs = np.array(rgb_list)

        title_str += " (" + str(num_cells) + " agents)"

        self.fig = plt.figure(figsize=(7, 7))
        ax2 = self.fig.gca()
        # Convert the cell radii (data units) into scatter marker sizes
        # (points^2) by measuring how many pixels one radius spans.
        N = len(xvals)
        rr_pix = (ax2.transData.transform(np.vstack([rvals, rvals]).T) -
                  ax2.transData.transform(np.vstack([np.zeros(N), np.zeros(N)]).T))
        rpix, _ = rr_pix.T
        markers_size = (144. * rpix / self.fig.dpi) ** 2  # = (2*rpix/dpi*72)^2
        markers_size = markers_size / 4000000.

        if (self.show_edge):
            # NOTE(review): scatter occasionally raises ValueError when edges
            # are toggled on; swallowing it keeps the widget responsive.
            try:
                plt.scatter(xvals, yvals, s=markers_size, c=rgbs, edgecolor='black', linewidth=0.5)
            except ValueError:
                pass
        else:
            plt.scatter(xvals, yvals, s=markers_size, c=rgbs)

        plt.xlim(self.axes_min, self.axes_max)
        plt.ylim(self.axes_min, self.axes_max)
        plt.title(title_str)
#---------------------
| [
"[email protected]"
] | |
06683c64c9c082713d0b286d60bf3d006bef3569 | 6fcfb638fa725b6d21083ec54e3609fc1b287d9e | /python/NicolasHug_Surprise/Surprise-master/examples/grid_search_usage.py | f915af8c2eff0478eb4c7a991024a2a4e4aa1ff3 | [] | no_license | LiuFang816/SALSTM_py_data | 6db258e51858aeff14af38898fef715b46980ac1 | d494b3041069d377d6a7a9c296a14334f2fa5acc | refs/heads/master | 2022-12-25T06:39:52.222097 | 2019-12-12T08:49:07 | 2019-12-12T08:49:07 | 227,546,525 | 10 | 7 | null | 2022-12-19T02:53:01 | 2019-12-12T07:29:39 | Python | UTF-8 | Python | false | false | 1,150 | py | """
This module describes how to use the GridSearch class to find the best
hyper-parameter combination for an algorithm via cross-validated grid search.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from surprise import GridSearch
from surprise import SVD
from surprise import Dataset
# Candidate hyper-parameter values; every combination (2*2*2 = 8) of these
# is evaluated for the SVD algorithm on both RMSE and FCP.
param_grid = {'n_epochs': [5, 10], 'lr_all': [0.002, 0.005],
              'reg_all': [0.4, 0.6]}
grid_search = GridSearch(SVD, param_grid, measures=['RMSE', 'FCP'])
# Prepare Data
data = Dataset.load_builtin('ml-100k')
data.split(n_folds=3)
# Fit and score every candidate on each of the 3 folds defined above.
grid_search.evaluate(data)
# best RMSE score
print(grid_search.best_score['RMSE'])
# >>> 0.96117566386
# combination of parameters that gave the best RMSE score
print(grid_search.best_params['RMSE'])
# >>> {'reg_all': 0.4, 'lr_all': 0.005, 'n_epochs': 10}
# best FCP score
print(grid_search.best_score['FCP'])
# >>> 0.702279736531
# combination of parameters that gave the best FCP score
print(grid_search.best_params['FCP'])
# >>> {'reg_all': 0.6, 'lr_all': 0.005, 'n_epochs': 10}
# cv_results holds per-combination scores; a DataFrame makes them easy to
# inspect (pandas is only needed for this optional step).
import pandas as pd  # noqa
results_df = pd.DataFrame.from_dict(grid_search.cv_results)
print(results_df)
"[email protected]"
] | |
0359e9366c572e840e6a924176a959c6c328847d | e3c8f786d09e311d6ea1cab50edde040bf1ea988 | /Incident-Response/Tools/grr/grr/server/grr_response_server/gui/selenium_tests/report_test.py | 1175096622c718b20aa9b0c66c5f1c953997a6f7 | [
"Apache-2.0",
"MIT"
] | permissive | foss2cyber/Incident-Playbook | d1add8aec6e28a19e515754c6ce2e524d67f368e | a379a134c0c5af14df4ed2afa066c1626506b754 | refs/heads/main | 2023-06-07T09:16:27.876561 | 2021-07-07T03:48:54 | 2021-07-07T03:48:54 | 384,988,036 | 1 | 0 | MIT | 2021-07-11T15:45:31 | 2021-07-11T15:45:31 | null | UTF-8 | Python | false | false | 4,588 | py | #!/usr/bin/env python
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from absl import app
from selenium.webdriver.common import keys
from grr_response_core.lib import rdfvalue
from grr_response_server import data_store
from grr_response_server.gui import gui_test_lib
from grr_response_server.rdfvalues import objects as rdf_objects
from grr.test_lib import test_lib
def AddFakeAuditLog(user=None, router_method_name=None):
  """Write a fake API audit entry to the relational datastore."""
  entry = rdf_objects.APIAuditEntry(
      username=user, router_method_name=router_method_name)
  data_store.REL_DB.WriteAPIAuditEntry(entry)
class TestReports(gui_test_lib.GRRSeleniumTest):
  """Test the reports interface."""

  def testReports(self):
    """Test the reports interface."""
    # Two audit events at different (fake) times, so a timerange filter can
    # select exactly one of them.
    with test_lib.FakeTime(
        rdfvalue.RDFDatetime.FromHumanReadable("2012/12/14")):
      AddFakeAuditLog(user="User123")
    with test_lib.FakeTime(
        rdfvalue.RDFDatetime.FromHumanReadable("2012/12/22")):
      AddFakeAuditLog(user="User456")
    # Make "test" user an admin.
    self.CreateAdminUser(u"test")
    self.Open("/#/stats/")
    # Go to reports.
    self.Click("css=#MostActiveUsersReportPlugin_anchor i.jstree-icon")
    self.WaitUntil(self.IsTextPresent, "Server | User Breakdown")
    # Enter a timerange that only matches one of the two fake events.
    self.Type("css=grr-form-datetime input", "2012-12-21 12:34")
    self.Click("css=button:contains('Show report')")
    # Only the event inside the timerange should be listed.
    self.WaitUntil(self.IsTextPresent, "User456")
    self.assertFalse(self.IsTextPresent("User123"))

  def testReportsDontIncludeTimerangesInUrlsOfReportsThatDontUseThem(self):
    """URL params (timerange/label) only appear for reports that use them."""
    client_id = self.SetupClient(0)
    self.AddClientLabel(client_id, u"owner", u"bar")
    self.Open("/#/stats/")
    # Go to reports.
    self.Click("css=#MostActiveUsersReportPlugin_anchor i.jstree-icon")
    self.WaitUntil(self.IsTextPresent, "Server | User Breakdown")
    # Default values aren't shown in the url.
    self.WaitUntilNot(lambda: "start_time" in self.GetCurrentUrlPath())
    self.assertNotIn("duration", self.GetCurrentUrlPath())
    # Enter a timerange.
    self.Type("css=grr-form-datetime input", "2012-12-21 12:34")
    self.Type("css=grr-form-duration input", "2w")
    self.Click("css=button:contains('Show report')")
    # Reports that require timeranges include nondefault values in the url when
    # `Show report' has been clicked.
    self.WaitUntil(lambda: "start_time" in self.GetCurrentUrlPath())
    self.assertIn("duration", self.GetCurrentUrlPath())
    # Select a different report.
    self.Click("css=#LastActiveReportPlugin_anchor i.jstree-icon")
    self.WaitUntil(self.IsTextPresent, "Client | Last Active")
    # The default label isn't included in the url.
    self.WaitUntilNot(lambda: "bar" in self.GetCurrentUrlPath())
    # Select a client label.
    self.Select("css=grr-report select", "bar")
    self.Click("css=button:contains('Show report')")
    # Reports that require labels include them in the url after `Show report'
    # has been clicked.
    self.WaitUntil(lambda: "bar" in self.GetCurrentUrlPath())
    # Reports that dont require timeranges don't mention them in the url.
    self.assertNotIn("start_time", self.GetCurrentUrlPath())
    self.assertNotIn("duration", self.GetCurrentUrlPath())
    # Select a different report.
    self.Click("css=#GRRVersion7ReportPlugin_anchor i.jstree-icon")
    self.WaitUntil(self.IsTextPresent, "Active Clients - 7 Days Active")
    # The label is cleared when report type is changed.
    self.WaitUntilNot(lambda: "bar" in self.GetCurrentUrlPath())
    self.assertNotIn("start_time", self.GetCurrentUrlPath())
    self.assertNotIn("duration", self.GetCurrentUrlPath())
class TestDateTimeInput(gui_test_lib.GRRSeleniumTest):
  """Tests datetime-form-directive."""

  def testInputAllowsInvalidText(self):
    # Make "test" user an admin.
    self.CreateAdminUser(u"test")
    # Open any page that shows the datetime-form-directive.
    self.Open("/#/stats/HuntApprovalsReportPlugin")
    datetime_input = self.WaitUntil(self.GetVisibleElement,
                                    "css=grr-form-datetime input")
    value = datetime_input.get_attribute("value")
    # The widget comes pre-filled with a YYYY-MM-DD HH:MM timestamp.
    self.assertRegex(value, r"\d{4}-\d{2}-\d{2} \d{2}:\d{2}")
    self.assertStartsWith(value, "20")
    # Deleting a character must be allowed even though the remaining text
    # is no longer a complete datetime.
    datetime_input.send_keys(keys.Keys.BACKSPACE)
    self.WaitUntilNot(self.IsTextPresent, value)
    self.assertEqual(value[:-1], datetime_input.get_attribute("value"))
# Script entry point: delegate to the GRR test runner via absl.
if __name__ == "__main__":
  app.run(test_lib.main)
| [
"[email protected]"
] | |
c8401e8e3188c1d22ddcee1a2d85035f8bdfab43 | de0ea898d18e4faf383d230cf2542335bfa166d5 | /library/views.py | 877c529b48ed090292c6dd4c1e2631133c9a939e | [] | no_license | msadour/book_API | 86121341e66249b51835e5e1c842c8fdde26ba6c | 81477c242647c95897a05ad892bc3e11542defa7 | refs/heads/master | 2022-12-09T16:33:12.027427 | 2020-01-22T13:25:45 | 2020-01-22T13:25:45 | 231,387,598 | 0 | 0 | null | 2022-12-07T23:21:19 | 2020-01-02T13:28:36 | HTML | UTF-8 | Python | false | false | 989 | py | # -*- coding: utf-8 -*-
"""
Views.
"""
from __future__ import unicode_literals
from rest_framework import viewsets
from rest_framework import permissions
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.permissions import IsAuthenticated
from .permissions import IsOwnerOrReadOnly
from .models import Book
from .serializers import BookSerializer
class HelloView(APIView):
    """
    Display the message 'Hello World!' if the permission allows us.
    """
    # Only authenticated callers may see the greeting.
    permission_classes = (IsAuthenticated,)

    def get(self, request):
        """Respond to GET with a static greeting payload."""
        return Response({'message': 'Hello, World!'})
class BookViewSet(viewsets.ModelViewSet):
    """
    Display book(s) if the permission allows us.
    """
    # Full CRUD over every Book; access is narrowed by the permissions below.
    queryset = Book.objects.all()
    serializer_class = BookSerializer
    # NOTE(review): with IsAuthenticated first, the read-only branch of
    # IsAuthenticatedOrReadOnly appears unreachable -- confirm intent.
    permission_classes = [IsAuthenticated,
                          permissions.IsAuthenticatedOrReadOnly,
                          IsOwnerOrReadOnly]
| [
"[email protected]"
] | |
6b59d53ff5dca12c2cf49ecda84be12a1c60a12c | a3644ed207867df4d78a04af39ac3e26f86f9012 | /ibvp/language/symbolic/util.py | cf587104d319938fea973aba507443ccc906a896 | [
"MIT"
] | permissive | ibvp/ibvp | 006887be85a37ac4da51664d5fec9244c446cacd | c758b150cbd822bd17444499bea29c53b0606327 | refs/heads/master | 2022-05-07T02:17:46.232332 | 2022-03-20T19:34:13 | 2022-03-20T19:34:13 | 21,990,116 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,418 | py | from __future__ import division
from __future__ import absolute_import
from six.moves import range
__copyright__ = "Copyright (C) 2010-2013 Andreas Kloeckner"
__license__ = """
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import numpy as np
def pretty(expr):
    """Stringify *expr*, listing common subexpressions before the result."""
    from ibvp.language.symbolic.mappers import PrettyStringifyMapper
    mapper = PrettyStringifyMapper()

    from pymbolic.mapper.stringifier import PREC_NONE
    main_str = mapper(expr, PREC_NONE)

    cse_strs = mapper.get_cse_strings()
    if not cse_strs:
        return main_str

    # Separate the CSE definitions from the main expression visually.
    separator = "="*75 + "\n"
    return "\n".join(cse_strs) + "\n" + separator + main_str
def join_fields(*args):
    """Flatten the arguments into one pytools object array.

    Lists are spliced in, MultiVectors are expanded into their per-blade
    coefficients (grouped by grade), numpy object arrays are flattened,
    and anything else is appended as-is.
    """
    from pytools.obj_array import make_obj_array, log_shape
    from pymbolic.geometric_algebra import MultiVector, bit_count

    res_list = []
    for arg in args:
        if isinstance(arg, list):
            res_list.extend(arg)

        elif isinstance(arg, MultiVector):
            # Emit coefficients grade by grade; within a grade, iterate all
            # basis-blade bitmasks with a matching popcount. Coefficients
            # absent from arg.data default to 0.
            for grade in arg.all_grades():
                for bits in range(2**arg.space.dimensions):
                    if bit_count(bits) == grade:
                        res_list.append(arg.data.get(bits, 0))

        elif isinstance(arg, np.ndarray):
            if log_shape(arg) == ():
                # Zero-dimensional (scalar-shaped) array: keep as one entry.
                res_list.append(arg)
            else:
                res_list.extend(arg.flat)

        else:
            res_list.append(arg)

    return make_obj_array(res_list)
| [
"[email protected]"
] | |
d699aa415671a09c0d3cb6f790fbd8d199a1e504 | 7b6377050fba4d30f00e9fb5d56dfacb22d388e1 | /numericalFunctions/ptwXY/Python/Test/UnitTesting/convolution/convolution.py | 23e1f84ea78f302c6955c15e21ec6115a7eb5cc4 | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | LLNL/fudge | 0a4fe8e3a68b66d58e42d1f4d209ea3f713c6370 | 6ba80855ae47cb32c37f635d065b228fadb03412 | refs/heads/master | 2023-08-16T21:05:31.111098 | 2023-08-01T22:09:32 | 2023-08-01T22:09:32 | 203,678,373 | 21 | 4 | NOASSERTION | 2023-06-28T20:51:02 | 2019-08-21T23:22:20 | Python | UTF-8 | Python | false | false | 3,194 | py | # <<BEGIN-copyright>>
# Copyright 2022, Lawrence Livermore National Security, LLC.
# See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: BSD-3-Clause
# <<END-copyright>>
import os
from numericalFunctions import pointwiseXY_C
# Echo this file's name when the -e flag is present in CHECKOPTIONS.
if( 'CHECKOPTIONS' in os.environ ) :
    options = os.environ['CHECKOPTIONS'].split( )
    if( '-e' in options ) : print( __file__ )

CPATH = '../../../../Test/UnitTesting/convolution'

# Run the C convolution test binary and capture its verbose output in 'v'.
os.system( 'cd %s; ./convolution -v > v' % CPATH )
f = open( os.path.join( CPATH, 'v' ) )
ls = f.readlines( )
f.close( )

# Global 1-based line counter used for error reporting by the parsers below.
line = 1
def getIntegerValue( name, ls ) :
    """Parse a '# <name> = <int>' header from the first line of ls.

    Returns (remaining lines, parsed integer) and advances the global line
    counter; raises when the header does not match.
    """
    global line
    prefix = "# %s = " % name
    if not ls[0].startswith( prefix ) :
        raise Exception( '%s: line at %s does not contain %s info: "%s"' % ( __file__, line, name, ls[0][:-1] ) )
    value = int( ls[0].split( '=' )[1] )
    line += 1
    return ls[1:], value
def getDoubleValue( name, ls ) :
    """Parse a '# <name> = <float>' header from the first line of ls.

    Returns (remaining lines, parsed float) and advances the global line
    counter; raises when the header does not match.
    """
    global line
    prefix = "# %s = " % name
    if not ls[0].startswith( prefix ) :
        raise Exception( '%s: line at %s does not contain %s info: "%s"' % ( __file__, line, name, ls[0][:-1] ) )
    value = float( ls[0].split( '=' )[1] )
    line += 1
    return ls[1:], value
def compareValues( label, i, v1, v2 ) :
    """Compare v1 and v2 to 8 significant digits; raise on mismatch."""
    rounded = [ '%.8g' % float( '%.12g' % v ) for v in ( v1, v2 ) ]
    if rounded[0] != rounded[1] :
        print( '<%s> <%s>' % ( rounded[0], rounded[1] ) )
        raise Exception( '%s: values %s %s diff by %g at %d for label = %s' % ( __file__, v1, v2, v2 - v1, i, label ) )
def getData( ls, accuracy ) :
    """Parse a '# length = n' header plus n x/y pairs from ls.

    Returns (remaining lines, pointwiseXY_C object built from the data).
    """
    global line
    i = 0
    # Skip leading blank lines.
    for l in ls :
        if( l.strip( ) != '' ) : break
        i = i + 1
    line += i
    ls = ls[i:]
    ls, length = getIntegerValue( 'length', ls )
    # Each data line holds at least two whitespace-separated floats: x y.
    data = [ list( map( float, ls[i].split( )[:2] ) ) for i in range( length ) ]
    data = pointwiseXY_C.pointwiseXY_C( data, initialSize = len( data ), overflowSize = 10, accuracy = accuracy )
    line += length
    return( ls[length:], data )
def getDatas( ls ) :
    """Parse one labeled test case from ls, run the Python-side convolution
    and compare it point by point against the C result.

    Returns the lines remaining after this test case (empty list at EOF).
    """
    global line
    # Skip leading blank lines.
    i = 0
    for l in ls :
        if( l.strip( ) != '' ) : break
        i = i + 1
    line += i
    ls = ls[i:]
    if( len( ls ) == 0 ) : return( ls )
    # An optional '# Area = ...' header is skipped, not checked.
    if( ls[0][:9] == '# Area = ' ) : ls = ls[1:]
    if( len( ls ) == 0 ) : return( ls )
    label, ls = ls[0], ls[1:]
    if( label[:10] != '# label = ' ) : raise Exception( '%s: invalid label = "%s"' % ( __file__, label[:-1] ) )
    line += 1
    label = label.split( '=' )[1].strip( )
    ls, mode = getIntegerValue( 'mode', ls )
    ls, accuracy = getDoubleValue( 'accuracy', ls )
    # Two input curves followed by the C code's convolution of them.
    ls, self = getData( ls, accuracy )
    ls, other = getData( ls, accuracy )
    ls, cConvolution = getData( ls, accuracy )
    convolution = self.convolute( other, mode )
    if( len( convolution ) != len( cConvolution ) ) : raise Exception( '%s: len( convolution ) = %d != len( cConvolution ) = %d for label "%s"' %
            ( __file__, len( convolution ), len( cConvolution ), label ) )
    # Compare x and y of every point to 8 significant digits.
    for i , dXY in enumerate( convolution ) :
        gXY = cConvolution[i]
        compareValues( label, i, dXY[0], gXY[0] )
        compareValues( label, i, dXY[1], gXY[1] )
    return( ls )
# Consume the captured output one labeled test case at a time until exhausted.
while( len( ls ) ) : ls = getDatas( ls )
| [
"[email protected]"
] | |
87d8617072a506c92696bd2d28771c0581767428 | cc578cec7c485e2c1060fd075ccc08eb18124345 | /cs15211/FlattenNestedListIterator.py | a0f0bfde784f7bd127acc87b7ee70a319e0c47be | [
"Apache-2.0"
] | permissive | JulyKikuAkita/PythonPrac | 18e36bfad934a6112f727b4906a5e4b784182354 | 0ba027d9b8bc7c80bc89ce2da3543ce7a49a403c | refs/heads/master | 2021-01-21T16:49:01.482561 | 2019-02-07T06:15:29 | 2019-02-07T06:15:29 | 91,907,704 | 1 | 1 | Apache-2.0 | 2019-02-07T06:15:30 | 2017-05-20T18:12:53 | Python | UTF-8 | Python | false | false | 5,253 | py | __source__ = 'https://leetcode.com/problems/flatten-nested-list-iterator/'
# https://github.com/kamyu104/LeetCode/blob/master/Python/flatten-nested-list-iterator.py
# Time: O(n), n is the number of the integers.
# Space: O(h), h is the depth of the nested lists.
#
# Description: Leetcode # 341. Flatten Nested List Iterator
#
# This is the interface that allows for creating nested lists.
# You should not implement it, or speculate about its implementation
# """
#class NestedInteger(object):
# def isInteger(self):
# """
# @return True if this NestedInteger holds a single integer, rather than a nested list.
# :rtype bool
# """
#
# def getInteger(self):
# """
# @return the single integer that this NestedInteger holds, if it holds a single integer
# Return None if this NestedInteger holds a nested list
# :rtype int
# """
#
# def getList(self):
# """
# @return the nested list that this NestedInteger holds, if it holds a nested list
# Return None if this NestedInteger holds a single integer
# :rtype List[NestedInteger]
# """
# Companies
# Google Facebook Twitter
# Related Topics
# Stack Design
# Similar Questions
# Flatten 2D Vector Zigzag Iterator Mini Parser Array Nesting
#
import unittest
class NestedIterator(object):

    def __init__(self, nestedList):
        """
        Initialize your data structure here.
        :type nestedList: List[NestedInteger]
        """
        # Stack of [list, next-index] pairs; the top entry is the list
        # currently being iterated.
        self.__depth = [[nestedList, 0]]

    def next(self):
        """
        :rtype: int
        """
        # Assumes hasNext() was called first, so the top of the stack points
        # at an integer element.
        nestedList, i = self.__depth[-1]
        self.__depth[-1][1] += 1
        return nestedList[i].getInteger()

    def hasNext(self):
        """
        :rtype: bool
        """
        # Advance the stack: pop exhausted lists, descend into nested lists,
        # until an integer is on top or everything has been consumed.
        while self.__depth:
            nestedList, i = self.__depth[-1]
            if i == len(nestedList):
                self.__depth.pop()
            elif nestedList[i].isInteger():
                return True
            else:
                self.__depth[-1][1] += 1
                self.__depth.append([nestedList[i].getList(), 0])
        return False
# Your NestedIterator object will be instantiated and called as such:
# i, v = NestedIterator(nestedList), []
# while i.hasNext(): v.append(i.next())
class TestMethods(unittest.TestCase):
    """Trivial placeholder suite for the local (non-judge) run."""
    def test_Local(self):
        # Sanity no-op; real validation happens on the LeetCode judge.
        self.assertEqual(1, 1)
# Script entry point: run the placeholder unit tests.
if __name__ == '__main__':
    unittest.main()
Java = '''
# Thought:
/**
* // This is the interface that allows for creating nested lists.
* // You should not implement it, or speculate about its implementation
* public interface NestedInteger {
*
* // @return true if this NestedInteger holds a single integer, rather than a nested list.
* public boolean isInteger();
*
* // @return the single integer that this NestedInteger holds, if it holds a single integer
* // Return null if this NestedInteger holds a nested list
* public Integer getInteger();
*
* // @return the nested list that this NestedInteger holds, if it holds a nested list
* // Return null if this NestedInteger holds a single integer
* public List<NestedInteger> getList();
* }
*/
# 3ms 94.48%
public class NestedIterator implements Iterator<Integer> {
private Stack<Iterator<NestedInteger>> stack;
Integer nextInteger;
public NestedIterator(List<NestedInteger> nestedList) {
stack = new Stack<>();
if(nestedList != null){
stack.push(nestedList.iterator());
}
}
@Override
public Integer next() {
return nextInteger;
}
@Override
public boolean hasNext() {
while(!stack.isEmpty()){
Iterator<NestedInteger> iter = stack.peek();
if(!iter.hasNext()){
stack.pop();
continue;
}
NestedInteger nextVal = iter.next();
if(nextVal.isInteger()){
nextInteger = nextVal.getInteger();
return true;
}else{
stack.push(nextVal.getList().iterator());
}
}
return false;
}
}
/**
* Your NestedIterator object will be instantiated and called as such:
* NestedIterator i = new NestedIterator(nestedList);
* while (i.hasNext()) v[f()] = i.next();
*/
# 2ms 100%
class NestedIterator implements Iterator<Integer> {
List<NestedInteger> nestedList;
List<Integer> list = new ArrayList<Integer>();
int index;
private void help(List<NestedInteger> input){
// List<Integer> res = new ArrayList<Integer>();
for(NestedInteger item : input){
if(item.isInteger()){
list.add(item.getInteger());
}else{
help(item.getList());
}
}
// System.out.println(res.toString());
// return res;
}
public NestedIterator(List<NestedInteger> nestedList) {
this.nestedList = nestedList;
index = 0;
help(nestedList);
}
@Override
public Integer next() {
return list.get(index++);
}
@Override
public boolean hasNext() {
if(index < list.size()){
return true;
}
return false;
}
}
''' | [
"[email protected]"
] | |
ff975e89943e61a080b93fd3e0356b80d1223b49 | 12258001571bd504223fbf4587870960fa93a46d | /mud/Spirit-0.4.7/spirit/__init__.py | a39206389c749ccbde799c5f371bf90e6be804da | [
"MIT"
] | permissive | Nik0las1984/mud-obj | 0bd71e71855a9b0f0d3244dec2c877bd212cdbd2 | 5d74280724ff6c6ac1b2d3a7c86b382e512ecf4d | refs/heads/master | 2023-01-07T04:12:33.472377 | 2019-10-11T09:10:14 | 2019-10-11T09:10:14 | 69,223,190 | 2 | 0 | null | 2022-12-26T20:15:20 | 2016-09-26T07:11:49 | Python | UTF-8 | Python | false | false | 88 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
# Package version, consumed by packaging tooling and runtime checks.
__version__ = '0.4.7'
| [
"[email protected]"
] | |
d3d070c644f324b81f6c492f4cc9cd6582068417 | 5ac7bdec90c21a3da8fd5a1a684a80d202c30e8d | /openstack_dashboard/nikola_auth/views.py | 0f546a5d895bbcf30071ebe64326aa76c07ed578 | [
"Apache-2.0"
] | permissive | AlexOugh/horizon | 185aba38551ee15732a12f9690203d5383e03f70 | bda2a59aad7637f45211db37235ab18323e20b25 | refs/heads/master | 2021-01-16T18:45:36.289172 | 2015-02-10T23:58:16 | 2015-02-10T23:58:16 | 30,272,985 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,771 | py | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import django
from django.conf import settings
from django.contrib import auth
from django.contrib.auth.decorators import login_required # noqa
from django.contrib.auth import views as django_auth_views
from django import shortcuts
from django.utils import functional
from django.utils import http
from django.views.decorators.cache import never_cache # noqa
from django.views.decorators.csrf import csrf_protect # noqa
from django.views.decorators.debug import sensitive_post_parameters # noqa
from keystoneclient import exceptions as keystone_exceptions
from keystoneclient.v2_0 import client as keystone_client_v2
from nikola_auth import forms
# This is historic and is added back in to not break older versions of
# Horizon, fix to Horizon to remove this requirement was committed in
# Juno
from nikola_auth.forms import Login # noqa
from nikola_auth import user as auth_user
from nikola_auth import utils
# Compatibility shim: older Django versions lack django.utils.http
# is_safe_url, so fall back to the local utils implementation.
try:
    is_safe_url = http.is_safe_url
except AttributeError:
    is_safe_url = utils.is_safe_url
# Module-level logger for the auth views.
LOG = logging.getLogger(__name__)
@sensitive_post_parameters()
@csrf_protect
@never_cache
def login(request, template_name=None, extra_context=None, **kwargs):
    """Logs a user in using the :class:`~nikola_auth.forms.Login` form.

    :param request: current HttpRequest.
    :param template_name: override for the login template; when None the
        ajax partial or the full page is picked automatically.
    :param extra_context: extra template context; defaults to just the
        redirect-field name.
    :param kwargs: passed through to django.contrib.auth.views.login.
    """
    # If the user is already authenticated, redirect them to the
    # dashboard straight away, unless the 'next' parameter is set as it
    # usually indicates requesting access to a page that requires different
    # permissions.
    if (request.user.is_authenticated() and
            auth.REDIRECT_FIELD_NAME not in request.GET and
            auth.REDIRECT_FIELD_NAME not in request.POST):
        return shortcuts.redirect(settings.LOGIN_REDIRECT_URL)

    # Get our initial region for the form.
    initial = {}
    current_region = request.session.get('region_endpoint', None)
    requested_region = request.GET.get('region', None)
    regions = dict(getattr(settings, "AVAILABLE_REGIONS", []))
    # Pre-select a region requested via ?region=... when it is known and
    # differs from the session's current one.
    if requested_region in regions and requested_region != current_region:
        initial.update({'region': requested_region})

    if request.method == "POST":
        # NOTE(saschpe): Since https://code.djangoproject.com/ticket/15198,
        # the 'request' object is passed directly to AuthenticationForm in
        # django.contrib.auth.views#login:
        if django.VERSION >= (1, 6):
            form = functional.curry(forms.Login)
        else:
            form = functional.curry(forms.Login, request)
    else:
        form = functional.curry(forms.Login, initial=initial)

    if extra_context is None:
        extra_context = {'redirect_field_name': auth.REDIRECT_FIELD_NAME}

    # Choose the ajax partial or the full-page template when not overridden.
    if not template_name:
        if request.is_ajax():
            template_name = 'auth/_login.html'
            extra_context['hide'] = True
        else:
            template_name = 'auth/login.html'

    res = django_auth_views.login(request,
                                  template_name=template_name,
                                  authentication_form=form,
                                  extra_context=extra_context,
                                  **kwargs)
    # Set the session data here because django's session key rotation
    # will erase it if we set it earlier.
    if request.user.is_authenticated():
        auth_user.set_session_from_user(request, request.user)
        regions = dict(forms.Login.get_region_choices())
        region = request.user.endpoint
        region_name = regions.get(region)
        request.session['region_endpoint'] = region
        request.session['region_name'] = region_name
    return res
def logout(request, login_url=None, **kwargs):
    """Logs out the user if he is logged in. Then redirects to the log-in page.

    .. param:: login_url
       Once logged out, defines the URL where to redirect after login

    .. param:: kwargs
       see django.contrib.auth.views.logout_then_login extra parameters.
    """
    msg = 'Logging out user "%(username)s".' % \
        {'username': request.user.username}
    LOG.info(msg)
    # Invalidate the keystone token saved in the session, if any.
    endpoint = request.session.get('region_endpoint')
    token = request.session.get('token')
    if token and endpoint:
        delete_token(endpoint=endpoint, token_id=token.id)
    # NOTE(review): the bare string below is a no-op statement, not a
    # docstring; kept for fidelity.
    """ Securely logs a user out. """
    return django_auth_views.logout_then_login(request, login_url=login_url,
                                               **kwargs)
def delete_token(endpoint, token_id):
    """Delete a token.

    Best-effort: keystone client errors are logged and swallowed.
    """
    insecure = getattr(settings, 'OPENSTACK_SSL_NO_VERIFY', False)
    ca_cert = getattr(settings, "OPENSTACK_SSL_CACERT", None)
    # Drop any cached project information tied to this token.
    utils.remove_project_cache(token_id)
    try:
        if utils.get_keystone_version() < 3:
            client = keystone_client_v2.Client(
                endpoint=endpoint,
                token=token_id,
                insecure=insecure,
                cacert=ca_cert,
                debug=settings.DEBUG
            )
            client.tokens.delete(token=token_id)
            LOG.info('Deleted token %s' % token_id)
        else:
            # FIXME: KS-client does not have delete token available
            # Need to add this later when it is exposed.
            pass
    except keystone_exceptions.ClientException:
        LOG.info('Could not delete token')
@login_required
def switch(request, tenant_id, redirect_field_name=auth.REDIRECT_FIELD_NAME):
    """Switches an authenticated user from one project to another.

    Re-scopes the current token to tenant_id; on success the session is
    rebuilt around the new token and the superseded one is deleted.
    """
    LOG.debug('Switching to tenant %s for user "%s".'
              % (tenant_id, request.user.username))
    insecure = getattr(settings, 'OPENSTACK_SSL_NO_VERIFY', False)
    ca_cert = getattr(settings, "OPENSTACK_SSL_CACERT", None)
    endpoint = request.user.endpoint
    try:
        # Keystone v3 lives under /v3; rewrite a v2.0 endpoint if needed.
        if utils.get_keystone_version() >= 3:
            if not utils.has_in_url_path(endpoint, '/v3'):
                endpoint = utils.url_path_replace(endpoint, '/v2.0', '/v3', 1)
        client = utils.get_keystone_client().Client(
            tenant_id=tenant_id,
            token=request.user.token.id,
            auth_url=endpoint,
            insecure=insecure,
            cacert=ca_cert,
            debug=settings.DEBUG)
        auth_ref = client.auth_ref
        msg = 'Project switch successful for user "%(username)s".' % \
            {'username': request.user.username}
        LOG.info(msg)
    except keystone_exceptions.ClientException:
        msg = 'Project switch failed for user "%(username)s".' % \
            {'username': request.user.username}
        LOG.warning(msg)
        auth_ref = None
        LOG.exception('An error occurred while switching sessions.')

    # Ensure the user-originating redirection url is safe.
    # Taken from django.contrib.auth.views.login()
    redirect_to = request.REQUEST.get(redirect_field_name, '')
    if not is_safe_url(url=redirect_to, host=request.get_host()):
        redirect_to = settings.LOGIN_REDIRECT_URL
    if auth_ref:
        # Delete the old token before installing the newly scoped one.
        old_endpoint = request.session.get('region_endpoint')
        old_token = request.session.get('token')
        if old_token and old_endpoint and old_token.id != auth_ref.auth_token:
            delete_token(endpoint=old_endpoint, token_id=old_token.id)
        user = auth_user.create_user_from_token(
            request, auth_user.Token(auth_ref), endpoint)
        auth_user.set_session_from_user(request, user)
    return shortcuts.redirect(redirect_to)
@login_required
def switch_region(request, region_name,
                  redirect_field_name=auth.REDIRECT_FIELD_NAME):
    """Switches the user's region for all services except Identity service.

    The region will be switched if the given region is one of the regions
    available for the scoped project. Otherwise the region is not switched.
    """
    # Only accept regions that are valid for the current scoped project.
    if region_name in request.user.available_services_regions:
        request.session['services_region'] = region_name
        LOG.debug('Switching services region to %s for user "%s".'
                  % (region_name, request.user.username))

    # Ensure the user-originating redirect target is safe before using it.
    redirect_to = request.REQUEST.get(redirect_field_name, '')
    if not is_safe_url(url=redirect_to, host=request.get_host()):
        redirect_to = settings.LOGIN_REDIRECT_URL
    return shortcuts.redirect(redirect_to)
| [
"[email protected]"
] | |
904ddc6a110c928eecd9ed053afa3bf80f4931a3 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/25/usersdata/98/11884/submittedfiles/av1_3.py | e38e0f0784c64456ff7dcadb762460593411b8a4 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 274 | py | # -*- coding: utf-8 -*-
from __future__ import division
import math
a=int(input('Digite o valor de a: '))
b=int(input('Digite o valor de b: '))
i=1
cont=0
c=0
# Count the common divisors of a and b by trial division; `cont` is the
# count and `c` keeps the largest divisor found so far.
while True:
    if a%i==0 and b%i==0:
        cont=cont+1
        c=i
    i=i+1
    # NOTE(review): the loop stops once i reaches either input, so a and b
    # themselves are never tested, and with a == 1 or b == 1 the break
    # condition may never trigger -- verify the intended bounds.
    if i==a or i==b:
break | [
"[email protected]"
] | |
def solution(n, s, a, b, fares):
    """Minimum combined taxi fare when the riders bound for a and b share
    one cab from s up to some split node, then ride separately.

    Computes all-pairs shortest paths with Floyd-Warshall over the
    undirected fare graph, then tries every node as the split point.
    """
    INF = 1e9
    dist = [[INF] * (n + 1) for _ in range(n + 1)]
    for u, v, w in fares:
        dist[u][v] = w
        dist[v][u] = w
    for node in range(1, n + 1):
        dist[node][node] = 0
    # Floyd-Warshall relaxation.
    for k in range(1, n + 1):
        for i in range(1, n + 1):
            d_ik = dist[i][k]
            for j in range(1, n + 1):
                relaxed = d_ik + dist[k][j]
                if relaxed < dist[i][j]:
                    dist[i][j] = relaxed
    # Try every split node: shared leg s->k, then separate legs k->a, k->b.
    best = INF
    for split in range(1, n + 1):
        best = min(best, dist[s][split] + dist[split][a] + dist[split][b])
    return best
"[email protected]"
] | |
e50e19db7754f252118d5e3c69541abe67d0fdab | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/42/usersdata/69/21660/submittedfiles/jain.py | 34c02d431af79001b4eb9414ce0115cad59ff0fc | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,090 | py | # -*- coding: utf-8 -*-
from __future__ import division
import funcoes
'''
ENTRADA TESTE
f = 0.2
dH = 5
L = 3250
Q = 0.005
g = 9.81
v = 0.000001
e = 0.00006
k = 10
A saida para esta entrada é aproximadamente: 0.1247 (D) e 0.0224 (f)
'''
# Initial guess for the friction factor used by the fixed-point iteration.
f = 0.2
# NOTE(review): relies on Python 2 input() evaluating the typed text to a
# number -- confirm the intended interpreter version.
dH = input('Digite a perda de carga: ')
L = input('Digite o comprimento da tubulação: ')
Q = input('Digite a vazão: ')
g = input('Digite a gravidade: ')
v = input('Digite a viscosidade cinemática: ')
e = input('Digite a rugosidade absoluta: ')
# Maximum number of fixed-point iterations.
k = 10
#comece aqui
import math
def diametro(fn,L,Q,dH):
    """Pipe diameter from the head-loss relation (presumably
    Darcy-Weisbach); reads the module-level gravity ``g``."""
    numerador = 8 * fn * L * Q * Q
    denominador = math.pi * math.pi * dH * g
    return (numerador / denominador) ** 0.2
def Reynalds(Q,D,v):
    """Reynolds number for circular-pipe flow: Re = 4*Q / (pi * D * v)."""
    caudal = 4 * Q
    return caudal / (math.pi * D * v)
def atrito(Rey,E,D):
    """Explicit friction-factor correlation; reproduces the original
    floating-point expression exactly (presumably a Churchill-style fit
    -- confirm the source reference)."""
    termo_rugosidade = (E/(3.7*D))+(5.74/(Rey**0.9))
    termo_laminar = (2500/Rey)**6
    fator = (((64/Rey)**8)+9.5*((math.log(termo_rugosidade)-termo_laminar)**(-16)))**0.125
    return fator
# BUG FIX: the original loop read `fn` on the first iteration before it was
# ever assigned (only `f` held the initial guess), raising NameError.
# Seed the iterate from the initial guess instead.
fn = f
# Fixed-point iteration: diameter -> Reynolds number -> updated friction
# factor, for at most k rounds or until the friction factor converges.
for i in range(0,k,1):
    D=diametro(fn,L,Q,dH)
    Rey=Reynalds(Q,D,v)
    fn=atrito(Rey,e,D)
    # Only compare/accept inside the correlation's validity ranges.
    if 0.000001<=(e/D)<=0.01 and 5000<=Rey<=100000000:
        if fn==f:
            # Converged: friction factor stopped changing.
            break
        else:
            f=fn
print('%.10f'%f)
print('%.10f'%D)
"[email protected]"
] | |
edf8538fd32c1becb17b39f2cd1cc4dae63a0763 | 652b72b566a84dbd0e667a86759ec5ee793219e0 | /App/carmanager/admin.py | 76a53766630651aae2e3a31a0b00cfa2fd7d7c65 | [] | no_license | RaisaKulakovska/Some-Django-Project | 05a9b0ef376751fbe6d25f2d5d06471bfd84e6be | 9f42e8a739180fd31adca55ebd559539f59f466c | refs/heads/master | 2021-04-03T12:07:13.132940 | 2020-03-31T11:16:17 | 2020-03-31T11:16:17 | 248,351,253 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 97 | py | from django.contrib import admin
from .models import CarManager
# Expose CarManager in the Django admin with the default ModelAdmin.
admin.site.register(CarManager)
| [
"[email protected]"
] | |
3b12aa23f81a807198b89b5e8f7d0a2eec9c9ecd | 1419418226b6ba0f510649daaf62b71554cc2284 | /amatrice/project_GPS_M5.3_M5.8.py | 5aabd4ea3ee678dc37aff80268eb4ebefda90005 | [] | no_license | shineusn/mylife | 2ef48a777e39be2ef746c3dad16ea963d5b23e5e | 61dfa72d9047551746d26b7fe01fb5c2f1f0657a | refs/heads/master | 2020-03-22T13:44:42.422127 | 2018-02-13T18:09:43 | 2018-02-13T18:09:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,896 | py | from matplotlib import pyplot as plt
from numpy import genfromtxt,argmin,array,zeros,ones,where,linspace,r_
from matplotlib.ticker import MultipleLocator

# Load GPS coseismic displacements and InSAR line-of-sight (LOS) samples.
g=genfromtxt('/Users/dmelgar/Amatrice2016/GPS/Cosismico_26Oct2016_GPS_GdL_V1.dat')
insar=genfromtxt(u'/Users/dmelgar/Amatrice2016/InSAR/M5.3-M5.8/Italy_T44/T44_Italy.lltnde')

#Parse GPS
lon_gps=g[:,1]
lat_gps=g[:,2]
# /1000: presumably converts mm to m -- TODO confirm the input units.
north=g[:,6]/1000
east=g[:,4]/1000
up=g[:,8]/1000

#parse insar
lon_insar=insar[:,0]
lat_insar=insar[:,1]
los=insar[:,6]/1000
# Satellite look-vector components (east, north, up).
lookE=insar[:,3]
lookN=insar[:,4]
lookU=insar[:,5]

#Projection variables
# 9999 marks GPS sites with no InSAR sample closer than `thresh` degrees.
projected_gps=9999*ones(len(lon_gps))
los_insar=9999*ones(len(lon_gps))
thresh=0.005

# For each GPS site, find the nearest InSAR point and project the 3-D GPS
# displacement onto that point's look vector for a direct LOS comparison.
for k in range(len(lon_gps)):
    #Get distance from GPS to LOS points
    d=((lon_gps[k]-lon_insar)**2+(lat_gps[k]-lat_insar)**2)**0.5
    i=argmin(d)
    if d[i]<thresh:
        #Get los vector
        unit_vector=array([lookE[i],lookN[i],lookU[i]])
        #project
        projected_gps[k]=unit_vector.dot(array([east[k],north[k],up[k]]))
        los_insar[k]=los[i]

# Top panel: horizontal GPS vectors plus a reference arrow at 11.65E, 43.72N.
plt.figure(figsize=(6,10))
plt.subplot(211)
plt.quiver(r_[11.65,lon_gps],r_[43.72,lat_gps],r_[1,east],r_[0,north],scale=0.11)
#i=where(up<0)[0]
#j=where(up>=0)[0]
#plt.quiver(lon_gps[j],lat_gps[j],zeros(len(up[j])),up[j],scale=0.01,color='b')
#plt.quiver(lon_gps[i],lat_gps[i],zeros(len(up[i])),up[i],scale=0.01,color='r')
# Bottom panel: projected GPS vs. InSAR LOS with a 1:1 reference line.
ax=plt.subplot(212)
i=where(projected_gps<9999)[0]
x=linspace(-0.02,0.02)
y=x
plt.plot(x,y,lw=2,c='k')
plt.scatter(projected_gps[i],los_insar[i],marker='s',s=30,lw=0.2,c='#0080FF')
plt.xlim([-0.02,0.02])
plt.ylim([-0.02,0.02])
xmajorLocator = MultipleLocator(0.01)
ymajorLocator = MultipleLocator(0.01)
ax.xaxis.set_major_locator(xmajorLocator)
ax.yaxis.set_major_locator(ymajorLocator)
plt.ylabel('InSAR LOS (m)')
plt.xlabel('Projected GPS (m)')
plt.subplots_adjust(left=0.2,right=0.97,top=0.99,bottom=0.1)
| [
"[email protected]"
] | |
a861a51696a1ce07f9eff6c8bb1d0344e618b511 | 3cadf60273e5e7ecede807d631d2c9b9e45499ad | /src/18_stuff/task02.py | d28f964b1cb2678dc24838eebe40832c175a7700 | [] | no_license | shamanengine/HackerRank | 78a4316713518601f4f0499626fbce8766e004df | 8f6c4afa0b6d1e1e934af6ba173c00eae249f42e | refs/heads/master | 2021-08-27T17:57:34.391358 | 2021-08-13T15:17:17 | 2021-08-13T15:17:17 | 143,048,178 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 325 | py | '''
Given 2 numbers, provide number of all perfect squares between them
'''
import math
# Read the inclusive bounds [a, b] from one input line.
a, b = map(int, input().strip().split())
i = 0
# Count perfect squares in [a, b]: start at ceil(sqrt(a)) and stop at the
# first x whose square exceeds b.
# NOTE(review): a ** (1 / 2) is a float sqrt and can be off by one ulp for
# very large a; math.isqrt would be exact -- confirm the input bounds.
for x in range(int(math.ceil(a ** (1 / 2))), b):
    if x ** 2 <= b:
        i += 1
        # print(x)
    else:
        break
print(i)
'''
Input
1 50
25590 26590
9 49
Output
7
4
5
'''
| [
"[email protected]"
] | |
a2c079a98705ce6a129fe2a91296597395f2abee | afb2bdf8044e4c9ff09b1b8379efbc17867d8cc0 | /4parts/challenge/challenge2.py | e60f5117ceda493cf23d8d7097d1376bfa4b1068 | [] | no_license | ChenFu0420/leranpython | b2e364ff8d6730a3eb768b76f0369faa3367dfa2 | 52d0aa614d7fab19e17bbb696330a0330d3862b6 | refs/heads/master | 2020-05-29T19:46:24.020046 | 2019-09-25T09:17:10 | 2019-09-25T09:17:10 | 189,339,151 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 158 | py | n = eval(input())
# Print an n-row star pyramid (n was read above via eval(input())):
# row i gets (n - i) leading spaces followed by (2*i + 1) stars.
for i in range(n):
    for j in range(0, n - i):
        print(end=" ")
    for k in range(2 * i + 1):
        print("*",end="")
print() | [
"[email protected]"
] | |
974d749d361019cdd9d6bb1b34a159f82ee40042 | 5d6201c7da4f19bc92f003b98629a10bd62e2426 | /main/migrations/0002_auto_20151106_1447.py | 2124b4360f205d273ee5ba1b8c5961096578fe9e | [] | no_license | azul-cloud/travelblogwave | 35b24cf9550a544eeaeaa01d99b085930f5f410b | 8c5dba290723484c3832606e9da0deba642395de | refs/heads/master | 2021-01-21T05:01:11.100319 | 2016-05-30T12:25:25 | 2016-05-30T12:25:25 | 22,630,820 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 532 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('main', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='user',
name='username',
field=models.CharField(help_text='Required. 30 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=30, verbose_name='username'),
preserve_default=False,
),
]
| [
"[email protected]"
] | |
c4de4f95686f6d39c4a347e4462b601fbc2bd6d2 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03828/s176803120.py | 3c09dd5cfe45d562d5aee2961335ac10dec7d7b7 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 634 | py | from collections import Counter
MOD = 10 ** 9 + 7
def factorize(n):
""" Simple factorize
:param n: number to factorize
:return: list of factors
time complexity : O(n√n)
space complexity : O(n)
"""
factors = []
for i in range(2, n+1):
while n % i == 0:
n = n // i
factors.append(i)
return factors
def main():
N = int(input())
factors = []
for i in range(1, N+1):
factors += factorize(i)
factor_counts = list(Counter(factors).values())
ans = 1
for v in factor_counts:
ans = ans * (v+1) % MOD
print(ans)
main() | [
"[email protected]"
] | |
9e5b1b073c0e724704be0a80caf06b160652600f | abc1a497c41ddd8669c8c41da18af65d08ca54e4 | /try/recon/analize_recon_event.py | 94841cb3f10478d5f14b3da82297e1331ee0b6fd | [] | no_license | gerakolt/direxeno_privet | fcef5e3b654720e277c48935acc168472dfd8ecc | 75e88fb1ed44fce32fce02677f64106121259f6d | refs/heads/master | 2022-12-20T22:01:30.825891 | 2020-10-04T06:01:07 | 2020-10-04T06:01:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,176 | py | import numpy as np
import matplotlib.pyplot as plt
import matplotlib.colors as mcolors
import time
import os
import sys
pmts=np.array([0,1,4,7,8,15])
BGpath='/home/gerak/Desktop/DireXeno/190803/BG/EventRecon/'
path='/home/gerak/Desktop/DireXeno/190803/Co57/EventRecon/'
blw_cut=15
init_cut=20
chi2_cut=5000
left=0
right=400
Rec=np.recarray(100000, dtype=[
('area', 'i8', len(pmts)),
('blw', 'f8', len(pmts)),
('id', 'i8'),
('chi2', 'f8', len(pmts)),
('h', 'i8', (200, len(pmts))),
('init_event', 'i8'),
('init_wf', 'i8', len(pmts))
])
j=0
id=0
WFs=np.zeros((len(pmts), 1000))
recon_WFs=np.zeros((len(pmts), 1000))
# for filename in os.listdir(path):
# if filename.endswith(".npz") and filename.startswith("recon1ns"):
# print(filename)
# data=np.load(path+filename)
# rec=data['rec']
# WFs+=data['WFs']
# recon_WFs+=data['recon_WFs']
# for r in rec:
# Rec[j]['area']=r['area']
# Rec[j]['blw']=r['blw']
# Rec[j]['id']=r['id']
# Rec[j]['chi2']=r['chi2']
# Rec[j]['init_wf']=r['init_wf']
# Rec[j]['h']=r['h']
# Rec[j]['init_event']=r['init_event']
# if r['id']>id:
# id=r['id']
# j+=1
# # sys.exit()
# os.remove(path+filename)
# np.savez(path+'recon1ns'.format(id), rec=Rec[:j-1], WFs=WFs, recon_WFs=recon_WFs)
data=np.load(BGpath+'recon1ns.npz')
BG=data['rec']
data=np.load(path+'recon1ns.npz')
rec=data['rec']
WFs=data['WFs']
recon_WFs=data['recon_WFs']
fig, ax=plt.subplots(2,3)
fig.subplots_adjust(wspace=0, hspace=0)
fig.suptitle('Co57', fontsize=25)
x=np.arange(1000)/5
for i in range(len(pmts)):
np.ravel(ax)[i].plot(x, WFs[i], 'r1', label='WF: PMT{}'.format(pmts[i]))
np.ravel(ax)[i].plot(x, recon_WFs[i], 'b-.', label='Recon')
np.ravel(ax)[i].legend(fontsize=12)
fig, ax=plt.subplots(2,3)
fig.suptitle('Co57', fontsize=25)
x=np.arange(1000)/5
for i in range(len(pmts)):
np.ravel(ax)[i].hist(rec['init_wf'][:,i], bins=100, range=[0,400], label='PMT{} init_wf'.format(pmts[i]))
np.ravel(ax)[i].legend(fontsize=15)
rec=rec[np.all(rec['init_wf']>init_cut, axis=1)]
BG=BG[np.all(BG['init_wf']>init_cut, axis=1)]
fig, ax=plt.subplots(2,3)
fig.subplots_adjust(wspace=0, hspace=0)
fig.suptitle('Co57', fontsize=25)
x=np.arange(1000)/5
for i in range(len(pmts)):
np.ravel(ax)[i].hist(rec['blw'][:,i], bins=100, range=[0,30], label='PMT{} BLW'.format(pmts[i]))
np.ravel(ax)[i].legend(fontsize=15)
plt.figure()
plt.hist(np.sqrt(np.sum(rec['blw']**2, axis=1)), bins=100, label='BLW', range=[0,30])
plt.axvline(blw_cut, ymin=0, ymax=1, color='k')
plt.legend(fontsize=15)
rec=rec[np.sqrt(np.sum(rec['blw']**2, axis=1))<blw_cut]
BG=BG[np.sqrt(np.sum(BG['blw']**2, axis=1))<blw_cut]
fig, ax=plt.subplots(3,2)
fig.subplots_adjust(wspace=0, hspace=0)
fig.suptitle('Co57', fontsize=25)
for i in range(len(pmts)):
np.ravel(ax)[i].hist(rec['chi2'][:,i], bins=100, label='PMT{} chi2'.format(pmts[i]))
np.ravel(ax)[i].set_yscale('log')
np.ravel(ax)[i].legend(fontsize=15)
plt.figure()
plt.hist(np.sqrt(np.sum(rec['chi2']**2, axis=1)), bins=100, label='chi2')
plt.axvline(chi2_cut, ymin=0, ymax=1, color='k')
plt.legend(fontsize=15)
plt.yscale('log')
rec=rec[np.sqrt(np.sum(rec['chi2']**2, axis=1))<chi2_cut]
rec=rec[np.sum(np.sum(rec['h'][:,:100,:], axis=2), axis=1)>0]
BG=BG[np.sqrt(np.sum(BG['chi2']**2, axis=1))<chi2_cut]
BG=BG[np.sum(np.sum(BG['h'][:,:100,:], axis=2), axis=1)>0]
init=np.sum(np.sum(rec['h'][:,:10,:], axis=2), axis=1)
full=np.sum(np.sum(rec['h'][:,:100,:], axis=2), axis=1)
BGinit=np.sum(np.sum(BG['h'][:,:10,:], axis=2), axis=1)
BGfull=np.sum(np.sum(BG['h'][:,:100,:], axis=2), axis=1)
plt.figure()
plt.hist(init/full, bins=100, range=[0,1], label='Relative number of PEs in first 10 ns')
rec=rec[init/full<0.5]
BG=BG[BGinit/BGfull<0.5]
fig, ax=plt.subplots(3,2)
fig.subplots_adjust(wspace=0, hspace=0)
fig.suptitle('Co57', fontsize=25)
for i in range(len(pmts)):
np.ravel(ax)[i].plot(np.mean(rec['h'][:,:,i], axis=0), 'k-.', label='PMT{}'.format(pmts[i]))
plt.figure()
up=np.sum(rec['h'][:,:100,0], axis=1)+np.sum(rec['h'][:,:100,1], axis=1)
dn=np.sum(rec['h'][:,:100,-1], axis=1)+np.sum(rec['h'][:,:100,-2], axis=1)+np.sum(rec['h'][:,:100,-3], axis=1)
plt.plot(np.arange(450), np.arange(450)*3+18, 'k--')
plt.hist2d(up, dn, bins=[100, 100], range=[[0,350], [0,700]], norm=mcolors.PowerNorm(0.3))
plt.xlabel('Sum of PEs in the top floor PMTs', fontsize=25)
plt.ylabel('Sum of PEs in the bottom floor PMTs', fontsize=25)
rec0=rec
rec=rec[dn<3*up+18]
plt.legend(fontsize=15)
TB=1564926608911-1564916365644
TA=1564916315672-1564886605156
TBG=1564874707904-1564826183355
TCs=1564823506349-1564820274767
hist, bins=np.histogram(np.sum(np.sum(BG['h'][:,:100,:], axis=2), axis=1), bins=np.arange(250)*4)
plt.figure()
plt.hist(np.sum(np.sum(rec['h'][:,:100,:], axis=2), axis=1), bins=np.arange(250)*4, histtype='step', linewidth=5, label='All events')
plt.bar(0.5*(bins[1:]+bins[:-1]) ,TA/TBG*hist, label='BG', width=bins[1:]-bins[:-1], color='orange', alpha=0.5)
plt.axvline(left, 0 ,1, color='k')
plt.axvline(right, 0 ,1, color='k')
plt.legend(fontsize=15)
fig, ax=plt.subplots(2,3)
# fig.suptitle('Co57 - Spec - slow', fontsize=25)
for i in range(len(pmts)):
np.ravel(ax)[i].hist(np.sum(rec['h'][:,:,i], axis=1), bins=np.arange(200), histtype='step', label='After\n up-dn cut\n PMT{}'.format(i), linewidth=3)
np.ravel(ax)[i].hist(np.sum(rec0['h'][:,:,i], axis=1), bins=np.arange(200), histtype='step', label='Before\n up-dn cut', linewidth=3)
np.ravel(ax)[i].legend(fontsize=15)
# fig, ax=plt.subplots(3,5)
# k=0
# for i in range(len(pmts)-1):
# hi=rec['h'][:,:,i]
# for j in range(i+1, len(pmts)):
# hj=rec['h'][:,:,j]
# np.ravel(ax)[k].hist((np.sum(hi, axis=1)-np.mean(np.sum(hi, axis=1)))*(np.sum(hj, axis=1)-np.mean(np.sum(hj, axis=1)))/(np.mean(np.sum(hj, axis=1))*np.mean(np.sum(hi, axis=1))),
# label='PMT{}-PMT{}'.format(pmts[i], pmts[j]), bins=100, range=[-1, 1])
# np.ravel(ax)[k].legend()
# k+=1
plt.show()
| [
"[email protected]"
] | |
ed7791ad961fa9dd1d63297906e9bc6fdf71ef7c | be84495751737bbf0a8b7d8db2fb737cbd9c297c | /tests/test_intersections/triangle2.py | 910e5c8217bcf254300859b37732a19f7136177f | [] | no_license | mario007/renmas | 5e38ff66cffb27b3edc59e95b7cf88906ccc03c9 | bfb4e1defc88eb514e58bdff7082d722fc885e64 | refs/heads/master | 2021-01-10T21:29:35.019792 | 2014-08-17T19:11:51 | 2014-08-17T19:11:51 | 1,688,798 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,283 | py |
from tdasm import Tdasm, Runtime
from renmas.maths import Vector3
from renmas.shapes import Triangle, intersect_ray_shape_array
from renmas.core import Ray
import random
import renmas.utils as util
import timeit
asm_structs = util.structs("ray", "triangle", "hitpoint")
SSE2_ASM = """
#DATA
"""
SSE2_ASM += asm_structs + """
ray r1
triangle tri1
hitpoint hp
float one = 1.0
float zero = 0.0
float epsilon = 0.00001
float beta
float coff
float min_dist = 999999.0
float xm0[4]
float xm1[4]
float xm2[4]
float xm3[4]
float xm4[4]
float xm5[4]
float xm6[4]
float xm7[4]
uint32 xm0i[4]
uint32 result
uint32 n = 1000000
#CODE
mov eax, r1
mov ebx, tri1
mov ecx, min_dist
mov edx, hp
call ray_triangle
movaps oword [xm0], xmm0
movaps oword [xm1], xmm1
movaps oword [xm2], xmm2
movaps oword [xm3], xmm3
movaps oword [xm4], xmm4
movaps oword [xm5], xmm5
movaps oword [xm6], xmm6
movaps oword [xm7], xmm7
movaps oword [xm0i], xmm0
mov dword [result], eax
#END
global ray_triangle:
movaps xmm0, oword [ebx + triangle.p0]
movaps xmm2, oword [eax + ray.dir]
movaps xmm1, xmm0
subps xmm1, oword [ebx + triangle.p2]
movaps xmm3, xmm0
subps xmm3, oword [eax + ray.origin]
subps xmm0, oword [ebx + triangle.p1]
; f f h f
movaps xmm4, xmm1
movlhps xmm4, xmm3
shufps xmm4, xmm4, 01110101B
; k k k l
movaps xmm5, xmm2
movhlps xmm5, xmm3
shufps xmm5, xmm5, 00101010B
; f f h f * k k k l
movaps xmm7, xmm4
mulps xmm7, xmm5
; g g g h
movaps xmm6, xmm2
movlhps xmm6, xmm3
shufps xmm6, xmm6, 11010101B
; j j l j
movaps xmm4, xmm1
movhlps xmm4, xmm3
shufps xmm4, xmm4, 10001010B
; g g g h * j j l j
mulps xmm4, xmm6
; f f h f * k k k l - g g g h * j j l j
subps xmm7, xmm4
; a d a a
movaps xmm5, xmm0
movlhps xmm5, xmm3
shufps xmm5, xmm5, 00001000B
; a d a a * (f f h f * k k k l - g g g h * j j l j)
mulps xmm7, xmm5
; i l i i
movaps xmm5, xmm0
movhlps xmm5, xmm3
shufps xmm5, xmm5, 10100010B
; g g g h * i l i i
mulps xmm6, xmm5
; e h e e
movaps xmm4, xmm0
movlhps xmm4, xmm3
shufps xmm4, xmm4, 01011101B
; k k k l
movaps xmm5, xmm2
movhlps xmm5, xmm3
shufps xmm5, xmm5, 00101010B
; e h e e * k k k l
mulps xmm5, xmm4
; g g g h * i l i i - e h e e * k k k l
subps xmm6, xmm5
; b b d b
movaps xmm5, xmm1
movlhps xmm5, xmm3
shufps xmm5, xmm5, 00100000B
; b b d b * (g g g h * i l i i - e h e e * k k k l)
mulps xmm6, xmm5
addps xmm7, xmm6
; j j l j
movaps xmm5, xmm1
movhlps xmm5, xmm3
shufps xmm5, xmm5, 10001010B
; e e h e * j j l j
mulps xmm4, xmm5
; f f h f
movaps xmm6, xmm1
movlhps xmm6, xmm3
shufps xmm6, xmm6, 01110101B
; i l i i
movaps xmm5, xmm0
movhlps xmm5, xmm3
shufps xmm5, xmm5, 10100010B
; f f h f * i l i i
mulps xmm6, xmm5
; e h e e * j j l j - f f h f * i l i i
subps xmm4, xmm6
; c c c d
movaps xmm5, xmm2
movlhps xmm5, xmm3
shufps xmm5, xmm5, 10000000B
; c c c d * (e h e e * j j l j - f f h f * i l i i)
mulps xmm4, xmm5
addps xmm7, xmm4
movhlps xmm5, xmm7
movaps xmm4, xmm7
shufps xmm4, xmm4, 0x55
movaps xmm6, xmm7
shufps xmm6, xmm6, 0xFF
; xmm7 = d
; xmm6 = td
; xmm5 = gamma
; xmm4 = beta
pxor xmm3, xmm3
; beta < 0.0
movaps xmm0, xmm7
xorps xmm0, xmm4
cmpss xmm0, xmm3, 5
; gamma < 0.0
movaps xmm1, xmm7
xorps xmm1, xmm5
cmpss xmm1, xmm3, 5
; accumulation of conditions
andps xmm0, xmm1
; beta + gamma < 1.0
movaps xmm2, xmm4
addps xmm2, xmm5
cmpss xmm2, xmm6, 2
andps xmm0, xmm2
movd esi, xmm0
cmp esi, 0
jne _accept
xor eax, eax
ret
_accept:
divss xmm6, xmm7
comiss xmm6, dword [epsilon]
jc _reject
comiss xmm6, dword [ecx] ;minimum distance
jnc _reject
;populate hitpoint structure
; t is in xmm6
movaps xmm2, oword [eax + ray.dir]
movaps xmm3, oword [ebx + triangle.normal]
movss xmm4, dword [ebx + triangle.mat_index]
movss dword [edx + hitpoint.t], xmm6
movaps oword [edx + hitpoint.normal], xmm3
movss dword [edx + hitpoint.mat_index], xmm4
macro broadcast xmm5 = xmm6[0]
mulps xmm5, xmm2
macro eq128 edx.hitpoint.hit = xmm5 + eax.ray.origin
mov eax, 1
ret
_reject:
xor eax, eax
ret
"""
def create_triangle():
p0 = Vector3(0.1, 0.0, -2.0)
p1 = Vector3(4.0, 0.5, 0.2)
p2 = Vector3(2.2, 4.3, -1.0)
tr = Triangle(p0, p1, p2, 3)
return tr
def create_ray():
origin = Vector3(0.0, 0.0, 0.0)
dirx = 0.985906665972
diry = 0.165777376892
dirz = 0.0224923832256
#direction = Vector3(8.8, 8.9, 8.7)
direction = Vector3(dirx, diry, dirz)
#direction.normalize()
ray = Ray(origin, direction)
return ray
def v4(v3):
return (v3.x, v3.y, v3.z, 0.0)
if __name__ == "__main__":
tr = create_triangle()
ray = create_ray()
hp = tr.isect(ray)
if hp is not False:
print(hp.t)
asm = util.get_asm()
mc = asm.assemble(SSE2_ASM)
#mc.print_machine_code()
runtime = Runtime()
ds = runtime.load("test", mc)
ds["tri1.p0"] = v4(tr.v0)
ds["tri1.p1"] = v4(tr.v1)
ds["tri1.p2"] = v4(tr.v2)
ds["tri1.normal"] = v4(tr.normal)
ds["tri1.mat_index"] = tr.material
ds["r1.origin"] = v4(ray.origin)
ds["r1.dir"] = v4(ray.dir)
runtime.run("test")
print("xmm0 = ", ds["xm0"])
print("xmm1 = ", ds["xm1"])
print("xmm2 = ", ds["xm2"])
print("xmm3 = ", ds["xm3"])
print("xmm4 = ", ds["xm4"])
print("xmm5 = ", ds["xm5"])
print("xmm6 = ", ds["xm6"])
print("xmm7 = ", ds["xm7"])
print("xmm7i = ", ds["xm0i"])
print("Rezultat je = ", ds["result"])
print(ds["hp.normal"])
print(hp.normal)
print(ds["hp.mat_index"])
print(hp.material)
print(ds["hp.hit"])
print(hp.hit_point)
print(ds["hp.t"])
print(hp.t)
| [
"[email protected]"
] | |
f5e6065e2191f1f68e81fc65acc158143819626d | a884039e1a8b0ab516b80c2186e0e3bad28d5147 | /Livros/Livro-Introdução à Programação-Python/Capitulo 7/Exemplos 7/Listagem7_17.py | 69bd31b1f28fb805b79086213f580f796b1c8375 | [
"MIT"
] | permissive | ramonvaleriano/python- | 6e744e8bcd58d07f05cd31d42a5092e58091e9f0 | ada70918e945e8f2d3b59555e9ccc35cf0178dbd | refs/heads/main | 2023-04-10T14:04:24.497256 | 2021-04-22T18:49:11 | 2021-04-22T18:49:11 | 340,360,400 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 197 | py | # Program: Listagem7_17.py
# Author: Ramon R. Valeriano
# Description:
# Developed: 18/05/2020 - 20:19
# Updated:
m = "Uma linha\nOutra Linhas\nE mais outra linha."
print(m)
print(m.splitlines())
| [
"[email protected]"
] | |
3d7c5b4eb3d00606ff5abe8c11832193e7201eb5 | 4331b28f22a2efb12d462ae2a8270a9f666b0df1 | /.history/dvdstore/webapp/views_20190914163031.py | 980217ae2624c75216a4e349f8f403f3cc89970e | [] | no_license | ZiyaadLakay/csc312.group.project | ba772a905e0841b17478eae7e14e43d8b078a95d | 9cdd9068b5e24980c59a53595a5d513c2e738a5e | refs/heads/master | 2020-07-26T23:30:22.542450 | 2019-09-16T11:46:41 | 2019-09-16T11:46:41 | 200,703,160 | 0 | 0 | null | 2019-08-05T17:52:37 | 2019-08-05T17:52:37 | null | UTF-8 | Python | false | false | 10,234 | py | from django.shortcuts import render
from .models import DVD, Transaction, Customer
from django.core.paginator import EmptyPage,PageNotAnInteger, Paginator
from django.db.models import Q
from django.contrib.auth.models import User, auth
from django.shortcuts import render, redirect
from django.contrib import messages
from django.core.files.storage import FileSystemStorage
from django.contrib.auth.decorators import login_required, permission_required
from .form import DocumentForm
import datetime
#This is the homepage for the User
def home(request):
dvds = DVD.objects.all() #imports dvds from database
query = request.GET.get("query")
gen = request.GET.get("gen")
if query:
dvds = DVD.objects.filter(Q(Title__icontains=query))#Search Function according to name
if not DVD.objects.filter(Q(Title__icontains=query)).exists():
messages.info(request,'No search results for : '+query)
elif gen:
dvds = DVD.objects.filter(Q(genre__icontains=gen))#Search Function according to name
paginator = Paginator(dvds, 6) # Show 3 dvds per page
page = request.GET.get('page')
dvds = paginator.get_page(page)
genre = {'Action', 'Comedy', 'Drama', 'Family', 'Romance'}
return render(request, 'home.html', {'dvds':dvds}, {'genre':genre}) #renders the page
#This is the page for clerks
@login_required
def clerk(request):
dvds = DVD.objects.all() #imports dvds from database
trans = Transaction.objects.all() #imports dvds from database
users = User.objects.all() #imports dvds from database
customer = Customer.objects.all() #imports dvds from database
query = request.GET.get("query")
if query:
dvds = DVD.objects.filter(Q(Title__icontains=query)) #Search Function according to name
paginator = Paginator(dvds, 6) # Show 3 dvds per page
page = request.GET.get('page')
dvds = paginator.get_page(page)
form=DocumentForm()
context_dict = { 'dvds':dvds ,'form': form, 'trans':trans, 'users':users, 'customer':customer}
return render(request, 'clerk.html',context_dict)
@login_required
def userstbl(request):
dvds = DVD.objects.all() #imports dvds from database
trans = Transaction.objects.all() #imports dvds from database
users = User.objects.all() #imports dvds from database
customer = Customer.objects.all() #imports dvds from database
query = request.GET.get("query")
if query:
users = User.objects.filter(Q(username__icontains=query)) #Search Function according to name
paginator = Paginator(dvds, 6) # Show 3 dvds per page
page = request.GET.get('page')
dvds = paginator.get_page(page)
form=DocumentForm()
context_dict = { 'dvds':dvds ,'form': form, 'trans':trans, 'users':users, 'customer':customer}
return render(request, 'userstbl.html',context_dict)
@login_required
def transactions(request):
dvds = DVD.objects.all() #imports dvds from database
trans = Transaction.objects.all() #imports dvds from database
users = User.objects.all() #imports dvds from database
customer = Customer.objects.all() #imports dvds from database
query = request.GET.get("query")
if query:
trans = Transaction.objects.filter(Q(TransactionNumber__icontains=query)) #Search Function according to name
paginator = Paginator(dvds, 6) # Show 3 dvds per page
page = request.GET.get('page')
dvds = paginator.get_page(page)
form=DocumentForm()
context_dict = { 'dvds':dvds ,'form': form, 'trans':trans, 'users':users, 'customer':customer}
return render(request, 'transactions.html',context_dict)
def register2(request):
if request.method == 'POST':
first_name= request.POST['first_name']
last_name= request.POST['last_name']
username= request.POST['username']
email= request.POST['email']
password1= first_name[0]+last_name
if User.objects.filter(username=username).exists():
messages.info(request, 'Username Taken')
return redirect('clerk')
elif User.objects.filter(email=email).exists():
messages.info(request, 'Email Taken')
user = User.objects.create_user(username=username, password=password1, email=email, first_name=first_name, last_name=last_name)
user.save()
messages.info(request, 'User Created')
return redirect('/clerk')
def model_form_upload(request):
if request.method == 'POST':
form = DocumentForm(request.POST, request.FILES)
if form.is_valid():
form.save()
return redirect('/clerk')
def booking(request):
username= request.POST['username']
dvdID= request.POST['dvdID']
DVD.objects.filter(id=dvdID).update(BookingPickup=username)
return redirect('home')
def checkout(request):
dvdID= request.POST['dvdID']
numOfDays=request.POST['numDaysBooked']
dvdPrice=request.POST['dvdPrice']
users_ID=request.POST['user_ID']
MovieTitle=request.POST['MovieTitle']
payment=request.POST['payment']
bill=int(numOfDays)*int(dvdPrice)
DVD.objects.filter(id=dvdID).update(NumDaysBooked=numOfDays,InStock=False)
RentDate= datetime.date.today()
DueDate=RentDate+datetime.timedelta(days=int(numOfDays))
t = datetime.datetime.now().strftime("%H%M%S")
TransactionNumber=payment+str(RentDate)[0:4]+str(RentDate)[8:10]+t
#Amount
trans = Transaction(users_ID=users_ID, TransactionNumber=TransactionNumber, RentDate=RentDate, DueDate=DueDate, MovieTitle=MovieTitle, Payment_Method=payment,Amount="R"+str(bill),dvdID=dvdID)
trans.save()
return redirect('/clerk')
def checkin(request):
dvdID= request.POST['dvdID']
DVD.objects.filter(id=dvdID).update(BookingPickup='None',InStock=True,NumDaysBooked=0)
return redirect('/clerk')
def deleteMovie(request):
dvdID= request.POST['dvdID']
DVD.objects.filter(id=dvdID).delete()
return redirect('/clerk')
def deleteTransaction(request):
transID= request.POST['transID']
Transaction.objects.filter(id=transID).delete()
return redirect('/transactions')
def deleteUser(request):
userID= request.POST['userID']
User.objects.filter(id=userID).delete()
return redirect('/userstbl')
def user_detail(request):
id = None
if request.user.is_authenticated:
id = request.user.id
print(id)
detail2 = Customer.objects.all()
detail1 = User.objects.filter( id = id )
#detail2 = Customer.objects.filter(Q(username__icontains=str(detail1[0]))).values()
#answers_list = list(detail2)
#myString=str(answers_list[0])
#import re
#myarray=re.split(':|,',myString)
#if len(myarray)>39:
# for i in range(len(myarray)):
# print(str(i)+" "+str(myarray[i]))
# phone_number=str(myarray[39])
# address=str(myarray[41])
# identification=str(myarray[43])
# return render(request, 'user_detail.html',{'detail1':detail1 , 'detail2' : detail2,'phone_number':phone_number,'identification':identification ,'address':address})
return render(request, 'user_detail.html',{'detail1':detail1 , 'detail2' : detail2})
def registerCustomer(request):
if request.method == 'POST':
first_name= request.POST['first_name']
last_name= request.POST['last_name']
phone_number= request.POST['phone_number']
address= request.POST['address']
identification= request.POST['identification']
email= request.POST['email']
password1= request.POST['password1']
password2= request.POST['password2']
username= request.POST['username']
if password1 == password2 :
if Customer.objects.filter(username=username).exists():
messages.info(request, 'Username Taken')
return redirect('register.html')
elif Customer.objects.filter(email=email).exists():
messages.info(request, 'Email Taken')
return redirect('register.html')
user = Customer.objects.create_user(phone_number=phone_number, address=address,identification=identification,username=username, password=password1, email=email, first_name=first_name, last_name=last_name)
# customer = Customer.objects.create_user(phone_number=phone_number,identification=identification,address=address)
user.save()
# customer.save()
messages.info(request, 'User Created')
# messages.info(request, 'Customer Created')
return redirect('login.html')
else:
print('password does not match')
messages.info(request, 'Password does not match')
return redirect('register.html')
return redirect('login.html')
else:
return render(request, 'register.html')
def updateCustomer(request):
if request.method == 'POST':
first_name= request.POST['first_name']
last_name= request.POST['last_name']
phone_number= request.POST['phone_number']
address= request.POST['address']
identification= request.POST['identification']
email= request.POST['email']
username= request.POST['username']
userID=request.POST['userID']
user = Customer.objects.filter(id=userID).update(phone_number=phone_number, address=address,identification=identification,username=username, email=email, first_name=first_name, last_name=last_name)
# customer = Customer.objects.create_user(phone_number=phone_number,identification=identification,address=address)
return redirect('home')
def updateUser(request):
if request.method == 'POST':
first_name= request.POST['first_name']
last_name= request.POST['last_name']
email= request.POST['email']
username= request.POST['username']
userID=request.POST['userID']
user = User.objects.filter(id=userID).update(username=username, email=email, first_name=first_name, last_name=last_name)
# customer = Customer.objects.create_user(phone_number=phone_number,identification=identification,address=address)
return redirect('home')
| [
"[email protected]"
] | |
611ca1b0710e080956b3f0259d5042c17ada5814 | bad62c2b0dfad33197db55b44efeec0bab405634 | /sdk/signalr/azure-mgmt-signalr/azure/mgmt/signalr/aio/operations/_usages_operations.py | aa1860efef37dbf2413c285639f2957501b5bfdb | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] | permissive | test-repo-billy/azure-sdk-for-python | 20c5a2486456e02456de17515704cb064ff19833 | cece86a8548cb5f575e5419864d631673be0a244 | refs/heads/master | 2022-10-25T02:28:39.022559 | 2022-10-18T06:05:46 | 2022-10-18T06:05:46 | 182,325,031 | 0 | 0 | MIT | 2019-07-25T22:28:52 | 2019-04-19T20:59:15 | Python | UTF-8 | Python | false | false | 5,150 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._usages_operations import build_list_request
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class UsagesOperations:
"""UsagesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.signalr.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def list(
self,
location: str,
**kwargs: Any
) -> AsyncIterable["_models.SignalRUsageList"]:
"""List resource usage quotas by location.
:param location: the location like "eastus".
:type location: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either SignalRUsageList or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.signalr.models.SignalRUsageList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SignalRUsageList"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_request(
location=location,
subscription_id=self._config.subscription_id,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_request(
location=location,
subscription_id=self._config.subscription_id,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("SignalRUsageList", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.SignalRService/locations/{location}/usages'} # type: ignore
| [
"[email protected]"
] | |
702e397972e162ab5ddf2af196684a76f393bd61 | 71673d845952b50986d1c21dc5bbbcab2a2a2651 | /introduction_to_lxml.py | 0783fcf78d6a6982eff93f7b0558518976c20d60 | [] | no_license | afcarl/introductionToWebScraping | 77a44bfb7655e44231bed216d37b015e3cf52a5c | d1039aeee87365f2807dd198e53bd1bb6224a550 | refs/heads/master | 2020-03-26T04:23:54.052825 | 2015-06-18T14:23:40 | 2015-06-18T14:23:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 507 | py | import requests
import lxml.html
base_url = "https://www.google.com"
def scrape(url,base_url,depth):
if depth == 0:
return True
r = requests.get(url)
html = lxml.html.fromstring(r.text)
links = html.xpath("//a/@href")
for ind,link in enumerate(links):
if "http" in link:
print link
else:
print base_url+link
links[ind] = base_url+link
for link in links:
scrape(link,base_url,depth-1)
scrape(base_url,base_url,5)
| [
"[email protected]"
] | |
736a6dd319cdb36e01d57e42fdf371c5db550c22 | 6fcfb638fa725b6d21083ec54e3609fc1b287d9e | /python/ghwatson_faststyle/faststyle-master/losses.py | 7a4cc6b60cea27257d8a4820a88ca8fb5d7f1574 | [] | no_license | LiuFang816/SALSTM_py_data | 6db258e51858aeff14af38898fef715b46980ac1 | d494b3041069d377d6a7a9c296a14334f2fa5acc | refs/heads/master | 2022-12-25T06:39:52.222097 | 2019-12-12T08:49:07 | 2019-12-12T08:49:07 | 227,546,525 | 10 | 7 | null | 2022-12-19T02:53:01 | 2019-12-12T07:29:39 | Python | UTF-8 | Python | false | false | 3,526 | py | """
This file contains the different loss functions.
File author: Grant Watson
Date: Feb 2017
"""
import tensorflow as tf
import numpy as np
def content_loss(content_layers, target_content_layers,
content_weights):
"""Defines the content loss function.
:param content_layers
List of tensors for layers derived from training graph.
:param target_content_layers
List of placeholders to be filled with content layer data.
:param content_weights
List of floats to be used as weights for content layers.
"""
assert(len(target_content_layers) == len(content_layers))
num_content_layers = len(target_content_layers)
# Content loss
content_losses = []
for i in xrange(num_content_layers):
content_layer = content_layers[i]
target_content_layer = target_content_layers[i]
content_weight = content_weights[i]
loss = tf.reduce_sum(tf.squared_difference(content_layer,
target_content_layer))
loss = content_weight * loss
_, h, w, c = content_layer.get_shape().as_list()
num_elements = h * w * c
loss = loss / tf.cast(num_elements, tf.float32)
content_losses.append(loss)
content_loss = tf.add_n(content_losses, name='content_loss')
return content_loss
def style_loss(grams, target_grams, style_weights):
"""Defines the style loss function.
:param grams
List of tensors for Gram matrices derived from training graph.
:param target_grams
List of numpy arrays for Gram matrices precomputed from style image.
:param style_weights
List of floats to be used as weights for style layers.
"""
assert(len(grams) == len(target_grams))
num_style_layers = len(target_grams)
# Style loss
style_losses = []
for i in xrange(num_style_layers):
gram, target_gram = grams[i], target_grams[i]
style_weight = style_weights[i]
_, c1, c2 = gram.get_shape().as_list()
size = c1*c2
loss = tf.reduce_sum(tf.square(gram - tf.constant(target_gram)))
loss = style_weight * loss / size
style_losses.append(loss)
style_loss = tf.add_n(style_losses, name='style_loss')
return style_loss
def tv_loss(X):
    """Creates 2d TV loss using X as the input tensor. Acts on different colour
    channels individually, and uses convolution as a means of calculating the
    differences.

    :param X:
        4D Tensor
    """
    # Build the difference kernels by hand. Each 3x3 sub-block is
    # (+/-) the identity matrix so the RGB channels never mix inside
    # the convolution. The kernels are laid out as:
    #     v_filter = [ [(3x3)], [(3x3)] ]
    #     h_filter = [ [(3x3), (3x3)] ]
    eye = np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]])
    v_kernel = tf.constant(np.array([[eye], [-1 * eye]]), tf.float32)
    h_kernel = tf.constant(np.array([[eye, -1 * eye]]), tf.float32)

    # 'VALID' convolutions produce the per-axis neighbour differences.
    vdiff = tf.nn.conv2d(X, v_kernel, strides=[1, 1, 1, 1], padding='VALID')
    hdiff = tf.nn.conv2d(X, h_kernel, strides=[1, 1, 1, 1], padding='VALID')

    return tf.reduce_sum(tf.square(hdiff)) + tf.reduce_sum(tf.square(vdiff))
| [
"[email protected]"
] | |
def LetterCapitalize(str):
    """Capitalize the first letter of each word in the input string."""
    # str.title() upper-cases the first alphabetic character of every word.
    titled = str.title()
    return titled


print(LetterCapitalize("h3llo yo people"))
"[email protected]"
] | |
12b99157ef91baeba1b980e70567b5588589fb0c | a616d3f1491eae4a702d18ab30e2b3cfd43e1563 | /scrollbar.py | a43c58dca1721d5742a1355ef8ec4ffaf02cb63d | [] | no_license | supriadi-yusuf/python-GUI | 9d15c27fcaabb55aa61ccabef2afcc3f9a26370f | 557ab9720442d7d810567441119c3efa4b1b7b34 | refs/heads/master | 2020-06-29T16:47:58.236428 | 2019-08-07T01:09:09 | 2019-08-07T01:09:09 | 200,570,926 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 579 | py | from tkinter import (
Tk, RIGHT, Y, Scrollbar, Listbox, END, BOTH, LEFT
)
# Demo: a Listbox whose vertical scrolling is driven by a Scrollbar.
layar = Tk()
layar.title("Scrollbar")

myScrollbar = Scrollbar(master=layar)

# The listbox reports its visible range to the scrollbar via ``set``.
myList = Listbox(master=layar, yscrollcommand=myScrollbar.set)
for item_no in range(100):
    myList.insert(END, "This is line number " + str(item_no))

myList.pack(side=LEFT, fill=Y)
myScrollbar.pack(side=LEFT, fill=Y)

# Dragging the scrollbar scrolls the listbox through ``yview``.
myScrollbar.config(command=myList.yview)

layar.mainloop()
| [
"[email protected]"
] | |
40a5badf20a8815924f3d9ea4e245dba81149a88 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03588/s910432178.py | 314d7a583d1067ee67cd31e93342774353c07a3a | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 301 | py | import sys
def solve():
    """Read the problem input from stdin and print the answer.

    Input format: first line ``n``, then ``n`` lines with two integers
    ``a_i b_i`` each.
    """
    readline = sys.stdin.buffer.readline
    n = int(readline())
    ab = [list(map(int, readline().split())) for _ in range(n)]
    ab.sort()
    # After sorting by a_i, only the extreme pairs contribute.
    # (Removed the unused `mod = 10 ** 9 + 7` local from the original.)
    print((ab[-1][0] - ab[0][0] + 1) + (ab[0][0] - 1) + (ab[-1][1]))


if __name__ == '__main__':
    solve()
| [
"[email protected]"
] | |
530008283c1d95b2bbd68b84c9530f2593eceb96 | 5de718a2ab00460f59621e1e3c100b37c0853f61 | /env/Lib/site-packages/flask/app.py | b321c63f7c4bba6de67231bffec304fc04d9759d | [] | no_license | HenryVo31/Connect | 3fd60d893edd1199663878b7b68505e57a410dd6 | 3783e5b4d6b58f19e37ccff66501cb78c35c1500 | refs/heads/master | 2023-02-13T14:21:12.692446 | 2021-01-08T21:40:16 | 2021-01-08T21:40:16 | 295,485,939 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 97,137 | py | # -*- coding: utf-8 -*-
"""
flask.app
~~~~~~~~~
This module implements the central WSGI application object.
:copyright: © 2010 by the Pallets team.
:license: BSD, see LICENSE for more details.
"""
import os
import sys
import warnings
from datetime import timedelta
from functools import update_wrapper
from itertools import chain
from threading import Lock
from werkzeug.datastructures import Headers, ImmutableDict
from werkzeug.exceptions import BadRequest, BadRequestKeyError, HTTPException, \
InternalServerError, MethodNotAllowed, default_exceptions
from werkzeug.routing import BuildError, Map, RequestRedirect, \
RoutingException, Rule
from . import cli, json
from ._compat import integer_types, reraise, string_types, text_type
from .config import Config, ConfigAttribute
from .ctx import AppContext, RequestContext, _AppCtxGlobals
from .globals import _request_ctx_stack, g, request, session
from .helpers import (
_PackageBoundObject,
_endpoint_from_view_func, find_package, get_env, get_debug_flag,
get_flashed_messages, locked_cached_property, url_for, get_load_dotenv
)
from .logging import create_logger
from .sessions import SecureCookieSessionInterface
from .signals import appcontext_tearing_down, got_request_exception, \
request_finished, request_started, request_tearing_down
from .templating import DispatchingJinjaLoader, Environment, \
_default_template_ctx_processor
from .wrappers import Request, Response
# a singleton sentinel value for parameter defaults
# (compared by identity with ``is``; lets callers pass ``None`` as a value)
_sentinel = object()
def _make_timedelta(value):
if not isinstance(value, timedelta):
return timedelta(seconds=value)
return value
def setupmethod(f):
    """Wrap a setup method so that, in debug mode, calling it after the
    first request was handled raises an :exc:`AssertionError`.

    Registering setup code that late almost always means a module was
    imported too late, so failing loudly points at the real bug.
    """
    def wrapper_func(self, *args, **kwargs):
        if not (self.debug and self._got_first_request):
            return f(self, *args, **kwargs)
        raise AssertionError(
            'A setup function was called after the first request '
            'was handled. This usually indicates a bug in the application '
            'where a module was not imported and decorators or other '
            'functionality was called too late.\nTo fix this make sure to '
            'import all your view modules, database models and everything '
            'related at a central place before the application starts '
            'serving requests.'
        )
    return update_wrapper(wrapper_func, f)
class Flask(_PackageBoundObject):
"""The flask object implements a WSGI application and acts as the central
object. It is passed the name of the module or package of the
application. Once it is created it will act as a central registry for
the view functions, the URL rules, template configuration and much more.
The name of the package is used to resolve resources from inside the
package or the folder the module is contained in depending on if the
package parameter resolves to an actual python package (a folder with
an :file:`__init__.py` file inside) or a standard module (just a ``.py`` file).
For more information about resource loading, see :func:`open_resource`.
Usually you create a :class:`Flask` instance in your main module or
in the :file:`__init__.py` file of your package like this::
from flask import Flask
app = Flask(__name__)
.. admonition:: About the First Parameter
The idea of the first parameter is to give Flask an idea of what
belongs to your application. This name is used to find resources
on the filesystem, can be used by extensions to improve debugging
information and a lot more.
So it's important what you provide there. If you are using a single
module, `__name__` is always the correct value. If you however are
using a package, it's usually recommended to hardcode the name of
your package there.
For example if your application is defined in :file:`yourapplication/app.py`
you should create it with one of the two versions below::
app = Flask('yourapplication')
app = Flask(__name__.split('.')[0])
Why is that? The application will work even with `__name__`, thanks
to how resources are looked up. However it will make debugging more
painful. Certain extensions can make assumptions based on the
import name of your application. For example the Flask-SQLAlchemy
extension will look for the code in your application that triggered
an SQL query in debug mode. If the import name is not properly set
up, that debugging information is lost. (For example it would only
pick up SQL queries in `yourapplication.app` and not
`yourapplication.views.frontend`)
.. versionadded:: 0.7
The `static_url_path`, `static_folder`, and `template_folder`
parameters were added.
.. versionadded:: 0.8
The `instance_path` and `instance_relative_config` parameters were
added.
.. versionadded:: 0.11
The `root_path` parameter was added.
.. versionadded:: 1.0
The ``host_matching`` and ``static_host`` parameters were added.
.. versionadded:: 1.0
The ``subdomain_matching`` parameter was added. Subdomain
matching needs to be enabled manually now. Setting
:data:`SERVER_NAME` does not implicitly enable it.
:param import_name: the name of the application package
:param static_url_path: can be used to specify a different path for the
static files on the web. Defaults to the name
of the `static_folder` folder.
:param static_folder: the folder with static files that should be served
at `static_url_path`. Defaults to the ``'static'``
folder in the root path of the application.
:param static_host: the host to use when adding the static route.
Defaults to None. Required when using ``host_matching=True``
with a ``static_folder`` configured.
:param host_matching: set ``url_map.host_matching`` attribute.
Defaults to False.
:param subdomain_matching: consider the subdomain relative to
:data:`SERVER_NAME` when matching routes. Defaults to False.
:param template_folder: the folder that contains the templates that should
be used by the application. Defaults to
``'templates'`` folder in the root path of the
application.
:param instance_path: An alternative instance path for the application.
By default the folder ``'instance'`` next to the
package or module is assumed to be the instance
path.
:param instance_relative_config: if set to ``True`` relative filenames
for loading the config are assumed to
be relative to the instance path instead
of the application root.
:param root_path: Flask by default will automatically calculate the path
to the root of the application. In certain situations
this cannot be achieved (for instance if the package
is a Python 3 namespace package) and needs to be
manually defined.
"""
#: The class that is used for request objects. See :class:`~flask.Request`
#: for more information.
request_class = Request
#: The class that is used for response objects. See
#: :class:`~flask.Response` for more information.
response_class = Response
#: The class that is used for the Jinja environment.
#:
#: .. versionadded:: 0.11
jinja_environment = Environment
#: The class that is used for the :data:`~flask.g` instance.
#:
#: Example use cases for a custom class:
#:
#: 1. Store arbitrary attributes on flask.g.
#: 2. Add a property for lazy per-request database connectors.
#: 3. Return None instead of AttributeError on unexpected attributes.
#: 4. Raise exception if an unexpected attr is set, a "controlled" flask.g.
#:
#: In Flask 0.9 this property was called `request_globals_class` but it
#: was changed in 0.10 to :attr:`app_ctx_globals_class` because the
#: flask.g object is now application context scoped.
#:
#: .. versionadded:: 0.10
app_ctx_globals_class = _AppCtxGlobals
#: The class that is used for the ``config`` attribute of this app.
#: Defaults to :class:`~flask.Config`.
#:
#: Example use cases for a custom class:
#:
#: 1. Default values for certain config options.
#: 2. Access to config values through attributes in addition to keys.
#:
#: .. versionadded:: 0.11
config_class = Config
#: The testing flag. Set this to ``True`` to enable the test mode of
#: Flask extensions (and in the future probably also Flask itself).
#: For example this might activate test helpers that have an
#: additional runtime cost which should not be enabled by default.
#:
#: If this is enabled and PROPAGATE_EXCEPTIONS is not changed from the
#: default it's implicitly enabled.
#:
#: This attribute can also be configured from the config with the
#: ``TESTING`` configuration key. Defaults to ``False``.
testing = ConfigAttribute('TESTING')
#: If a secret key is set, cryptographic components can use this to
#: sign cookies and other things. Set this to a complex random value
#: when you want to use the secure cookie for instance.
#:
#: This attribute can also be configured from the config with the
#: :data:`SECRET_KEY` configuration key. Defaults to ``None``.
secret_key = ConfigAttribute('SECRET_KEY')
#: The secure cookie uses this for the name of the session cookie.
#:
#: This attribute can also be configured from the config with the
#: ``SESSION_COOKIE_NAME`` configuration key. Defaults to ``'session'``
session_cookie_name = ConfigAttribute('SESSION_COOKIE_NAME')
#: A :class:`~datetime.timedelta` which is used to set the expiration
#: date of a permanent session. The default is 31 days which makes a
#: permanent session survive for roughly one month.
#:
#: This attribute can also be configured from the config with the
#: ``PERMANENT_SESSION_LIFETIME`` configuration key. Defaults to
#: ``timedelta(days=31)``
permanent_session_lifetime = ConfigAttribute('PERMANENT_SESSION_LIFETIME',
get_converter=_make_timedelta)
#: A :class:`~datetime.timedelta` which is used as default cache_timeout
#: for the :func:`send_file` functions. The default is 12 hours.
#:
#: This attribute can also be configured from the config with the
#: ``SEND_FILE_MAX_AGE_DEFAULT`` configuration key. This configuration
#: variable can also be set with an integer value used as seconds.
#: Defaults to ``timedelta(hours=12)``
send_file_max_age_default = ConfigAttribute('SEND_FILE_MAX_AGE_DEFAULT',
get_converter=_make_timedelta)
#: Enable this if you want to use the X-Sendfile feature. Keep in
#: mind that the server has to support this. This only affects files
#: sent with the :func:`send_file` method.
#:
#: .. versionadded:: 0.2
#:
#: This attribute can also be configured from the config with the
#: ``USE_X_SENDFILE`` configuration key. Defaults to ``False``.
use_x_sendfile = ConfigAttribute('USE_X_SENDFILE')
#: The JSON encoder class to use. Defaults to :class:`~flask.json.JSONEncoder`.
#:
#: .. versionadded:: 0.10
json_encoder = json.JSONEncoder
#: The JSON decoder class to use. Defaults to :class:`~flask.json.JSONDecoder`.
#:
#: .. versionadded:: 0.10
json_decoder = json.JSONDecoder
#: Options that are passed directly to the Jinja2 environment.
jinja_options = ImmutableDict(
extensions=['jinja2.ext.autoescape', 'jinja2.ext.with_']
)
#: Default configuration parameters.
default_config = ImmutableDict({
'ENV': None,
'DEBUG': None,
'TESTING': False,
'PROPAGATE_EXCEPTIONS': None,
'PRESERVE_CONTEXT_ON_EXCEPTION': None,
'SECRET_KEY': None,
'PERMANENT_SESSION_LIFETIME': timedelta(days=31),
'USE_X_SENDFILE': False,
'SERVER_NAME': None,
'APPLICATION_ROOT': '/',
'SESSION_COOKIE_NAME': 'session',
'SESSION_COOKIE_DOMAIN': None,
'SESSION_COOKIE_PATH': None,
'SESSION_COOKIE_HTTPONLY': True,
'SESSION_COOKIE_SECURE': False,
'SESSION_COOKIE_SAMESITE': None,
'SESSION_REFRESH_EACH_REQUEST': True,
'MAX_CONTENT_LENGTH': None,
'SEND_FILE_MAX_AGE_DEFAULT': timedelta(hours=12),
'TRAP_BAD_REQUEST_ERRORS': None,
'TRAP_HTTP_EXCEPTIONS': False,
'EXPLAIN_TEMPLATE_LOADING': False,
'PREFERRED_URL_SCHEME': 'http',
'JSON_AS_ASCII': True,
'JSON_SORT_KEYS': True,
'JSONIFY_PRETTYPRINT_REGULAR': False,
'JSONIFY_MIMETYPE': 'application/json',
'TEMPLATES_AUTO_RELOAD': None,
'MAX_COOKIE_SIZE': 4093,
})
#: The rule object to use for URL rules created. This is used by
#: :meth:`add_url_rule`. Defaults to :class:`werkzeug.routing.Rule`.
#:
#: .. versionadded:: 0.7
url_rule_class = Rule
#: the test client that is used with when `test_client` is used.
#:
#: .. versionadded:: 0.7
test_client_class = None
#: The :class:`~click.testing.CliRunner` subclass, by default
#: :class:`~flask.testing.FlaskCliRunner` that is used by
#: :meth:`test_cli_runner`. Its ``__init__`` method should take a
#: Flask app object as the first argument.
#:
#: .. versionadded:: 1.0
test_cli_runner_class = None
#: the session interface to use. By default an instance of
#: :class:`~flask.sessions.SecureCookieSessionInterface` is used here.
#:
#: .. versionadded:: 0.8
session_interface = SecureCookieSessionInterface()
# TODO remove the next three attrs when Sphinx :inherited-members: works
# https://github.com/sphinx-doc/sphinx/issues/741
#: The name of the package or module that this app belongs to. Do not
#: change this once it is set by the constructor.
import_name = None
#: Location of the template files to be added to the template lookup.
#: ``None`` if templates should not be added.
template_folder = None
#: Absolute path to the package on the filesystem. Used to look up
#: resources contained in the package.
root_path = None
def __init__(
    self,
    import_name,
    static_url_path=None,
    static_folder='static',
    static_host=None,
    host_matching=False,
    subdomain_matching=False,
    template_folder='templates',
    instance_path=None,
    instance_relative_config=False,
    root_path=None
):
    """Set up the application object.

    The constructor parameters are documented in detail on the class
    docstring above. Besides storing them, this creates the config,
    the registries for views / request hooks / blueprints, the URL map
    (including the static file route) and the ``flask`` CLI group.
    """
    _PackageBoundObject.__init__(
        self,
        import_name,
        template_folder=template_folder,
        root_path=root_path
    )

    # Only override the _PackageBoundObject defaults when the caller
    # supplied explicit values.
    if static_url_path is not None:
        self.static_url_path = static_url_path

    if static_folder is not None:
        self.static_folder = static_folder

    if instance_path is None:
        instance_path = self.auto_find_instance_path()
    elif not os.path.isabs(instance_path):
        raise ValueError(
            'If an instance path is provided it must be absolute.'
            ' A relative path was given instead.'
        )

    #: Holds the path to the instance folder.
    #:
    #: .. versionadded:: 0.8
    self.instance_path = instance_path

    #: The configuration dictionary as :class:`Config`. This behaves
    #: exactly like a regular dictionary but supports additional methods
    #: to load a config from files.
    self.config = self.make_config(instance_relative_config)

    #: A dictionary of all view functions registered. The keys will
    #: be function names which are also used to generate URLs and
    #: the values are the function objects themselves.
    #: To register a view function, use the :meth:`route` decorator.
    self.view_functions = {}

    #: A dictionary of all registered error handlers. The key is ``None``
    #: for error handlers active on the application, otherwise the key is
    #: the name of the blueprint. Each key points to another dictionary
    #: where the key is the status code of the http exception. The
    #: special key ``None`` points to a list of tuples where the first item
    #: is the class for the instance check and the second the error handler
    #: function.
    #:
    #: To register an error handler, use the :meth:`errorhandler`
    #: decorator.
    self.error_handler_spec = {}

    #: A list of functions that are called when :meth:`url_for` raises a
    #: :exc:`~werkzeug.routing.BuildError`. Each function registered here
    #: is called with `error`, `endpoint` and `values`. If a function
    #: returns ``None`` or raises a :exc:`BuildError` the next function is
    #: tried.
    #:
    #: .. versionadded:: 0.9
    self.url_build_error_handlers = []

    #: A dictionary with lists of functions that will be called at the
    #: beginning of each request. The key of the dictionary is the name of
    #: the blueprint this function is active for, or ``None`` for all
    #: requests. To register a function, use the :meth:`before_request`
    #: decorator.
    self.before_request_funcs = {}

    #: A list of functions that will be called at the beginning of the
    #: first request to this instance. To register a function, use the
    #: :meth:`before_first_request` decorator.
    #:
    #: .. versionadded:: 0.8
    self.before_first_request_funcs = []

    #: A dictionary with lists of functions that should be called after
    #: each request. The key of the dictionary is the name of the blueprint
    #: this function is active for, ``None`` for all requests. This can for
    #: example be used to close database connections. To register a function
    #: here, use the :meth:`after_request` decorator.
    self.after_request_funcs = {}

    #: A dictionary with lists of functions that are called after
    #: each request, even if an exception has occurred. The key of the
    #: dictionary is the name of the blueprint this function is active for,
    #: ``None`` for all requests. These functions are not allowed to modify
    #: the request, and their return values are ignored. If an exception
    #: occurred while processing the request, it gets passed to each
    #: teardown_request function. To register a function here, use the
    #: :meth:`teardown_request` decorator.
    #:
    #: .. versionadded:: 0.7
    self.teardown_request_funcs = {}

    #: A list of functions that are called when the application context
    #: is destroyed. Since the application context is also torn down
    #: if the request ends this is the place to store code that disconnects
    #: from databases.
    #:
    #: .. versionadded:: 0.9
    self.teardown_appcontext_funcs = []

    #: A dictionary with lists of functions that are called before the
    #: :attr:`before_request_funcs` functions. The key of the dictionary is
    #: the name of the blueprint this function is active for, or ``None``
    #: for all requests. To register a function, use
    #: :meth:`url_value_preprocessor`.
    #:
    #: .. versionadded:: 0.7
    self.url_value_preprocessors = {}

    #: A dictionary with lists of functions that can be used as URL value
    #: preprocessors. The key ``None`` here is used for application wide
    #: callbacks, otherwise the key is the name of the blueprint.
    #: Each of these functions has the chance to modify the dictionary
    #: of URL values before they are used as the keyword arguments of the
    #: view function. For each function registered this one should also
    #: provide a :meth:`url_defaults` function that adds the parameters
    #: automatically again that were removed that way.
    #:
    #: .. versionadded:: 0.7
    self.url_default_functions = {}

    #: A dictionary with list of functions that are called without argument
    #: to populate the template context. The key of the dictionary is the
    #: name of the blueprint this function is active for, ``None`` for all
    #: requests. Each returns a dictionary that the template context is
    #: updated with. To register a function here, use the
    #: :meth:`context_processor` decorator.
    self.template_context_processors = {
        None: [_default_template_ctx_processor]
    }

    #: A list of shell context processor functions that should be run
    #: when a shell context is created.
    #:
    #: .. versionadded:: 0.11
    self.shell_context_processors = []

    #: all the attached blueprints in a dictionary by name. Blueprints
    #: can be attached multiple times so this dictionary does not tell
    #: you how often they got attached.
    #:
    #: .. versionadded:: 0.7
    self.blueprints = {}
    self._blueprint_order = []

    #: a place where extensions can store application specific state. For
    #: example this is where an extension could store database engines and
    #: similar things. For backwards compatibility extensions should register
    #: themselves like this::
    #:
    #:     if not hasattr(app, 'extensions'):
    #:         app.extensions = {}
    #:     app.extensions['extensionname'] = SomeObject()
    #:
    #: The key must match the name of the extension module. For example in
    #: case of a "Flask-Foo" extension in `flask_foo`, the key would be
    #: ``'foo'``.
    #:
    #: .. versionadded:: 0.7
    self.extensions = {}

    #: The :class:`~werkzeug.routing.Map` for this instance. You can use
    #: this to change the routing converters after the class was created
    #: but before any routes are connected. Example::
    #:
    #:    from werkzeug.routing import BaseConverter
    #:
    #:    class ListConverter(BaseConverter):
    #:        def to_python(self, value):
    #:            return value.split(',')
    #:        def to_url(self, values):
    #:            return ','.join(super(ListConverter, self).to_url(value)
    #:                            for value in values)
    #:
    #:    app = Flask(__name__)
    #:    app.url_map.converters['list'] = ListConverter
    self.url_map = Map()

    self.url_map.host_matching = host_matching
    self.subdomain_matching = subdomain_matching

    # tracks internally if the application already handled at least one
    # request.
    self._got_first_request = False
    self._before_request_lock = Lock()

    # Add a static route using the provided static_url_path, static_host,
    # and static_folder if there is a configured static_folder.
    # Note we do this without checking if static_folder exists.
    # For one, it might be created while the server is running (e.g. during
    # development). Also, Google App Engine stores static files somewhere
    if self.has_static_folder:
        assert bool(static_host) == host_matching, 'Invalid static_host/host_matching combination'
        self.add_url_rule(
            self.static_url_path + '/<path:filename>',
            endpoint='static',
            host=static_host,
            view_func=self.send_static_file
        )

    #: The click command line context for this application. Commands
    #: registered here show up in the :command:`flask` command once the
    #: application has been discovered. The default commands are
    #: provided by Flask itself and can be overridden.
    #:
    #: This is an instance of a :class:`click.Group` object.
    self.cli = cli.AppGroup(self.name)
@locked_cached_property
def name(self):
    """The display name of the application.

    Usually this is the import name, except when the application was
    started from an interactive session or a script run as ``__main__``;
    then the name is derived from the run file instead. It can be set
    and overridden to change the value.

    .. versionadded:: 0.8
    """
    if self.import_name != '__main__':
        return self.import_name
    # Running as a script: derive the name from the file the interpreter
    # was started with, if there is one.
    script = getattr(sys.modules['__main__'], '__file__', None)
    if script is None:
        return '__main__'
    base = os.path.basename(script)
    return os.path.splitext(base)[0]
@property
def propagate_exceptions(self):
    """Whether exceptions should be propagated to the caller.

    Returns the ``PROPAGATE_EXCEPTIONS`` configuration value when it is
    set; otherwise defaults to ``True`` in testing or debug mode.

    .. versionadded:: 0.7
    """
    configured = self.config['PROPAGATE_EXCEPTIONS']
    if configured is None:
        return self.testing or self.debug
    return configured
@property
def preserve_context_on_exception(self):
    """Whether the request context is kept around when an exception
    happens.

    Returns the ``PRESERVE_CONTEXT_ON_EXCEPTION`` configuration value
    when it is set; otherwise follows the debug flag.

    .. versionadded:: 0.7
    """
    configured = self.config['PRESERVE_CONTEXT_ON_EXCEPTION']
    if configured is None:
        return self.debug
    return configured
@locked_cached_property
def logger(self):
    """The ``'flask.app'`` logger, a standard Python
    :class:`~logging.Logger`.

    In debug mode, the logger's :attr:`~logging.Logger.level` will be set
    to :data:`~logging.DEBUG`.

    If there are no handlers configured, a default handler will be added.
    See :ref:`logging` for more information.

    .. versionchanged:: 1.0
        Behavior was simplified. The logger is always named
        ``flask.app``. The level is only set during configuration, it
        doesn't check ``app.debug`` each time. Only one format is used,
        not different ones depending on ``app.debug``. No handlers are
        removed, and a handler is only added if no handlers are already
        configured.

    .. versionadded:: 0.3
    """
    # Built once on first access and cached by ``locked_cached_property``.
    return create_logger(self)
@locked_cached_property
def jinja_env(self):
    """The Jinja2 environment used to load templates.

    Created lazily on first access via
    :meth:`create_jinja_environment` and cached for the lifetime of
    the application object.
    """
    return self.create_jinja_environment()
@property
def got_first_request(self):
    """This attribute is set to ``True`` if the application started
    handling the first request.

    .. versionadded:: 0.8
    """
    # Read-only view of ``self._got_first_request`` (initialized to
    # ``False`` in ``__init__``).
    return self._got_first_request
def make_config(self, instance_relative=False):
    """Create the :attr:`config` attribute for the Flask constructor.

    :param instance_relative: passed in from the Flask constructor
        (there named `instance_relative_config`); when ``True`` the
        config is relative to the instance path instead of the root
        path of the application.

    .. versionadded:: 0.8
    """
    root_path = self.instance_path if instance_relative else self.root_path
    # Start from the class-level defaults, resolving the
    # environment-driven values at creation time.
    defaults = dict(self.default_config)
    defaults['ENV'] = get_env()
    defaults['DEBUG'] = get_debug_flag()
    return self.config_class(root_path, defaults)
def auto_find_instance_path(self):
    """Locate the instance path when it was not provided to the
    constructor of the application class.

    Essentially computes the path to a folder named ``instance`` next
    to your main file or the package.

    .. versionadded:: 0.8
    """
    prefix, package_path = find_package(self.import_name)
    if prefix is not None:
        # Installed package: keep instance data under ``<prefix>/var``.
        return os.path.join(prefix, 'var', self.name + '-instance')
    # Uninstalled package or plain module: a sibling ``instance`` folder.
    return os.path.join(package_path, 'instance')
def open_instance_resource(self, resource, mode='rb'):
    """Opens a resource from the application's instance folder
    (:attr:`instance_path`). Otherwise works like
    :meth:`open_resource`. Instance resources can also be opened for
    writing.

    :param resource: the name of the resource. To access resources within
                     subfolders use forward slashes as separator.
    :param mode: resource file opening mode, default is 'rb'.
    :return: the opened file object; the caller is responsible for
             closing it.
    """
    return open(os.path.join(self.instance_path, resource), mode)
def _get_templates_auto_reload(self):
    """Reload templates when they are changed. Used by
    :meth:`create_jinja_environment`.

    This attribute can be configured with :data:`TEMPLATES_AUTO_RELOAD`. If
    not set, it will be enabled in debug mode.

    .. versionadded:: 1.0
        This property was added but the underlying config and behavior
        already existed.
    """
    rv = self.config['TEMPLATES_AUTO_RELOAD']
    # Fall back to the debug flag when the config key is unset (None).
    return rv if rv is not None else self.debug

def _set_templates_auto_reload(self, value):
    self.config['TEMPLATES_AUTO_RELOAD'] = value

templates_auto_reload = property(
    _get_templates_auto_reload, _set_templates_auto_reload
)
# Drop the helper names so only the ``templates_auto_reload`` property
# remains visible on the class.
del _get_templates_auto_reload, _set_templates_auto_reload
def create_jinja_environment(self):
    """Create the Jinja2 environment based on :attr:`jinja_options`
    and :meth:`select_jinja_autoescape`. Since 0.7 this also adds
    the Jinja2 globals and filters after initialization. Override
    this function to customize the behavior.

    .. versionadded:: 0.5

    .. versionchanged:: 0.11
       ``Environment.auto_reload`` set in accordance with
       ``TEMPLATES_AUTO_RELOAD`` configuration option.
    """
    options = dict(self.jinja_options)

    # Only fill in defaults for settings that jinja_options left out,
    # so subclasses overriding jinja_options keep full control.
    if 'autoescape' not in options:
        options['autoescape'] = self.select_jinja_autoescape
    if 'auto_reload' not in options:
        options['auto_reload'] = self.templates_auto_reload

    env = self.jinja_environment(self, **options)
    env.globals.update(
        url_for=url_for,
        get_flashed_messages=get_flashed_messages,
        config=self.config,
        # request, session and g are normally added with the context
        # processor for efficiency reasons, but imported templates need
        # the proxies available directly as well.
        request=request,
        session=session,
        g=g
    )
    env.filters['tojson'] = json.tojson_filter
    return env
def create_global_jinja_loader(self):
    """Creates the loader for the Jinja2 environment. Can be used to
    override just the loader and keeping the rest unchanged. It's
    discouraged to override this function. Instead one should override
    the :meth:`jinja_loader` function instead.

    The global loader dispatches between the loaders of the application
    and the individual blueprints.

    .. versionadded:: 0.7
    """
    # A single dispatching loader serves both application and
    # blueprint templates.
    return DispatchingJinjaLoader(self)
def select_jinja_autoescape(self, filename):
    """Decide whether autoescaping should be active for a template.

    Autoescaping is enabled for markup-like extensions (``.html``,
    ``.htm``, ``.xml``, ``.xhtml``) and whenever no template name is
    given.

    .. versionadded:: 0.5
    """
    if filename is None:
        return True
    markup_suffixes = ('.html', '.htm', '.xml', '.xhtml')
    return filename.endswith(markup_suffixes)
def update_template_context(self, context):
    """Update the template context with some commonly used variables.

    Injects request, session, config and g into the template context,
    plus everything the registered template context processors return.
    As of Flask 0.6, values already present in the context are never
    overridden by a context processor returning the same key.

    :param context: the context as a dictionary that is updated in place
                    to add extra variables.
    """
    funcs = self.template_context_processors[None]
    reqctx = _request_ctx_stack.top
    if reqctx is not None:
        bp = reqctx.request.blueprint
        # Blueprint-local processors run after the application-wide ones.
        if bp is not None and bp in self.template_context_processors:
            funcs = chain(funcs, self.template_context_processors[bp])

    original = dict(context)
    for processor in funcs:
        context.update(processor())
    # Restore the caller's values so they always win over processors;
    # this makes it easy to add variables in processors without
    # breaking existing views.
    context.update(original)
def make_shell_context(self):
    """Build the context dict for an interactive shell for this
    application by running every registered shell context processor.

    .. versionadded:: 0.11
    """
    context = {'app': self, 'g': g}
    for processor in self.shell_context_processors:
        context.update(processor())
    return context
#: What environment the app is running in. Flask and extensions may
#: enable behaviors based on the environment, such as enabling debug
#: mode. This maps to the :data:`ENV` config key. This is set by the
#: :envvar:`FLASK_ENV` environment variable and may not behave as
#: expected if set in code.
#:
#: **Do not enable development when deploying in production.**
#:
#: Default: ``'production'``
# NOTE(review): ConfigAttribute presumably proxies this attribute to
# ``self.config['ENV']`` — confirm against the config module.
env = ConfigAttribute('ENV')
def _get_debug(self):
    # Getter backing the ``debug`` property below.
    return self.config['DEBUG']

def _set_debug(self, value):
    self.config['DEBUG'] = value
    # Keep Jinja's template auto-reload in sync with the new debug
    # setting; ``templates_auto_reload`` is derived from config.
    self.jinja_env.auto_reload = self.templates_auto_reload

#: Whether debug mode is enabled. When using ``flask run`` to start
#: the development server, an interactive debugger will be shown for
#: unhandled exceptions, and the server will be reloaded when code
#: changes. This maps to the :data:`DEBUG` config key. This is
#: enabled when :attr:`env` is ``'development'`` and is overridden
#: by the ``FLASK_DEBUG`` environment variable. It may not behave as
#: expected if set in code.
#:
#: **Do not enable debug mode when deploying in production.**
#:
#: Default: ``True`` if :attr:`env` is ``'development'``, or
#: ``False`` otherwise.
debug = property(_get_debug, _set_debug)
# The helper functions are only needed to build the property; drop
# them so they do not leak into the class namespace.
del _get_debug, _set_debug
def run(self, host=None, port=None, debug=None,
        load_dotenv=True, **options):
    """Runs the application on a local development server.

    Do not use ``run()`` in a production setting. It is not intended to
    meet security and performance requirements for a production server.
    Instead, see :ref:`deployment` for WSGI server recommendations.

    If the :attr:`debug` flag is set the server will automatically reload
    for code changes and show a debugger in case an exception happened.

    If you want to run the application in debug mode, but disable the
    code execution on the interactive debugger, you can pass
    ``use_evalex=False`` as parameter.  This will keep the debugger's
    traceback screen active, but disable code execution.

    It is not recommended to use this function for development with
    automatic reloading as this is badly supported.  Instead you should
    be using the :command:`flask` command line script's ``run`` support.

    .. admonition:: Keep in Mind

       Flask will suppress any server error with a generic error page
       unless it is in debug mode.  As such to enable just the
       interactive debugger without the code reloading, you have to
       invoke :meth:`run` with ``debug=True`` and ``use_reloader=False``.
       Setting ``use_debugger`` to ``True`` without being in debug mode
       won't catch any exceptions because there won't be any to
       catch.

    :param host: the hostname to listen on. Set this to ``'0.0.0.0'`` to
        have the server available externally as well. Defaults to
        ``'127.0.0.1'`` or the host in the ``SERVER_NAME`` config variable
        if present.
    :param port: the port of the webserver. Defaults to ``5000`` or the
        port defined in the ``SERVER_NAME`` config variable if present.
    :param debug: if given, enable or disable debug mode. See
        :attr:`debug`.
    :param load_dotenv: Load the nearest :file:`.env` and :file:`.flaskenv`
        files to set environment variables. Will also change the working
        directory to the directory containing the first file found.
    :param options: the options to be forwarded to the underlying Werkzeug
        server. See :func:`werkzeug.serving.run_simple` for more
        information.

    .. versionchanged:: 1.0
        If installed, python-dotenv will be used to load environment
        variables from :file:`.env` and :file:`.flaskenv` files.

        If set, the :envvar:`FLASK_ENV` and :envvar:`FLASK_DEBUG`
        environment variables will override :attr:`env` and
        :attr:`debug`.

        Threaded mode is enabled by default.

    .. versionchanged:: 0.10
        The default port is now picked from the ``SERVER_NAME``
        variable.
    """
    # Change this into a no-op if the server is invoked from the
    # command line. Have a look at cli.py for more information.
    if os.environ.get('FLASK_RUN_FROM_CLI') == 'true':
        from .debughelpers import explain_ignored_app_run
        explain_ignored_app_run()
        return

    if get_load_dotenv(load_dotenv):
        cli.load_dotenv()

        # if set, let env vars override previous values
        if 'FLASK_ENV' in os.environ:
            self.env = get_env()
            self.debug = get_debug_flag()
        elif 'FLASK_DEBUG' in os.environ:
            self.debug = get_debug_flag()

    # debug passed to method overrides all other sources
    if debug is not None:
        self.debug = bool(debug)

    # Precedence for host/port: explicit argument, then SERVER_NAME
    # config, then the hard-coded localhost defaults.
    _host = '127.0.0.1'
    _port = 5000
    server_name = self.config.get('SERVER_NAME')
    sn_host, sn_port = None, None

    if server_name:
        sn_host, _, sn_port = server_name.partition(':')

    host = host or sn_host or _host
    port = int(port or sn_port or _port)

    options.setdefault('use_reloader', self.debug)
    options.setdefault('use_debugger', self.debug)
    options.setdefault('threaded', True)

    cli.show_server_banner(self.env, self.debug, self.name, False)

    from werkzeug.serving import run_simple

    try:
        run_simple(host, port, self, **options)
    finally:
        # reset the first request information if the development server
        # reset normally.  This makes it possible to restart the server
        # without reloader and that stuff from an interactive shell.
        self._got_first_request = False
def test_client(self, use_cookies=True, **kwargs):
    """Creates a test client for this application.  For information
    about unit testing head over to :ref:`testing`.

    Note that if you are testing for assertions or exceptions in your
    application code, you must set ``app.testing = True`` in order for the
    exceptions to propagate to the test client.  Otherwise, the exception
    will be handled by the application (not visible to the test client) and
    the only indication of an AssertionError or other exception will be a
    500 status code response to the test client.  See the :attr:`testing`
    attribute.  For example::

        app.testing = True
        client = app.test_client()

    The test client can be used in a ``with`` block to defer the closing down
    of the context until the end of the ``with`` block.  This is useful if
    you want to access the context locals for testing::

        with app.test_client() as c:
            rv = c.get('/?vodka=42')
            assert request.args['vodka'] == '42'

    Additionally, you may pass optional keyword arguments that will then
    be passed to the application's :attr:`test_client_class` constructor.
    For example::

        from flask.testing import FlaskClient

        class CustomClient(FlaskClient):
            def __init__(self, *args, **kwargs):
                self._authentication = kwargs.pop("authentication")
                super(CustomClient,self).__init__( *args, **kwargs)

        app.test_client_class = CustomClient
        client = app.test_client(authentication='Basic ....')

    See :class:`~flask.testing.FlaskClient` for more information.

    .. versionchanged:: 0.4
       added support for ``with`` block usage for the client.

    .. versionadded:: 0.7
       The `use_cookies` parameter was added as well as the ability
       to override the client to be used by setting the
       :attr:`test_client_class` attribute.

    .. versionchanged:: 0.11
       Added `**kwargs` to support passing additional keyword arguments to
       the constructor of :attr:`test_client_class`.
    """
    # Fall back to the stock client only when no custom class is set;
    # the ``import as cls`` rebinds the local name in that case.
    cls = self.test_client_class
    if cls is None:
        from flask.testing import FlaskClient as cls
    return cls(self, self.response_class, use_cookies=use_cookies, **kwargs)
def test_cli_runner(self, **kwargs):
    """Create a CLI runner for testing CLI commands.
    See :ref:`testing-cli`.

    Returns an instance of :attr:`test_cli_runner_class`, by default
    :class:`~flask.testing.FlaskCliRunner`. The Flask app object is
    passed as the first argument.

    .. versionadded:: 1.0
    """
    # Same override pattern as :meth:`test_client`: a custom class set
    # on the app wins over the stock runner.
    cls = self.test_cli_runner_class

    if cls is None:
        from flask.testing import FlaskCliRunner as cls

    return cls(self, **kwargs)
def open_session(self, request):
    """Creates or opens a new session.  Default implementation stores all
    session data in a signed cookie.  This requires that the
    :attr:`secret_key` is set.  Instead of overriding this method
    we recommend replacing the :class:`session_interface`.

    .. deprecated: 1.0
        Will be removed in 1.1. Use ``session_interface.open_session``
        instead.

    :param request: an instance of :attr:`request_class`.
    """
    # stacklevel=2 attributes the deprecation warning to the caller of
    # this method rather than to this internal ``warn`` call.
    warnings.warn(
        '"open_session" is deprecated and will be removed in 1.1. Use'
        ' "session_interface.open_session" instead.',
        DeprecationWarning,
        stacklevel=2,
    )
    return self.session_interface.open_session(self, request)
def save_session(self, session, response):
    """Saves the session if it needs updates.  For the default
    implementation, check :meth:`open_session`.  Instead of overriding this
    method we recommend replacing the :class:`session_interface`.

    .. deprecated: 1.0
        Will be removed in 1.1. Use ``session_interface.save_session``
        instead.

    :param session: the session to be saved (a
                    :class:`~werkzeug.contrib.securecookie.SecureCookie`
                    object)
    :param response: an instance of :attr:`response_class`
    """
    # stacklevel=2 attributes the deprecation warning to the caller of
    # this method rather than to this internal ``warn`` call.
    warnings.warn(
        '"save_session" is deprecated and will be removed in 1.1. Use'
        ' "session_interface.save_session" instead.',
        DeprecationWarning,
        stacklevel=2,
    )
    return self.session_interface.save_session(self, session, response)
def make_null_session(self):
    """Creates a new instance of a missing session.  Instead of overriding
    this method we recommend replacing the :class:`session_interface`.

    .. deprecated: 1.0
        Will be removed in 1.1. Use ``session_interface.make_null_session``
        instead.

    .. versionadded:: 0.7
    """
    # stacklevel=2 attributes the deprecation warning to the caller of
    # this method rather than to this internal ``warn`` call.
    warnings.warn(
        '"make_null_session" is deprecated and will be removed in 1.1. Use'
        ' "session_interface.make_null_session" instead.',
        DeprecationWarning,
        stacklevel=2,
    )
    return self.session_interface.make_null_session(self)
@setupmethod
def register_blueprint(self, blueprint, **options):
    """Register a :class:`~flask.Blueprint` on the application. Keyword
    arguments passed to this method will override the defaults set on the
    blueprint.

    Calls the blueprint's :meth:`~flask.Blueprint.register` method after
    recording the blueprint in the application's :attr:`blueprints`.

    :param blueprint: The blueprint to register.
    :param url_prefix: Blueprint routes will be prefixed with this.
    :param subdomain: Blueprint routes will match on this subdomain.
    :param url_defaults: Blueprint routes will use these default values for
        view arguments.
    :param options: Additional keyword arguments are passed to
        :class:`~flask.blueprints.BlueprintSetupState`. They can be
        accessed in :meth:`~flask.Blueprint.record` callbacks.

    .. versionadded:: 0.7
    """
    first_registration = False

    if blueprint.name in self.blueprints:
        # Re-registering the exact same blueprint object is allowed
        # (e.g. under two URL prefixes); a *different* object with the
        # same name is a programming error.
        # NOTE(review): this check uses ``assert``, which is stripped
        # when Python runs with -O, silently disabling the validation.
        assert self.blueprints[blueprint.name] is blueprint, (
            'A name collision occurred between blueprints %r and %r. Both'
            ' share the same name "%s". Blueprints that are created on the'
            ' fly need unique names.' % (
                blueprint, self.blueprints[blueprint.name], blueprint.name
            )
        )
    else:
        self.blueprints[blueprint.name] = blueprint
        self._blueprint_order.append(blueprint)
        first_registration = True

    blueprint.register(self, options, first_registration)
def iter_blueprints(self):
    """Yield every registered blueprint, in registration order.

    .. versionadded:: 0.11
    """
    for blueprint in self._blueprint_order:
        yield blueprint
@setupmethod
def add_url_rule(self, rule, endpoint=None, view_func=None,
                 provide_automatic_options=None, **options):
    """Connects a URL rule.  Works exactly like the :meth:`route`
    decorator.  If a view_func is provided it will be registered with the
    endpoint.

    Basically this example::

        @app.route('/')
        def index():
            pass

    Is equivalent to the following::

        def index():
            pass
        app.add_url_rule('/', 'index', index)

    If the view_func is not provided you will need to connect the endpoint
    to a view function like so::

        app.view_functions['index'] = index

    Internally :meth:`route` invokes :meth:`add_url_rule` so if you want
    to customize the behavior via subclassing you only need to change
    this method.

    For more information refer to :ref:`url-route-registrations`.

    .. versionchanged:: 0.2
       `view_func` parameter added.

    .. versionchanged:: 0.6
       ``OPTIONS`` is added automatically as method.

    :param rule: the URL rule as string
    :param endpoint: the endpoint for the registered URL rule.  Flask
                     itself assumes the name of the view function as
                     endpoint
    :param view_func: the function to call when serving a request to the
                      provided endpoint
    :param provide_automatic_options: controls whether the ``OPTIONS``
        method should be added automatically. This can also be controlled
        by setting the ``view_func.provide_automatic_options = False``
        before adding the rule.
    :param options: the options to be forwarded to the underlying
                    :class:`~werkzeug.routing.Rule` object.  A change
                    to Werkzeug is handling of method options.  methods
                    is a list of methods this rule should be limited
                    to (``GET``, ``POST`` etc.).  By default a rule
                    just listens for ``GET`` (and implicitly ``HEAD``).
                    Starting with Flask 0.6, ``OPTIONS`` is implicitly
                    added and handled by the standard request handling.
    """
    if endpoint is None:
        endpoint = _endpoint_from_view_func(view_func)
    options['endpoint'] = endpoint
    methods = options.pop('methods', None)

    # if the methods are not given and the view_func object knows its
    # methods we can use that instead.  If neither exists, we go with
    # a tuple of only ``GET`` as default.
    if methods is None:
        methods = getattr(view_func, 'methods', None) or ('GET',)
    if isinstance(methods, string_types):
        raise TypeError('Allowed methods have to be iterables of strings, '
                        'for example: @app.route(..., methods=["POST"])')
    methods = set(item.upper() for item in methods)

    # Methods that should always be added
    required_methods = set(getattr(view_func, 'required_methods', ()))

    # starting with Flask 0.8 the view_func object can disable and
    # force-enable the automatic options handling.
    if provide_automatic_options is None:
        provide_automatic_options = getattr(view_func,
            'provide_automatic_options', None)

    if provide_automatic_options is None:
        # Only auto-handle OPTIONS when the view did not claim it
        # explicitly; otherwise defer to the view's own handling.
        if 'OPTIONS' not in methods:
            provide_automatic_options = True
            required_methods.add('OPTIONS')
        else:
            provide_automatic_options = False

    # Add the required methods now.
    methods |= required_methods

    rule = self.url_rule_class(rule, methods=methods, **options)
    rule.provide_automatic_options = provide_automatic_options

    self.url_map.add(rule)
    if view_func is not None:
        old_func = self.view_functions.get(endpoint)
        if old_func is not None and old_func != view_func:
            raise AssertionError('View function mapping is overwriting an '
                                 'existing endpoint function: %s' % endpoint)
        self.view_functions[endpoint] = view_func
def route(self, rule, **options):
    """A decorator that registers a view function for a given URL rule.
    It behaves exactly like :meth:`add_url_rule` but is intended for
    decorator usage::

        @app.route('/')
        def index():
            return 'Hello World'

    For more information refer to :ref:`url-route-registrations`.

    :param rule: the URL rule as string
    :param endpoint: the endpoint for the registered URL rule.  Flask
                     itself assumes the name of the view function as
                     endpoint
    :param options: the options to be forwarded to the underlying
                    :class:`~werkzeug.routing.Rule` object.  A change
                    to Werkzeug is handling of method options.  methods
                    is a list of methods this rule should be limited
                    to (``GET``, ``POST`` etc.).  By default a rule
                    just listens for ``GET`` (and implicitly ``HEAD``).
                    Starting with Flask 0.6, ``OPTIONS`` is implicitly
                    added and handled by the standard request handling.
    """
    def register(view_func):
        # ``endpoint`` defaults to the view function's name inside
        # :meth:`add_url_rule` when it is not supplied here.
        endpoint = options.pop('endpoint', None)
        self.add_url_rule(rule, endpoint, view_func, **options)
        return view_func

    return register
@setupmethod
def endpoint(self, endpoint):
    """A decorator to register a function as an endpoint.

    Example::

        @app.endpoint('example.endpoint')
        def example():
            return "example"

    :param endpoint: the name of the endpoint
    """
    def register(f):
        self.view_functions[endpoint] = f
        return f

    return register
@staticmethod
def _get_exc_class_and_code(exc_class_or_code):
    """Resolve a handler key to an ``(exception class, status code)``
    pair, ensuring that only exception classes are used as keys.
    """
    exc_class = (
        default_exceptions[exc_class_or_code]
        if isinstance(exc_class_or_code, integer_types)
        else exc_class_or_code
    )

    assert issubclass(exc_class, Exception)

    if issubclass(exc_class, HTTPException):
        return exc_class, exc_class.code
    return exc_class, None
@setupmethod
def errorhandler(self, code_or_exception):
    """Register a function to handle errors by code or exception class.

    A decorator that is used to register a function given an
    error code.  Example::

        @app.errorhandler(404)
        def page_not_found(error):
            return 'This page does not exist', 404

    You can also register handlers for arbitrary exceptions::

        @app.errorhandler(DatabaseError)
        def special_exception_handler(error):
            return 'Database connection failed', 500

    .. versionadded:: 0.7
        Use :meth:`register_error_handler` instead of modifying
        :attr:`error_handler_spec` directly, for application wide error
        handlers.

    .. versionadded:: 0.7
       One can now additionally also register custom exception types
       that do not necessarily have to be a subclass of the
       :class:`~werkzeug.exceptions.HTTPException` class.

    :param code_or_exception: the code as integer for the handler, or
                              an arbitrary exception
    """
    def register(f):
        self._register_error_handler(None, code_or_exception, f)
        return f

    return register
@setupmethod
def register_error_handler(self, code_or_exception, f):
    """Non-decorator counterpart of the :meth:`errorhandler` decorator,
    more straightforward to use when a decorator is inconvenient.

    .. versionadded:: 0.7
    """
    self._register_error_handler(None, code_or_exception, f)
@setupmethod
def _register_error_handler(self, key, code_or_exception, f):
    """Register ``f`` as the handler for ``code_or_exception`` under the
    given blueprint ``key`` (``None`` for application-wide handlers).

    :type key: None|str
    :type code_or_exception: int|T<=Exception
    :type f: callable
    """
    if isinstance(code_or_exception, HTTPException):  # old broken behavior
        raise ValueError(
            'Tried to register a handler for an exception instance {0!r}.'
            ' Handlers can only be registered for exception classes or'
            ' HTTP error codes.'.format(code_or_exception)
        )

    try:
        exc_class, code = self._get_exc_class_and_code(code_or_exception)
    except KeyError:
        # An int that is not a known HTTP status code.
        raise KeyError(
            "'{0}' is not a recognized HTTP error code. Use a subclass of"
            " HTTPException with that code instead.".format(code_or_exception)
        )

    # Spec layout: {blueprint key: {status code or None: {class: handler}}}
    handlers = self.error_handler_spec.setdefault(key, {}).setdefault(code, {})
    handlers[exc_class] = f
@setupmethod
def template_filter(self, name=None):
    """A decorator that registers a custom template filter.

    You can specify a name for the filter, otherwise the function
    name will be used.  Example::

      @app.template_filter()
      def reverse(s):
          return s[::-1]

    :param name: the optional name of the filter, otherwise the
                 function name will be used.
    """
    def register(f):
        self.add_template_filter(f, name=name)
        return f

    return register
@setupmethod
def add_template_filter(self, f, name=None):
    """Register a custom template filter directly, exactly like the
    :meth:`template_filter` decorator does.

    :param name: the optional name of the filter, otherwise the
                 function name will be used.
    """
    filter_name = name or f.__name__
    self.jinja_env.filters[filter_name] = f
@setupmethod
def template_test(self, name=None):
    """A decorator that registers a custom template test.

    You can specify a name for the test, otherwise the function
    name will be used.  Example::

      @app.template_test()
      def is_prime(n):
          if n == 2:
              return True
          for i in range(2, int(math.ceil(math.sqrt(n))) + 1):
              if n % i == 0:
                  return False
          return True

    .. versionadded:: 0.10

    :param name: the optional name of the test, otherwise the
                 function name will be used.
    """
    def register(f):
        self.add_template_test(f, name=name)
        return f

    return register
@setupmethod
def add_template_test(self, f, name=None):
    """Register a custom template test directly, exactly like the
    :meth:`template_test` decorator does.

    .. versionadded:: 0.10

    :param name: the optional name of the test, otherwise the
                 function name will be used.
    """
    test_name = name or f.__name__
    self.jinja_env.tests[test_name] = f
@setupmethod
def template_global(self, name=None):
    """A decorator that registers a custom template global function.

    You can specify a name for the global function, otherwise the function
    name will be used.  Example::

        @app.template_global()
        def double(n):
            return 2 * n

    .. versionadded:: 0.10

    :param name: the optional name of the global function, otherwise the
                 function name will be used.
    """
    def register(f):
        self.add_template_global(f, name=name)
        return f

    return register
@setupmethod
def add_template_global(self, f, name=None):
    """Register a custom template global function directly, exactly like
    the :meth:`template_global` decorator does.

    .. versionadded:: 0.10

    :param name: the optional name of the global function, otherwise the
                 function name will be used.
    """
    global_name = name or f.__name__
    self.jinja_env.globals[global_name] = f
@setupmethod
def before_request(self, f):
    """Register a function to run before each request.

    For example, this can be used to open a database connection, or to
    load the logged in user from the session.

    The function is called without arguments.  Returning a non-None
    value short-circuits request handling: the value is treated as the
    view's return value and no further handlers run.
    """
    funcs = self.before_request_funcs.setdefault(None, [])
    funcs.append(f)
    return f
@setupmethod
def before_first_request(self, f):
    """Register a function to run before the first request to this
    instance of the application.

    The function is called without arguments and its return value is
    ignored.

    .. versionadded:: 0.8
    """
    hooks = self.before_first_request_funcs
    hooks.append(f)
    return f
@setupmethod
def after_request(self, f):
    """Register a function to run after each request.

    The function must accept one parameter, an instance of
    :attr:`response_class`, and return a new response object or the
    same one (see :meth:`process_response`).

    As of Flask 0.7 this function might not be executed at the end of
    the request in case an unhandled exception occurred.
    """
    funcs = self.after_request_funcs.setdefault(None, [])
    funcs.append(f)
    return f
@setupmethod
def teardown_request(self, f):
    """Register a function to run at the end of each request, regardless
    of whether there was an exception or not.  These functions are
    executed when the request context is popped, even if not an actual
    request was performed.

    Example::

        ctx = app.test_request_context()
        ctx.push()
        ...
        ctx.pop()

    When ``ctx.pop()`` is executed in the above example, the teardown
    functions are called just before the request context moves from the
    stack of active contexts.  This becomes relevant if you are using
    such constructs in tests.

    Generally teardown functions must take every necessary step to avoid
    that they will fail.  If they do execute code that might fail they
    will have to surround the execution of these code by try/except
    statements and log occurring errors.

    When a teardown function was called because of an exception it will
    be passed an error object.

    The return values of teardown functions are ignored.

    .. admonition:: Debug Note

       In debug mode Flask will not tear down a request on an exception
       immediately.  Instead it will keep it alive so that the interactive
       debugger can still access it.  This behavior can be controlled
       by the ``PRESERVE_CONTEXT_ON_EXCEPTION`` configuration variable.
    """
    funcs = self.teardown_request_funcs.setdefault(None, [])
    funcs.append(f)
    return f
@setupmethod
def teardown_appcontext(self, f):
    """Register a function to be called when the application context
    ends.  These functions are typically also called when the request
    context is popped.

    Example::

        ctx = app.app_context()
        ctx.push()
        ...
        ctx.pop()

    When ``ctx.pop()`` is executed in the above example, the teardown
    functions are called just before the app context moves from the
    stack of active contexts.  This becomes relevant if you are using
    such constructs in tests.

    Since a request context typically also manages an application
    context it would also be called when you pop a request context.

    When a teardown function was called because of an unhandled exception
    it will be passed an error object.  If an :meth:`errorhandler` is
    registered, it will handle the exception and the teardown will not
    receive it.

    The return values of teardown functions are ignored.

    .. versionadded:: 0.9
    """
    hooks = self.teardown_appcontext_funcs
    hooks.append(f)
    return f
@setupmethod
def context_processor(self, f):
    """Register an application-wide template context processor function."""
    processors = self.template_context_processors[None]
    processors.append(f)
    return f
@setupmethod
def shell_context_processor(self, f):
    """Register a shell context processor function.

    .. versionadded:: 0.11
    """
    processors = self.shell_context_processors
    processors.append(f)
    return f
@setupmethod
def url_value_preprocessor(self, f):
    """Register a URL value preprocessor function for all view
    functions in the application.  These functions will be called before
    the :meth:`before_request` functions.

    The function can modify the values captured from the matched url
    before they are passed to the view.  For example, this can be used
    to pop a common language code value and place it in ``g`` rather
    than pass it to every view.

    The function is passed the endpoint name and values dict.  The
    return value is ignored.
    """
    preprocessors = self.url_value_preprocessors.setdefault(None, [])
    preprocessors.append(f)
    return f
@setupmethod
def url_defaults(self, f):
    """Register a callback function for URL defaults for all view
    functions of the application.  It's called with the endpoint and
    values and should update the values passed in place.
    """
    defaults = self.url_default_functions.setdefault(None, [])
    defaults.append(f)
    return f
def _find_error_handler(self, e):
    """Return a registered error handler for an exception in this order:
    blueprint handler for a specific code, app handler for a specific code,
    blueprint handler for an exception class, app handler for an exception
    class, or ``None`` if a suitable handler is not found.
    """
    exc_class, code = self._get_exc_class_and_code(type(e))

    # The (blueprint, code) pairs below encode the documented lookup
    # priority; within each bucket the most specific class in the
    # exception's MRO wins.
    for name, c in (
        (request.blueprint, code), (None, code),
        (request.blueprint, None), (None, None)
    ):
        handler_map = self.error_handler_spec.setdefault(name, {}).get(c)

        if not handler_map:
            continue

        for cls in exc_class.__mro__:
            handler = handler_map.get(cls)

            if handler is not None:
                return handler
def handle_http_exception(self, e):
    """Handles an HTTP exception.  By default this will invoke the
    registered error handlers and fall back to returning the
    exception as response.

    .. versionchanged:: 1.0.3
        ``RoutingException``, used internally for actions such as
         slash redirects during routing, is not passed to error
         handlers.

    .. versionchanged:: 1.0
        Exceptions are looked up by code *and* by MRO, so
        ``HTTPExcpetion`` subclasses can be handled with a catch-all
        handler for the base ``HTTPException``.

    .. versionadded:: 0.3
    """
    # Proxy exceptions don't have error codes.  We want to always return
    # those unchanged as errors
    if e.code is None:
        return e

    # RoutingExceptions are used internally to trigger routing
    # actions, such as slash redirects raising RequestRedirect. They
    # are not raised or handled in user code.
    if isinstance(e, RoutingException):
        return e

    handler = self._find_error_handler(e)
    if handler is None:
        return e
    return handler(e)
def trap_http_exception(self, e):
    """Checks if an HTTP exception should be trapped or not.  By default
    this will return ``False`` for all exceptions except for a bad request
    key error if ``TRAP_BAD_REQUEST_ERRORS`` is set to ``True``.  It
    also returns ``True`` if ``TRAP_HTTP_EXCEPTIONS`` is set to ``True``.

    This is called for all HTTP exceptions raised by a view function.
    If it returns ``True`` for any exception the error handler for this
    exception is not called and it shows up as regular exception in the
    traceback.  This is helpful for debugging implicitly raised HTTP
    exceptions.

    .. versionchanged:: 1.0
        Bad request errors are not trapped by default in debug mode.

    .. versionadded:: 0.8
    """
    if self.config['TRAP_HTTP_EXCEPTIONS']:
        return True

    trap_bad_request = self.config['TRAP_BAD_REQUEST_ERRORS']

    # if unset, trap key errors in debug mode
    if (
        trap_bad_request is None and self.debug
        and isinstance(e, BadRequestKeyError)
    ):
        return True

    if trap_bad_request:
        return isinstance(e, BadRequest)

    return False
def handle_user_exception(self, e):
    """This method is called whenever an exception occurs that
    should be handled.  A special case is :class:`~werkzeug
    .exceptions.HTTPException` which is forwarded to the
    :meth:`handle_http_exception` method.  This function will either
    return a response value or reraise the exception with the same
    traceback.

    .. versionchanged:: 1.0
        Key errors raised from request data like ``form`` show the
        bad key in debug mode rather than a generic bad request
        message.

    .. versionadded:: 0.7
    """
    exc_type, exc_value, tb = sys.exc_info()
    assert exc_value is e
    # ensure not to trash sys.exc_info() at that point in case someone
    # wants the traceback preserved in handle_http_exception.  Of course
    # we cannot prevent users from trashing it themselves in a custom
    # trap_http_exception method so that's their fault then.

    if isinstance(e, BadRequestKeyError):
        if self.debug or self.config["TRAP_BAD_REQUEST_ERRORS"]:
            # Werkzeug < 0.15 doesn't add the KeyError to the 400
            # message, add it in manually.
            description = e.get_description()

            if e.args[0] not in description:
                e.description = "KeyError: '{}'".format(*e.args)
        else:
            # Werkzeug >= 0.15 does add it, remove it in production
            e.args = ()

    if isinstance(e, HTTPException) and not self.trap_http_exception(e):
        return self.handle_http_exception(e)

    handler = self._find_error_handler(e)

    if handler is None:
        # Re-raise with the original traceback captured above.
        reraise(exc_type, exc_value, tb)
    return handler(e)
def handle_exception(self, e):
    """Default exception handling that kicks in when an exception
    occurs that is not caught.  In debug mode the exception will
    be re-raised immediately, otherwise it is logged and the handler
    for a 500 internal server error is used.  If no such handler
    exists, a default 500 internal server error message is displayed.

    .. versionadded:: 0.3
    """
    exc_type, exc_value, tb = sys.exc_info()

    got_request_exception.send(self, exception=e)
    # Look up the 500 handler before the propagate check so the
    # signal above has already fired either way.
    handler = self._find_error_handler(InternalServerError())

    if self.propagate_exceptions:
        # if we want to repropagate the exception, we can attempt to
        # raise it with the whole traceback in case we can do that
        # (the function was actually called from the except part)
        # otherwise, we just raise the error again
        if exc_value is e:
            reraise(exc_type, exc_value, tb)
        else:
            raise e

    self.log_exception((exc_type, exc_value, tb))
    if handler is None:
        return InternalServerError()
    return self.finalize_request(handler(e), from_error_handler=True)
def log_exception(self, exc_info):
    """Logs an exception.  This is called by :meth:`handle_exception`
    if debugging is disabled and right before the handler is called.
    The default implementation logs the exception as error on the
    :attr:`logger`.

    :param exc_info: an ``(exc_type, exc_value, traceback)`` tuple as
        produced by :func:`sys.exc_info`, attached to the log record.

    .. versionadded:: 0.8
    """
    # Pass the format arguments lazily to the logger instead of
    # interpolating eagerly with ``%``, so the string is only built
    # when the record is actually emitted.
    self.logger.error(
        'Exception on %s [%s]', request.path, request.method,
        exc_info=exc_info
    )
def raise_routing_exception(self, request):
    """Exceptions that are recording during routing are reraised with
    this method.  During debug we are not reraising redirect requests
    for non ``GET``, ``HEAD``, or ``OPTIONS`` requests and we're raising
    a different error instead to help debug situations.

    :internal:
    """
    if not self.debug \
            or not isinstance(request.routing_exception, RequestRedirect) \
            or request.method in ('GET', 'HEAD', 'OPTIONS'):
        raise request.routing_exception

    # Debug mode + redirect on a body-carrying method: the redirect
    # would drop the form data, so raise a helpful error instead.
    from .debughelpers import FormDataRoutingRedirect
    raise FormDataRoutingRedirect(request)
def dispatch_request(self):
    """Does the request dispatching.  Matches the URL and returns the
    return value of the view or error handler.  This does not have to
    be a response object.  In order to convert the return value to a
    proper response object, call :func:`make_response`.

    .. versionchanged:: 0.7
       This no longer does the exception handling, this code was
       moved to the new :meth:`full_dispatch_request`.
    """
    req = _request_ctx_stack.top.request
    if req.routing_exception is not None:
        self.raise_routing_exception(req)
    rule = req.url_rule
    # if we provide automatic options for this URL and the
    # request came with the OPTIONS method, reply automatically
    if getattr(rule, 'provide_automatic_options', False) \
            and req.method == 'OPTIONS':
        return self.make_default_options_response()
    # otherwise dispatch to the handler for that endpoint
    return self.view_functions[rule.endpoint](**req.view_args)
def full_dispatch_request(self):
    """Dispatch the current request with full pre/post processing.

    Runs the before-first-request hooks, sends the ``request_started``
    signal, applies the preprocessors, dispatches to the view, and
    routes any raised exception through the user error handlers before
    finalizing the response.

    .. versionadded:: 0.7
    """
    self.try_trigger_before_first_request_functions()
    try:
        request_started.send(self)
        result = self.preprocess_request()
        if result is None:
            result = self.dispatch_request()
    except Exception as exc:
        result = self.handle_user_exception(exc)
    return self.finalize_request(result)
def finalize_request(self, rv, from_error_handler=False):
    """Convert a view's return value into a response and post-process it.

    Used for both normal dispatching and error handling.  When
    ``from_error_handler`` is true we are already recovering from a
    failure, so errors raised while post-processing are logged and
    swallowed instead of propagated.

    :internal:
    """
    response = self.make_response(rv)
    try:
        response = self.process_response(response)
        request_finished.send(self, response=response)
    except Exception:
        if from_error_handler:
            self.logger.exception('Request finalizing failed with an '
                                  'error while handling an error')
        else:
            raise
    return response
def try_trigger_before_first_request_functions(self):
    """Called before each request and will ensure that it triggers
    the :attr:`before_first_request_funcs` and only exactly once per
    application instance (which means process usually).

    :internal:
    """
    # Double-checked locking: the unlocked fast path skips lock
    # acquisition on every request after the first one.
    if self._got_first_request:
        return
    with self._before_request_lock:
        # re-check under the lock: another thread may have run the
        # hooks while we were waiting
        if self._got_first_request:
            return
        for func in self.before_first_request_funcs:
            func()
        # flag is set only after all hooks succeeded, so a failing hook
        # is retried on the next request
        self._got_first_request = True
def make_default_options_response(self):
    """This method is called to create the default ``OPTIONS`` response.

    This can be changed through subclassing to change the default
    behavior of ``OPTIONS`` responses.

    :return: a :attr:`response_class` instance whose ``Allow`` header
        lists the methods valid for the matched rule.

    .. versionadded:: 0.7
    """
    adapter = _request_ctx_stack.top.url_adapter
    if hasattr(adapter, 'allowed_methods'):
        methods = adapter.allowed_methods()
    else:
        # fallback for Werkzeug < 0.7
        methods = []
        try:
            # match with a bogus method to provoke MethodNotAllowed,
            # which carries the set of methods the rule accepts
            adapter.match(method='--')
        except MethodNotAllowed as e:
            methods = e.valid_methods
        except HTTPException:
            # no match at all: advertise no methods (fix: the exception
            # was previously bound to an unused name)
            pass
    rv = self.response_class()
    rv.allow.update(methods)
    return rv
def should_ignore_error(self, error):
    """Decide whether *error* should be hidden from teardown handlers.

    Returning ``True`` means the teardown functions are called without
    the error.  The base implementation never ignores anything;
    subclasses may override.

    .. versionadded:: 0.10
    """
    return False
def make_response(self, rv):
    """Convert the return value from a view function to an instance of
    :attr:`response_class`.

    :param rv: the return value from the view function. The view function
        must return a response. Returning ``None``, or the view ending
        without returning, is not allowed. The following types are allowed
        for ``view_rv``:

        ``str`` (``unicode`` in Python 2)
            A response object is created with the string encoded to UTF-8
            as the body.

        ``bytes`` (``str`` in Python 2)
            A response object is created with the bytes as the body.

        ``tuple``
            Either ``(body, status, headers)``, ``(body, status)``, or
            ``(body, headers)``, where ``body`` is any of the other types
            allowed here, ``status`` is a string or an integer, and
            ``headers`` is a dictionary or a list of ``(key, value)``
            tuples. If ``body`` is a :attr:`response_class` instance,
            ``status`` overwrites the exiting value and ``headers`` are
            extended.

        :attr:`response_class`
            The object is returned unchanged.

        other :class:`~werkzeug.wrappers.Response` class
            The object is coerced to :attr:`response_class`.

        :func:`callable`
            The function is called as a WSGI application. The result is
            used to create a response object.

    :raises TypeError: if ``rv`` is ``None``, a wrong-sized tuple, or a
        type that cannot be coerced into a response.

    .. versionchanged:: 0.9
        Previously a tuple was interpreted as the arguments for the
        response object.
    """
    status = headers = None

    # unpack tuple returns
    if isinstance(rv, tuple):
        len_rv = len(rv)

        # a 3-tuple is unpacked directly
        if len_rv == 3:
            rv, status, headers = rv
        # decide if a 2-tuple has status or headers
        elif len_rv == 2:
            if isinstance(rv[1], (Headers, dict, tuple, list)):
                rv, headers = rv
            else:
                rv, status = rv
        # other sized tuples are not allowed
        else:
            raise TypeError(
                'The view function did not return a valid response tuple.'
                ' The tuple must have the form (body, status, headers),'
                ' (body, status), or (body, headers).'
            )

    # the body must not be None
    if rv is None:
        raise TypeError(
            'The view function did not return a valid response. The'
            ' function either returned None or ended without a return'
            ' statement.'
        )

    # make sure the body is an instance of the response class
    if not isinstance(rv, self.response_class):
        # NOTE(review): text_type is presumably the py2/py3 ``str``
        # compat alias imported elsewhere in this module — confirm.
        if isinstance(rv, (text_type, bytes, bytearray)):
            # let the response class set the status and headers instead of
            # waiting to do it manually, so that the class can handle any
            # special logic
            rv = self.response_class(rv, status=status, headers=headers)
            status = headers = None
        else:
            # evaluate a WSGI callable, or coerce a different response
            # class to the correct type
            try:
                rv = self.response_class.force_type(rv, request.environ)
            except TypeError as e:
                new_error = TypeError(
                    '{e}\nThe view function did not return a valid'
                    ' response. The return type must be a string, tuple,'
                    ' Response instance, or WSGI callable, but it was a'
                    ' {rv.__class__.__name__}.'.format(e=e, rv=rv)
                )
                reraise(TypeError, new_error, sys.exc_info()[2])

    # prefer the status if it was provided
    if status is not None:
        if isinstance(status, (text_type, bytes, bytearray)):
            rv.status = status
        else:
            rv.status_code = status

    # extend existing headers with provided headers
    if headers:
        rv.headers.extend(headers)

    return rv
def create_url_adapter(self, request):
    """Create a URL adapter for the given request.

    The adapter is created before the request context exists, so the
    request must be passed explicitly.  May also be called with
    ``request=None`` to build an adapter for the application context,
    in which case :data:`SERVER_NAME` must be configured.

    .. versionadded:: 0.6

    .. versionchanged:: 1.0
        :data:`SERVER_NAME` no longer implicitly enables subdomain
        matching. Use :attr:`subdomain_matching` instead.
    """
    if request is not None:
        # When subdomain matching is disabled (the default), pin every
        # request to the map's default subdomain; Werkzeug does not do
        # this on its own.
        if self.subdomain_matching:
            subdomain = None
        else:
            subdomain = self.url_map.default_subdomain or None
        return self.url_map.bind_to_environ(
            request.environ,
            server_name=self.config['SERVER_NAME'],
            subdomain=subdomain)
    # Without a request we need at least the server name configured.
    if self.config['SERVER_NAME'] is not None:
        return self.url_map.bind(
            self.config['SERVER_NAME'],
            script_name=self.config['APPLICATION_ROOT'],
            url_scheme=self.config['PREFERRED_URL_SCHEME'])
def inject_url_defaults(self, endpoint, values):
    """Apply registered URL-default callbacks to *values* in place.

    Application-wide callbacks run first, then the callbacks of the
    blueprint named in *endpoint* (if any).  Used internally during
    URL building.

    .. versionadded:: 0.7
    """
    callbacks = list(self.url_default_functions.get(None, ()))
    if '.' in endpoint:
        blueprint = endpoint.rsplit('.', 1)[0]
        callbacks.extend(self.url_default_functions.get(blueprint, ()))
    for callback in callbacks:
        callback(endpoint, values)
def handle_url_build_error(self, error, endpoint, values):
    """Handle :class:`~werkzeug.routing.BuildError` on :meth:`url_for`.
    """
    exc_type, exc_value, tb = sys.exc_info()
    for handler in self.url_build_error_handlers:
        try:
            result = handler(error, endpoint, values)
        except BuildError as replacement:
            # remember the most recent failure for the final raise
            # (assignment keeps it alive outside the except block on py3)
            error = replacement
        else:
            if result is not None:
                return result
    # Nothing handled the error.  Reraise with the original traceback
    # when it is unchanged, otherwise raise the replacement from here.
    if error is exc_value:
        reraise(exc_type, exc_value, tb)
    raise error
def preprocess_request(self):
    """Run URL value preprocessors and before-request hooks.

    App-level callbacks run before those of the current blueprint.  If
    any before-request hook returns a non-``None`` value, it is treated
    as the view's return value and dispatching stops.
    """
    bp = _request_ctx_stack.top.request.blueprint

    preprocessors = list(self.url_value_preprocessors.get(None, ()))
    if bp is not None and bp in self.url_value_preprocessors:
        preprocessors.extend(self.url_value_preprocessors[bp])
    for preprocessor in preprocessors:
        preprocessor(request.endpoint, request.view_args)

    hooks = list(self.before_request_funcs.get(None, ()))
    if bp is not None and bp in self.before_request_funcs:
        hooks.extend(self.before_request_funcs[bp])
    for hook in hooks:
        rv = hook()
        if rv is not None:
            return rv
def process_response(self, response):
    """Post-process the response object before it is sent.

    Runs the per-request deferred functions, then the blueprint's and
    the application's ``after_request`` handlers (each group in reverse
    registration order), and finally saves the session.

    :param response: a :attr:`response_class` object.
    :return: a response object, an instance of :attr:`response_class`.

    .. versionchanged:: 0.5
        After-request functions are called in reverse registration order.
    """
    ctx = _request_ctx_stack.top
    bp = ctx.request.blueprint
    handlers = list(ctx._after_request_functions)
    if bp is not None and bp in self.after_request_funcs:
        handlers.extend(reversed(self.after_request_funcs[bp]))
    if None in self.after_request_funcs:
        handlers.extend(reversed(self.after_request_funcs[None]))
    for handler in handlers:
        response = handler(response)
    if not self.session_interface.is_null_session(ctx.session):
        self.session_interface.save_session(self, ctx.session, response)
    return response
def do_teardown_request(self, exc=_sentinel):
    """Run teardown-request callbacks right before the context pops.

    Calls the application's and then the blueprint's
    ``teardown_request`` functions (each group in reverse registration
    order) and finally sends :data:`request_tearing_down`.

    :param exc: the unhandled exception for this request, detected from
        the current exception info when not passed explicitly.

    .. versionchanged:: 0.9
        Added the ``exc`` argument.
    """
    if exc is _sentinel:
        exc = sys.exc_info()[1]
    callbacks = list(reversed(self.teardown_request_funcs.get(None, ())))
    bp = _request_ctx_stack.top.request.blueprint
    if bp is not None and bp in self.teardown_request_funcs:
        callbacks.extend(reversed(self.teardown_request_funcs[bp]))
    for teardown in callbacks:
        teardown(exc)
    request_tearing_down.send(self, exc=exc)
def do_teardown_appcontext(self, exc=_sentinel):
    """Run teardown callbacks right before the app context pops.

    Invokes every ``teardown_appcontext`` function in reverse
    registration order, then sends :data:`appcontext_tearing_down`.

    :param exc: the unhandled exception, detected from the current
        exception info when not passed explicitly.

    .. versionadded:: 0.9
    """
    if exc is _sentinel:
        exc = sys.exc_info()[1]
    for teardown in reversed(self.teardown_appcontext_funcs):
        teardown(exc)
    appcontext_tearing_down.send(self, exc=exc)
def app_context(self):
    """Return a fresh :class:`~flask.ctx.AppContext` for this app.

    Use it as a ``with`` block to make :data:`current_app` point at
    this application outside of request handling::

        with app.app_context():
            init_db()

    A context is pushed automatically while handling a request or
    running a CLI command; this helper is for everything else.

    .. versionadded:: 0.9
    """
    return AppContext(self)
def request_context(self, environ):
    """Return a :class:`~flask.ctx.RequestContext` for *environ*.

    Push it with a ``with`` block to make :data:`request` point at the
    represented request.  Application code rarely needs this directly:
    :meth:`wsgi_app` pushes a context per request, and tests should
    prefer :meth:`test_request_context`.

    :param environ: a WSGI environment
    """
    return RequestContext(self, environ)
def test_request_context(self, *args, **kwargs):
    """Create a :class:`~flask.ctx.RequestContext` from a synthetic
    WSGI environment built out of the given values.

    Mostly useful in tests, where you want to exercise code that reads
    request data without dispatching a full request.  Push the returned
    context with a ``with`` block::

        with test_request_context(...):
            generate_report()

    or manually via ``ctx.push()`` / ``ctx.pop()`` in a shell.

    Accepts the same arguments as Werkzeug's
    :class:`~werkzeug.test.EnvironBuilder`, with application-derived
    defaults.  Flask-specific parameters:

    :param path: URL path being requested.
    :param base_url: base URL the app is served under; built from
        :data:`PREFERRED_URL_SCHEME`, ``subdomain``, :data:`SERVER_NAME`
        and :data:`APPLICATION_ROOT` when not given.
    :param subdomain: subdomain to prepend to :data:`SERVER_NAME`.
    :param url_scheme: scheme overriding :data:`PREFERRED_URL_SCHEME`.
    :param data: request body, as a string or a dict of form fields.
    :param json: serialized as JSON into ``data`` and defaults
        ``content_type`` to ``application/json``.
    :param args: further positional arguments for
        :class:`~werkzeug.test.EnvironBuilder`.
    :param kwargs: further keyword arguments for
        :class:`~werkzeug.test.EnvironBuilder`.
    """
    from flask.testing import make_test_environ_builder

    builder = make_test_environ_builder(self, *args, **kwargs)
    try:
        return self.request_context(builder.get_environ())
    finally:
        # always release the builder's resources, even on failure
        builder.close()
def wsgi_app(self, environ, start_response):
    """The actual WSGI application. This is not implemented in
    :meth:`__call__` so that middlewares can be applied without
    losing a reference to the app object. Instead of doing this::

        app = MyMiddleware(app)

    It's a better idea to do this instead::

        app.wsgi_app = MyMiddleware(app.wsgi_app)

    Then you still have the original application object around and
    can continue to call methods on it.

    .. versionchanged:: 0.7
        Teardown events for the request and app contexts are called
        even if an unhandled error occurs. Other events may not be
        called depending on when an error occurs during dispatch.
        See :ref:`callbacks-and-errors`.

    :param environ: A WSGI environment.
    :param start_response: A callable accepting a status code,
        a list of headers, and an optional exception context to
        start the response.
    """
    ctx = self.request_context(environ)
    error = None
    try:
        try:
            ctx.push()
            response = self.full_dispatch_request()
        except Exception as e:
            # ordinary errors become an error response via the handlers
            error = e
            response = self.handle_exception(e)
        except:
            # deliberate bare except: records BaseExceptions such as
            # SystemExit/KeyboardInterrupt so teardown sees them, then
            # reraises immediately
            error = sys.exc_info()[1]
            raise
        return response(environ, start_response)
    finally:
        if self.should_ignore_error(error):
            error = None
        # the context is popped in every case; the error is handed to
        # the teardown handlers
        ctx.auto_pop(error)
def __call__(self, environ, start_response):
    """WSGI entry point.

    Delegates to :meth:`wsgi_app`, which is kept as a separate
    attribute so middleware can wrap it without losing the app object.
    """
    return self.wsgi_app(environ, start_response)
def __repr__(self):
    """Debug representation: ``<ClassName 'app_name'>``."""
    return '<%s %r>' % (type(self).__name__, self.name)
| [
"[email protected]"
] | |
9573e2d03f021cd4eee80553f9755cffcb49464b | 67d1deb7b3f5d6d03028e938dcba06c44392fe87 | /code/util/datastorage/SPEngine.py | 6a7a12a546407b6594073c15569ddfc6d1e5ddb7 | [] | no_license | GGXH/DS_stock | 389881403b0356072f2f5a7b64bd9098f87f9784 | c516dd4a546cb48d98198239ed33260acfb2adb8 | refs/heads/master | 2020-04-05T19:31:43.306362 | 2019-01-16T15:37:44 | 2019-01-16T15:37:44 | 157,138,472 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 78 | py | import util.status.messager as msg
class SPEngine:
def __init__(self):
| [
"[email protected]"
] | |
31541650d86bad1487aa424be00d8d85b69f5bed | a7da58ad91b007b3650003708eb91928f1e3684a | /bt5/erp5_wizard/WorkflowTemplateItem/portal_workflow/express_person_interaction_workflow/scripts/Assigment_openGlobalUserAssignment.py | edc6962f8dcdb769f3c5bbbaa8e6713520f5fb3f | [] | no_license | jgpjuniorj/j | 042d1bd7710fa2830355d4312a6b76103e29639d | dc02bfa887ffab9841abebc3f5c16d874388cef5 | refs/heads/master | 2021-01-01T09:26:36.121339 | 2020-01-31T10:34:17 | 2020-02-07T04:39:18 | 239,214,398 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 115 | py | assignment = state_change['object']
person = assignment.getParentValue()
person.Person_validateGlobalUserAccount()
| [
"[email protected]"
] | |
29062d2f0a805afd6dd76b3910c7c60daac28586 | 4178f2916d2da72cbb45454fbed941dcfe8f6460 | /POM_test/TestCase/Planting/TC_024.py | c2bd74617e33da7d058bb5c6912275c3dd5bd85e | [] | no_license | maxcrup007/Selenium_Webdriver_Python | 15196cb04ba5cafdc5b776c26d167f0b48fb0e14 | 6be7f0b9f53df1ba592957029e8a4d22e409d1c4 | refs/heads/main | 2023-03-24T21:04:31.976451 | 2021-03-22T09:16:04 | 2021-03-22T09:16:04 | 349,379,454 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,607 | py |
# Test the "Plant" flow when the planting quantity exceeds the available area
import time
import unittest
import sys
from selenium import webdriver
from POM_test.login import *
from POM_test.plantPage import *
import os
sys.path.append(os.path.join(os.path.dirname(__file__), "...", "..."))
class TestPlanting_24(unittest.TestCase):
    """End-to-end UI test for the "Plant" flow.

    Scenario: enter a planting quantity that exceeds the available
    area and submit the form.
    """

    @classmethod
    def setUpClass(self):
        # TODO(review): chromedriver path is hard-coded to one developer's
        # machine — consider webdriver-manager or an env variable.
        self.driver = webdriver.Chrome(executable_path="C:/Users/voraw/Downloads/Compressed/webdriver/chromedriver/chromedriver")
        self.driver.implicitly_wait(10)
        self.driver.maximize_window()

    def test_login_valid(self):
        driver = self.driver
        # log in with a demo account, then walk through the planting form
        self.driver.get("https://top-upstream-client.mulberrysoft.com/#/older/activity")
        login = LoginPage(driver)
        login.enter_username("demo005")
        login.enter_password("123456")
        login.click_login()
        time.sleep(2)
        plant = PlantPage(driver)
        plant.into_plantPage()
        plant.upload_picture()
        time.sleep(2)
        plant.next_function()
        time.sleep(2)
        plant.plant_enter_value("1000000")
        # planting quantity deliberately larger than the area under test
        time.sleep(2)
        plant.plant_enter_area("10")
        time.sleep(2)
        plant.plant_enter_crops()
        time.sleep(2)
        # kept for reference: raw locators replaced by page-object calls
        # driver.find_element_by_xpath("//ion-list[2]/ion-item/ion-select").click()
        # driver.find_element_by_xpath("//button/div/div[2]").click()
        # driver.find_element_by_xpath("//button[2]/span").click()
        plant.plant_enter_garden()
        time.sleep(2)
        plant.plant_enter_unit()
        time.sleep(2)
        plant.plant_enter_area_unit()
        time.sleep(2)
        ########################################################################
        plant.plant_enter_products("100")
        time.sleep(2)
        plant.plant_enter_unit_products()
        time.sleep(2)
        plant.plant_enter_paid("1500")
        time.sleep(2)
        plant.plant_enter_submit()
        time.sleep(2)

    @classmethod
    def tearDownClass(cls):
        cls.driver.close()
        cls.driver.quit()
        print("Test Completed")
| [
"[email protected]"
] | |
e3275a76d2f0ad30f2d8dc25ef528b0dd70399d0 | 6f9a29946dc107cd44d88cf07c9d715ebe4208be | /source/services/music/music_service.py | 61d9a993b50f6a801a8e9c6457c4172dae92f090 | [] | no_license | cash2one/gongzhuhao | 66bb14439a2265175bdd4b2f585456fcf47922bf | 0596bcb429674b75243d343c73e0f022b6d86820 | refs/heads/master | 2021-01-18T15:38:37.258737 | 2015-10-28T09:13:33 | 2015-10-28T09:13:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,709 | py | #encoding:utf-8
__author__ = 'frank'
from services.base_services import BaseService
from models.share_do import ShareMusic
from utils.upload_utile import delete_from_oss
from tornado.options import options
class MusicServices(BaseService):
    """Service-layer CRUD operations for shared background music records."""

    def create_share_music(self, **kwargs):
        '''
        Create a new background music record.

        :param kwargs: expects ``music_name`` (display name) and
            ``request_url`` (URL of the uploaded audio file)
        :return: the persisted ShareMusic instance
        '''
        share_music = ShareMusic()
        share_music.Fmusic_name = kwargs.get('music_name')
        share_music.Fmusic_url = kwargs.get('request_url')
        self.db.add(share_music)
        self.db.commit()
        return share_music

    def query_share_music(self, **kwargs):
        '''
        Query background music records, excluding soft-deleted rows.

        Optional filters in ``kwargs``: ``start_date`` / ``end_date``
        (creation-time range) and ``music_name`` (substring match).

        :return: a SQLAlchemy query object (not yet executed)
        '''
        query = self.db.query(ShareMusic).filter(ShareMusic.Fdeleted == 0)
        if kwargs.get('start_date',''):
            query = query.filter(ShareMusic.Fcreate_time > kwargs.get('start_date'))
        if kwargs.get('end_date',''):
            # extend the end date to the last second of that day
            query = query.filter(ShareMusic.Fcreate_time < kwargs.get('end_date')+' 23:59:59')
        if kwargs.get('music_name',''):
            query = query.filter(ShareMusic.Fmusic_name.like('%'+kwargs.get('music_name')+'%'))
        return query

    def delete_music(self, music_id):
        '''
        Soft-delete a background music record and remove its file from OSS.

        :param music_id: primary key of the record to delete
        :return: None
        '''
        query = self.db.query(ShareMusic).filter(ShareMusic.Fdeleted == 0,ShareMusic.Fid == music_id)
        # NOTE(review): [34:] presumably strips a fixed-length OSS URL
        # prefix to recover the object key — verify against the stored
        # URL format before changing.
        filename = query.scalar().Fmusic_url[34:]
        data = {}
        data['Fdeleted'] = 1
        query.update(data)
        self.db.commit()
        delete_from_oss(options.MEDIA_BUCKET,filename)
| [
"[email protected]"
] | |
ca80285ee2929ac20cf43ad7fff92fb60b9efdea | f81c8e4d702d5c88af92c691d35b6f9c0d2f4390 | /backend/dark_waterfall_26026/wsgi.py | e5039146e98431c055564aea9a661c25a52173fd | [] | no_license | crowdbotics-apps/dark-waterfall-26026 | bdfd44240dae3c1ad20ed8b7a8da701308db5958 | 95f9eda959b6d21778ff59db2c5c9a585d6a670c | refs/heads/master | 2023-04-12T17:31:25.091727 | 2021-04-29T19:14:56 | 2021-04-29T19:14:56 | 362,922,208 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 417 | py | """
WSGI config for dark_waterfall_26026 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at the project settings module, then expose the WSGI
# callable that servers (gunicorn, uWSGI, mod_wsgi, ...) import.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'dark_waterfall_26026.settings')
application = get_wsgi_application()
| [
"[email protected]"
] | |
fa65a404c6278a30b5a8e1d2c8079c85f4f85dce | 449f6888bff99d7e4fd86fa6ffa6b3316084e34e | /Solutions/018.py | b91be816ebd66827c26c6ae1526c59a9b3b118b9 | [
"MIT"
] | permissive | All3yp/Daily-Coding-Problem-Solutions | e94679a5858b8a83ffe58d14b824fe80de21a694 | 199b9606474edb45bd14b20b511b691ada437586 | refs/heads/master | 2023-03-18T21:06:30.675503 | 2021-03-13T03:52:31 | 2021-03-13T03:52:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,622 | py | """
Problem:
Given an array of integers and a number k, where 1 <= k <= length of the array, compute
the maximum values of each subarray of length k.
For example, given array = [10, 5, 2, 7, 8, 7] and k = 3, we should get: [10, 7, 8, 8],
since:
10 = max(10, 5, 2)
7 = max(5, 2, 7)
8 = max(2, 7, 8)
8 = max(7, 8, 7)
Do this in O(n) time and O(k) space. You can modify the input array in-place and you do
not need to store the results. You can simply print them out as you compute them.
"""
from collections import deque
from typing import List
def calc_max_per_k_elems(arr: List[int], k: int) -> List[int]:
    """Return the maximum of every contiguous window of length k in arr.

    Uses the classic monotonic-deque algorithm: the deque holds indices
    of candidate maxima with their values in decreasing order, so the
    front is always the current window's maximum.  O(n) time, O(k) space.

    Fix over the previous version: the return type is now always a list
    (it used to return None for an empty array and a bare scalar when
    len(arr) <= k).

    Args:
        arr: the input sequence.
        k: window length; per the problem, 1 <= k <= len(arr).

    Returns:
        A list with len(arr) - k + 1 maxima, or [] when the input is
        empty or k is out of range.
    """
    if not arr or k <= 0 or k > len(arr):
        return []

    result = []
    dq = deque()  # indices of candidates; arr values strictly decreasing
    for i, value in enumerate(arr):
        # drop the front index once it falls out of the window
        if dq and dq[0] <= i - k:
            dq.popleft()
        # drop candidates smaller than the incoming value
        while dq and arr[dq[-1]] < value:
            dq.pop()
        dq.append(i)
        # a full window exists from index k-1 onward
        if i >= k - 1:
            result.append(arr[dq[0]])
    return result
# Manual smoke check when run as a script.
if __name__ == "__main__":
    print(calc_max_per_k_elems([10, 5, 2, 7, 8, 7], 3))
    print(calc_max_per_k_elems([1, 91, 17, 46, 45, 36, 9], 3))
"""
SPECS:
TIME COMPLEXITY: O(n)
SPACE COMPLEXITY: O(k)
"""
| [
"[email protected]"
] | |
15b30860d116d827c4c3de9db43e689dffc3d70f | 6c6531b6f93817a2720ff9b78fce6ad4d5bb500c | /PericiasMedicas/company/migrations/0007_auto_20191230_1711.py | 5603642c2f74d64ccb13c8e644b23e6a6f6f2902 | [] | no_license | massariolmc/periciasmedicas | 6d3c142a5f5e308b049d57b30d698526c8aecda3 | 9b5b0e192bf51bb1b297f0983b2a0ab0c24b31b1 | refs/heads/master | 2022-12-08T11:13:10.981476 | 2020-02-21T23:32:44 | 2020-02-21T23:32:44 | 235,667,801 | 0 | 0 | null | 2022-11-22T05:15:44 | 2020-01-22T21:12:16 | JavaScript | UTF-8 | Python | false | false | 472 | py | # Generated by Django 2.2.7 on 2019-12-30 21:11
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: redefines company.state_registration as an
    # optional CharField (blank/null allowed, empty-string default,
    # max_length 100).

    dependencies = [
        ('company', '0006_auto_20191230_1629'),
    ]

    operations = [
        migrations.AlterField(
            model_name='company',
            name='state_registration',
            field=models.CharField(blank=True, default='', max_length=100, null=True, verbose_name='Inscrição Estadual'),
        ),
    ]
| [
"[email protected]"
] | |
ae7a1e257d3423cfd604b1e6c27ffe19ee1012f5 | 6b3e8b4291c67195ad51e356ba46602a15d5fe38 | /rastervision2/examples/utils.py | d521e74560b2de4494f0d0ff4344208ee3e221b0 | [
"LicenseRef-scancode-generic-cla",
"Apache-2.0"
] | permissive | csaybar/raster-vision | 4f5bb1125d4fb3ae5c455db603d8fb749221dd74 | 617ca15f64e3b8a391432306a743f7d0dfff352f | refs/heads/master | 2021-02-26T19:02:53.752971 | 2020-02-27T17:25:31 | 2020-02-27T17:25:31 | 245,547,406 | 2 | 1 | NOASSERTION | 2020-03-07T01:24:09 | 2020-03-07T01:24:08 | null | UTF-8 | Python | false | false | 4,864 | py | import csv
from io import StringIO
import tempfile
import os
import rasterio
from shapely.strtree import STRtree
from shapely.geometry import shape, mapping
import shapely
from rastervision.core import Box
from rastervision.data import RasterioCRSTransformer, GeoJSONVectorSource
from rastervision.utils.files import (file_to_str, file_exists, get_local_path,
upload_or_copy, make_dir, json_to_file)
from rastervision.filesystem import S3FileSystem
def str_to_bool(x):
    """Convert the strings 'true'/'false' (case-insensitive) to booleans.

    Non-string values pass through unchanged, so already-parsed booleans
    are accepted.

    Raises:
        ValueError: if x is a string other than 'true' or 'false'.
    """
    # isinstance is the idiomatic type check (also accepts str subclasses,
    # unlike the previous `type(x) == str`).
    if isinstance(x, str):
        lowered = x.lower()
        if lowered == 'true':
            return True
        if lowered == 'false':
            return False
        raise ValueError('{} is expected to be true or false'.format(x))
    return x
def get_scene_info(csv_uri):
    """Read the scene-listing CSV at ``csv_uri`` and return its rows.

    Each row is returned as a list of string fields.
    """
    contents = file_to_str(csv_uri)
    rows = csv.reader(StringIO(contents), delimiter=',')
    return list(rows)
def crop_image(image_uri, window, crop_uri):
    """Crop ``window`` out of the raster at ``image_uri`` and save it to ``crop_uri``.

    The crop is written to a temp file first and then uploaded/copied to
    the (possibly remote) destination URI.
    """
    # NOTE(review): the dataset handle is never closed — consider a
    # with-block around rasterio.open.
    im_dataset = rasterio.open(image_uri)
    rasterio_window = window.rasterio_format()
    im = im_dataset.read(window=rasterio_window)

    with tempfile.TemporaryDirectory() as tmp_dir:
        crop_path = get_local_path(crop_uri, tmp_dir)
        make_dir(crop_path, use_dirname=True)

        # keep georeferencing correct for the crop: new size plus a
        # transform shifted to the window's origin
        meta = im_dataset.meta
        meta['width'], meta['height'] = window.get_width(), window.get_height()
        meta['transform'] = rasterio.windows.transform(
            rasterio_window, im_dataset.transform)

        with rasterio.open(crop_path, 'w', **meta) as dst:
            dst.colorinterp = im_dataset.colorinterp
            dst.write(im)

        upload_or_copy(crop_path, crop_uri)
def save_image_crop(image_uri,
                    image_crop_uri,
                    label_uri=None,
                    label_crop_uri=None,
                    size=600,
                    min_features=10,
                    vector_labels=True):
    """Save a crop of an image to use for testing.

    If label_uri is set, the crop needs to cover >= min_features.

    Args:
        image_uri: URI of original image
        image_crop_uri: URI of cropped image to save
        label_uri: optional URI of label file
        label_crop_uri: optional URI of cropped labels to save
        size: height and width of crop
        min_features: minimum number of label features the crop must cover
        vector_labels: if True, labels are vector (GeoJSON); otherwise the
            label file is treated as a raster and cropped with the image

    Raises:
        ValueError if cannot find a crop satisfying min_features constraint.
    """
    # skip entirely if a crop already exists at the destination
    if not file_exists(image_crop_uri):
        print('Saving test crop to {}...'.format(image_crop_uri))
        # temporarily mutate os.environ (restored in the finally block)
        # so S3 reads work against requester-pays buckets
        old_environ = os.environ.copy()
        try:
            request_payer = S3FileSystem.get_request_payer()
            if request_payer == 'requester':
                os.environ['AWS_REQUEST_PAYER'] = request_payer
            im_dataset = rasterio.open(image_uri)
            h, w = im_dataset.height, im_dataset.width

            extent = Box(0, 0, h, w)
            windows = extent.get_windows(size, size)
            if label_uri and vector_labels:
                crs_transformer = RasterioCRSTransformer.from_dataset(
                    im_dataset)
                vs = GeoJSONVectorSource(label_uri, crs_transformer)
                geojson = vs.get_geojson()
                geoms = []
                for f in geojson['features']:
                    g = shape(f['geometry'])
                    geoms.append(g)
                # spatial index for fast window-vs-features queries
                tree = STRtree(geoms)

            # pixel-to-map coordinate shim for shapely.ops.transform;
            # relies on crs_transformer defined above (vector path only)
            def p2m(x, y, z=None):
                return crs_transformer.pixel_to_map((x, y))

            # NOTE(review): the loop variable shadows the image width `w`
            # read above — harmless here, but worth renaming.
            for w in windows:
                use_window = True
                if label_uri and vector_labels:
                    w_polys = tree.query(w.to_shapely())
                    use_window = len(w_polys) >= min_features
                    if use_window and label_crop_uri is not None:
                        print('Saving test crop labels to {}...'.format(
                            label_crop_uri))

                        # project the covered features to map coords and
                        # write them out as a GeoJSON FeatureCollection
                        label_crop_features = [
                            mapping(shapely.ops.transform(p2m, wp))
                            for wp in w_polys
                        ]
                        label_crop_json = {
                            'type':
                            'FeatureCollection',
                            'features': [{
                                'geometry': f
                            } for f in label_crop_features]
                        }
                        json_to_file(label_crop_json, label_crop_uri)

                if use_window:
                    crop_image(image_uri, w, image_crop_uri)

                    # raster labels are cropped with the same window
                    if not vector_labels and label_uri and label_crop_uri:
                        crop_image(label_uri, w, label_crop_uri)

                    break

            if not use_window:
                raise ValueError('Could not find a good crop.')
        finally:
            # restore the original environment in all cases
            os.environ.clear()
            os.environ.update(old_environ)
| [
"[email protected]"
] | |
45158fd73f856d10753fdab1158bbd52cbc902c4 | d94b6845aeeb412aac6850b70e22628bc84d1d6d | /es_maml/policies.py | f901bf44a33836629722349dd7c0953bd0a94da7 | [
"CC-BY-4.0",
"Apache-2.0"
] | permissive | ishine/google-research | 541aea114a68ced68736340e037fc0f8257d1ea2 | c1ae273841592fce4c993bf35cdd0a6424e73da4 | refs/heads/master | 2023-06-08T23:02:25.502203 | 2023-05-31T01:00:56 | 2023-05-31T01:06:45 | 242,478,569 | 0 | 0 | Apache-2.0 | 2020-06-23T01:55:11 | 2020-02-23T07:59:42 | Jupyter Notebook | UTF-8 | Python | false | false | 9,160 | py | # coding=utf-8
# Copyright 2023 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains policies used in MAML."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import numpy as np
import tensorflow.compat.v1 as tf
import tensorflow_probability as tfp
class Policy(abc.ABC):
  r"""Abstract class for different policies \Pi: S -> A.

  Class is responsible for creating different policies and provides an
  interface for computing actions recommended by policies in different input
  states. In particular, this class provides an interface that accepts
  compressed vectorized form of the policy and decompresses it.

  Standard procedure for improving the parameters of the policy with an
  interface given by the class:

    policy = policies.ParticularClassThatInheritsFromBaseClass(...)
    vectorized_network = policy.get_initial()
    while(...):
      new_vectorized_network = SomeTransformationOf(vectorized_network)
      policy.update(new_vectorized_network)

  and SomeTransformationOf is a single step of some optimization procedure
  such as gradient descent that sees the policy in the vectorized form.
  """
  # Fix: the previous `__metaclass__ = abc.ABCMeta` attribute is a
  # Python-2-only mechanism and is silently ignored on Python 3, so the
  # @abc.abstractmethod decorators were not enforced. Inheriting from
  # abc.ABC makes instantiation of incomplete subclasses a TypeError.

  @abc.abstractmethod
  def update(self, vectorized_parameters):
    """Updates the policy using new parameters from <vectorized_parameters>.

    The size of <vectorized_parameters> should equal the total number of
    biases and weights of the network. Weights encoding connection matrices
    come before biases, and the per-matrix/per-bias ordering follows the
    order of those tensors in the decompressed network. Compression details
    depend on the architecture and are documented in the concrete subclasses.

    Args:
      vectorized_parameters: parameters of the neural network in the
        vectorized form.

    Returns:
    """
    raise NotImplementedError('Abstract method')

  @abc.abstractmethod
  def get_action(self, state):
    """Returns the action proposed by the policy in <state>.

    Args:
      state: input state

    Returns:
      Action proposed by the policy represented by an object of the class in
      a given state.
    """
    raise NotImplementedError('Abstract method')

  @abc.abstractmethod
  def get_initial(self):
    """Returns the default parameters of the policy in the vectorized form.

    Returns:
      Numpy array encoding in the vectorized form initial parameters of the
      policy.
    """
    raise NotImplementedError('Abstract method')

  @abc.abstractmethod
  def get_total_num_parameters(self):
    """Outputs total number of parameters of the policy.

    Returns:
      Total number of parameters used by the policy.
    """
    raise NotImplementedError('Abstract method')
class BasicTFPolicy(Policy):
  """Basic stochastic Policy implemented in Tensorflow (TF1 graph mode).

  Builds an MLP with ReLU hidden layers and two output heads; actions are
  sampled from Normal(loc=main_out, scale=secondary_out).  Parameters are
  mirrored into a flat NumPy vector (self.np_params) so the vectorized
  Policy interface can be served without touching TF variables directly.
  """
  def __init__(self, state_dimensionality, action_dimensionality, hidden_layers,
               scope):
    self.state_dimensionality = state_dimensionality
    self.action_dimensionality = action_dimensionality
    # Batched placeholders for states and (supervised) target actions.
    self.input_ph = tf.placeholder(
        dtype=tf.float32, shape=[None, self.state_dimensionality])
    self.output_ph = tf.placeholder(
        dtype=tf.float32, shape=[None, self.action_dimensionality])
    with tf.variable_scope(scope, reuse=tf.AUTO_REUSE):
      self.out = self.input_ph
      for i, layer_size in enumerate(hidden_layers):
        self.out = tf.layers.dense(
            self.out, layer_size, activation=tf.nn.relu, name='h' + str(i))
      self.main_out = tf.layers.dense(
          self.out, self.action_dimensionality, name='main_out')
      self.secondary_out = tf.layers.dense(
          self.out, self.action_dimensionality, name='secondary_out')
      # NOTE(review): secondary_out is used directly as the Normal scale with
      # no positivity constraint (e.g. softplus) -- confirm this is intended.
      self.action = tfp.distributions.Normal(
          loc=self.main_out, scale=self.secondary_out).sample()
    self.loss = tf.losses.mean_squared_error(self.main_out, self.output_ph)
    self.obj_tensor = -1.0 * self.loss
    self.tf_params = tf.trainable_variables(scope)
    # Per-variable shapes/sizes define the layout of the flat parameter vector.
    self.shapes = [v.shape.as_list() for v in self.tf_params]
    self.sizes = [int(np.prod(s)) for s in self.shapes]
    self.total_nb_parameters = sum(self.sizes)
    self.assign_ph_dict = {
        v: tf.placeholder(dtype=tf.float32, shape=v.shape.as_list())
        for v in self.tf_params
    }
    self.assign_ops = []
    for v in self.tf_params:
      self.assign_ops.append(v.assign(self.assign_ph_dict[v]))
    with tf.control_dependencies(self.assign_ops):
      # This is needed to input Numpy Params into network temporarily
      self.action = tf.identity(self.action)
    self.sess = tf.Session()
    self.sess.run(tf.global_variables_initializer())
    # Snapshot of the freshly initialized variables, flattened and concatenated
    # in tf_params order (weights/biases interleaved per variable).
    self.np_params = np.concatenate([
        self.sess.run(tf.reshape(tf_param, [-1])) for tf_param in self.tf_params
    ])
  def update(self, flattened_weights):
    """Stores new vectorized parameters; applied lazily in get_action()."""
    self.np_params = flattened_weights
  def get_action(self, state):
    """Samples an action for <state>, feeding np_params via assign ops."""
    ph_dict = {}
    for ind, v in enumerate(self.tf_params):
      # Slice this variable's segment out of the flat vector and reshape it.
      numpy_flat_val = self.np_params[sum(self.sizes[:ind]
                                          ):sum(self.sizes[:ind + 1])]
      numpy_reshaped = np.reshape(numpy_flat_val, self.shapes[ind])
      v_ph = self.assign_ph_dict[v]
      ph_dict[v_ph] = numpy_reshaped
    ph_dict[self.input_ph] = state.reshape(-1, self.state_dimensionality)
    action_numpy = self.sess.run(self.action, feed_dict=ph_dict)
    return action_numpy.flatten()
  def get_initial(self):
    """Returns the current flat parameter vector."""
    return self.np_params
  def get_total_num_parameters(self):
    return self.total_nb_parameters
class DeterministicNumpyPolicy(Policy):
  """Deterministic feedforward policy implemented with plain NumPy.

  Parameters are held twice: as per-layer weight matrices / bias vectors
  (for fast forward passes) and as offsets into one flat parameter vector
  (for the vectorized Policy interface).
  """
  def __init__(self,
               state_dimensionality,
               action_dimensionality,
               hidden_layers,
               init_sd=None):
    self.state_dimensionality = state_dimensionality
    self.action_dimensionality = action_dimensionality
    # Full layer-size chain: input, hidden..., output.
    self.layers = [state_dimensionality] + list(hidden_layers) + [action_dimensionality]
    self.weights = []
    self.biases = []
    self.weight_positions = []
    self.bias_positions = []
    self.init_params = []
    offset = 0
    for fan_in, fan_out in zip(self.layers[:-1], self.layers[1:]):
      if init_sd is None:
        # He-style scale derived from the first layer's fan-in; once set it is
        # reused for every later layer (preserves the original behavior).
        init_sd = np.sqrt(2.0 / fan_in)
      flat_w = init_sd * np.random.normal(0, 1, size=fan_out * fan_in)
      self.init_params.extend(flat_w.tolist())
      self.weights.append(np.reshape(flat_w, (fan_out, fan_in)))
      self.weight_positions.append(offset)
      offset += fan_out * fan_in
      zero_b = np.zeros(fan_out)
      self.init_params.extend(zero_b.tolist())
      self.biases.append(zero_b)
      self.bias_positions.append(offset)
      offset += fan_out
    # Trailing sentinel equals the total number of parameters.
    self.weight_positions.append(offset)
  def update(self, flat_weights):
    """Decompresses <flat_weights> into the per-layer weight/bias arrays."""
    for idx, (fan_in, fan_out) in enumerate(zip(self.layers[:-1], self.layers[1:])):
      begin = self.weight_positions[idx]
      end = begin + fan_out * fan_in
      self.weights[idx] = np.reshape(
          np.array(flat_weights[begin:end]), (fan_out, fan_in))
      begin = self.bias_positions[idx]
      end = begin + fan_out
      self.biases[idx] = np.reshape(
          np.array(flat_weights[begin:end]), (fan_out))
  def get_action(self, state):
    """Forward pass: ReLU on hidden layers, tanh squashing on the output."""
    activations = np.reshape(np.array(state), (self.state_dimensionality))
    last = len(self.weights) - 1
    for idx, (w, b) in enumerate(zip(self.weights, self.biases)):
      activations = np.matmul(w, activations)
      activations += b
      if idx < last:
        np.maximum(activations, 0, activations)  # in-place ReLU
    np.tanh(activations, activations)  # this is sometimes not needed
    return activations
  def get_initial(self):
    """Initial parameters (random weights, zero biases) in vectorized form."""
    return np.array(self.init_params)
  def get_total_num_parameters(self):
    return self.weight_positions[-1]
| [
"[email protected]"
] | |
6035dce05ab1ceb238455998bedfa82823ff466e | 3471728291ab015e6780763218f96a369897f5c4 | /imagefactory_plugins/OpenStack/glance_upload.py | c2b28347f49d65700b6205043a0e6637b27930f4 | [
"Apache-2.0"
] | permissive | zyga/imagefactory | 913fb4a987a746cff72f3074e0e338e896ac2e65 | b2a57168f1ef6608aedad73ed7ccd1e3626b2967 | refs/heads/master | 2020-03-24T07:33:43.270977 | 2018-06-26T19:37:55 | 2018-06-26T19:37:55 | 142,568,326 | 1 | 0 | Apache-2.0 | 2018-07-27T11:20:36 | 2018-07-27T11:20:36 | null | UTF-8 | Python | false | false | 1,452 | py | # Copyright 2012 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from glance import client as glance_client
from pprint import pprint
def glance_upload(image_filename, creds = None,
                  host = "0.0.0.0", port = "9292", token = None):
    """Upload a local qcow2 image to a Glance server.

    Args:
        image_filename: path of the image file to upload.
        creds: Glance credential dict; when None a fresh 'noauth' credential
            set is built per call.  (The original used a mutable dict as the
            default argument, sharing one object across all calls.)
        host: Glance API host.
        port: Glance API port (string, as the client expects).
        token: pre-obtained auth token, if any.

    Returns:
        The id of the newly registered image.
    """
    if creds is None:
        creds = {'auth_url': None, 'password': None, 'strategy': 'noauth',
                 'tenant': None, 'username': None}
    image_meta = {'container_format': 'bare',
                  'disk_format': 'qcow2',
                  'is_public': True,
                  'min_disk': 0,
                  'min_ram': 0,
                  'name': 'Factory Test Image',
                  'properties': {'distro': 'rhel'}}
    c = glance_client.Client(host=host, port=port,
                             auth_tok=token, creds=creds)
    # Open in binary mode: image payloads are binary data (text mode would
    # corrupt them under Python 3); "with" guarantees the handle is closed
    # even if add_image raises.
    with open(image_filename, "rb") as image_data:
        image_meta = c.add_image(image_meta, image_data)
    return image_meta['id']
# Module-level smoke test: uploads a fixed local image and prints the new
# Glance image id.  NOTE(review): Python 2 `print` statement, and the upload
# fires on import -- consider guarding with `if __name__ == "__main__":`.
image_id = glance_upload("/root/base-image-f19e3f9b-5905-4b66-acb2-2e25395fdff7.qcow2")
print image_id
| [
"[email protected]"
] | |
c801f423eba575edaad8ae847ba8affbbb0388d1 | 7e2214619d5948d0d5f7e22f46dee679d722d7b3 | /dealOrNoDeal.py | 309ffe88c017c45a4345d69b454a2286181be26f | [] | no_license | udwivedi394/misc | ef6add31a92e0d2d0505e8be016f0a868a6ac730 | 64dffb5db04c38465fffb415bec1d433b1caa8f6 | refs/heads/master | 2021-09-09T06:16:32.124586 | 2018-03-14T05:10:55 | 2018-03-14T05:10:55 | 116,167,845 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,934 | py | #Nest away
import sys
def dealorNoDeal05(A, B):
    """Kadane-style scan over the profit list [B - a for a in A].

    Builds lookup[i] = B - A[i], then accumulates runs of non-negative
    prefix sums, returning the best running total seen at indices >= 1.

    Preserves the original variant's quirk: lookup[0] alone is never a
    candidate for the maximum, so a length-1 input always yields 0.
    Ported from Python 2 (`xrange`) to `range`.
    """
    lookup = [B - a for a in A]
    maxi = 0
    for i in range(1, len(lookup)):
        if lookup[i] >= 0:
            lookup[i] += lookup[i - 1] if lookup[i - 1] >= 0 else 0
            maxi = max(maxi, lookup[i])
    return maxi
def dealorNoDeal(A, B):
    """Maximum-subarray sum (Kadane) over A, floored at 0; B is unused.

    Mutates A in place: A[i] becomes the best subarray sum ending at i
    (itself floored at 0 for i == 0).  An all-negative input yields 0.
    Ported from Python 2 (`xrange`) to `range`; requires A to support
    item assignment (the __main__ driver passes a list).
    """
    lookup = A  # intentional alias: the caller's list doubles as scratch space
    maxi = 0
    for i in range(len(lookup)):
        lookup[i] = max(lookup[i], lookup[i] + lookup[i - 1] if i > 0 else 0)
        maxi = max(maxi, lookup[i])
    return maxi
def dealorNoDeal03(A, B):
    """Variant of dealorNoDeal05 that rewrites A in place to B - A[i] first.

    Mutates A: after the call, A holds the accumulated scan values.  As in
    dealorNoDeal05, index 0 alone is never a candidate for the maximum.
    Ported from Python 2 (`xrange`) to `range`.
    """
    lookup = A  # intentional alias: rewritten in place
    for i in range(len(lookup)):
        lookup[i] = B - lookup[i]
    maxi = 0
    for i in range(1, len(lookup)):
        if lookup[i] >= 0:
            lookup[i] += lookup[i - 1] if lookup[i - 1] >= 0 else 0
            maxi = max(maxi, lookup[i])
    return maxi
def dealorNoDeal04(A, B):
    """Variant folding the B - A[i] rewrite into the accumulation pass.

    Mutates A in place and echoes the final scratch list to stdout (debug
    aid kept from the original).  Returns the best accumulated value.

    Fix: the original used the Python 2 statement `print lookup`, which is
    a SyntaxError under Python 3; also ported `xrange` to `range`.
    """
    lookup = A  # intentional alias: rewritten in place
    maxi = 0
    for i in range(len(lookup)):
        if B - lookup[i] >= 0:
            lookup[i] = (B - lookup[i]) + (lookup[i - 1] if i > 0 and lookup[i - 1] >= 0 else 0)
            maxi = max(maxi, lookup[i])
        else:
            lookup[i] = B - lookup[i]
    print(lookup)  # debug trace preserved from the original
    return maxi
"""
if __name__=="__main__":
f1 = open("testCaseMaxSeq02.txt",'r')
for x in xrange(int(f1.readline().strip())):
#n,c = map(int,sys.stdin.readline().strip().split())
n = map(int,f1.readline().strip().split())
A = map(int,f1.readline().strip().split())
c = 0
result = dealorNoDeal(A,c)
sys.stdout.write(str(result))
print
f1.close()
"""
if __name__=="__main__":
    # Competitive-programming driver.  Input: T, then per test case a line
    # "n c" followed by n space-separated values; each value x is mapped to
    # c - x before running the Kadane scan in dealorNoDeal.
    # NOTE(review): Python 2 only -- `xrange` and list-returning `map` are
    # required (dealorNoDeal index-assigns into A), and the bare `print`
    # emits a newline only under Python 2.
    for x in xrange(int(sys.stdin.readline().strip())):
        n,c = map(int,sys.stdin.readline().strip().split())
        #n = map(int,sys.stdin.readline().strip().split())
        A = map((lambda x: c-int(x)),sys.stdin.readline().strip().split())
        #c = 0
        result = dealorNoDeal(A,c)
        sys.stdout.write(str(result))
        print
        #"""
| [
"[email protected]"
] | |
0de2dcdd25e4cabb3723ac7cb45dbf917637dc3b | 909b93b8df2a79a2ba7567587604d764a34281fc | /bot/app_streamer.py | 9f885eb613ee7ad8498df4d050264a4778119448 | [] | no_license | ArtemZaZ/PultBot | f321282534c02789ac5b868844da15fe4614b9ac | d23b867eb7eda78b006fa32f503148da2a4d6d7f | refs/heads/master | 2020-03-07T05:52:29.748385 | 2019-12-06T12:23:14 | 2019-12-06T12:23:14 | 127,307,917 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 13,613 | py | import gi
gi.require_version('Gst', '1.0')
from gi.repository import Gst
import sys
import threading
import logging
import numpy as np
from bot.common import *
HOST = '127.0.0.1'
RTP_PORT = 5000
class AppSrcStreamer(object):
    """Streams frames pushed via write() over RTP/RTCP with GStreamer.

    An appsrc accepts H.264 / MJPEG / raw RGB frames; an rtpbin payloads and
    sends them to a UDP host:port.  When an onFrameCallback is supplied, the
    pipeline is tee-d and decoded RGB frames are delivered to the callback
    on demand (see frameRequest()/_newSample()).
    """
    def __init__(self, video=VIDEO_MJPEG, resolution=(640, 480), framerate=30,
                 onFrameCallback=None, useOMX=False, scale=1):
        self._host = HOST
        self._port = RTP_PORT
        self._width = resolution[0]
        self._height = resolution[1]
        self._scaleWidth = int(self._width * scale)
        self._scaleHeight = int(self._height * scale)
        self._needFrame = threading.Event() # flag: an OpenCV frame has been requested
        self.playing = False
        self.paused = False
        self._onFrameCallback = None
        # NOTE(review): the callback is only registered for non-RAW video, so
        # for VIDEO_RAW the frame branch is never built -- confirm intended.
        if video != VIDEO_RAW:
            if (not onFrameCallback is None) and callable(onFrameCallback):
                self._onFrameCallback = onFrameCallback # handler for the "OpenCV frame ready" event
        # initialize GStreamer
        Gst.init(None)
        # build the pipeline
        self._make_pipeline(video, self._width, self._height, framerate, useOMX, scale)
        self.bus = self.pipeline.get_bus()
        self.bus.add_signal_watch()
        self.bus.connect('message', self._onMessage)
        self.ready_pipeline()
    def _make_pipeline(self, video, width, height, framerate, useOMX, scale):
        """Creates and links all GStreamer elements for the chosen format."""
        # Build the GStreamer pipeline
        self.pipeline = Gst.Pipeline()
        self.rtpbin = Gst.ElementFactory.make('rtpbin')
        self.rtpbin.set_property('latency', 200)
        self.rtpbin.set_property('drop-on-latency', True) # drop frames that exceed the latency budget
        self.rtpbin.set_property('buffer-mode', 4)
        self.rtpbin.set_property('ntp-time-source', 3) # use clock-time as the NTP time source
        self.rtpbin.set_property('ntp-sync', True)
        self.rtpbin.set_property('rtcp-sync-send-time', False)
        # configure appsrc
        self.appsrc = Gst.ElementFactory.make('appsrc')
        self.appsrc.set_property('is-live', True)
        if video == VIDEO_H264:
            videoStr = 'video/x-h264'
        elif video == VIDEO_MJPEG:
            videoStr = 'image/jpeg'
        elif video == VIDEO_RAW:
            videoStr = 'video/x-raw,format=RGB'
        capstring = videoStr + ',width=' + str(width) \
                    + ',height=' + str(height) + ',framerate=' \
                    + str(framerate) + '/1'
        srccaps = Gst.Caps.from_string(capstring)
        self.appsrc.set_property('caps', srccaps)
        # print('RPi camera GST caps: %s' % capstring)
        if video == VIDEO_RAW:
            # Raw RGB input must be converted to I420 and JPEG-encoded before
            # the (jpeg) parser/payloader further down.
            self.videoconvertRAW = Gst.ElementFactory.make('videoconvert')
            self.videoconvertRAWFilter = Gst.ElementFactory.make('capsfilter', 'videoconvertfilter')
            videoconvertCaps = Gst.caps_from_string(
                'video/x-raw,format=I420') # intermediate format for JPEG encoding
            self.videoconvertRAWFilter.set_property('caps', videoconvertCaps)
            self.jpegenc = Gst.ElementFactory.make('jpegenc')
            # self.jpegenc = Gst.ElementFactory.make('vaapijpegenc')
            # self.jpegenc = Gst.ElementFactory.make('avenc_ljpeg')
            # jpegencCaps = Gst.Caps.from_string('video/x-raw,format=I420')
            # self.jpegenc.set_property('caps', jpegencCaps)
        if video == VIDEO_H264:
            parserName = 'h264parse'
        else:
            parserName = 'jpegparse'
        self.parser = Gst.ElementFactory.make(parserName)
        if video == VIDEO_H264:
            payloaderName = 'rtph264pay'
            # rtph264pay.set_property('config-interval', 10)
            # payloadType = 96
        else:
            payloaderName = 'rtpjpegpay'
            # payloadType = 26
        self.payloader = Gst.ElementFactory.make(payloaderName)
        # payloader.set_property('pt', payloadType)
        # For RTP Video
        self.udpsink_rtpout = Gst.ElementFactory.make('udpsink', 'udpsink_rtpout')
        # self.udpsink_rtpout.set_property('host', self._host)
        # self.udpsink_rtpout.set_property('port', self._port)
        self.udpsink_rtpout.set_property('sync', False)
        self.udpsink_rtpout.set_property('async', False)
        self.udpsink_rtcpout = Gst.ElementFactory.make('udpsink', 'udpsink_rtcpout')
        # self.udpsink_rtcpout.set_property('host', self._host)
        # self.udpsink_rtcpout.set_property('port', self._port + 1)
        self.udpsink_rtcpout.set_property('sync', False)
        self.udpsink_rtcpout.set_property('async', False)
        self.udpsrc_rtcpin = Gst.ElementFactory.make('udpsrc', 'udpsrc_rtcpin')
        srcCaps = Gst.Caps.from_string('application/x-rtcp')
        # self.udpsrc_rtcpin.set_property('port', self._port + 5)
        self.udpsrc_rtcpin.set_property('caps', srcCaps)
        # Apply the IP address and ports (RTP = port, RTCP out = port+1,
        # RTCP in = port+5; see setHost/setPort).
        self.setHost(self._host)
        self.setPort(self._port)
        if not self._onFrameCallback is None:
            # Second branch: tee the parsed stream, decode it, optionally
            # rescale, convert to RGB and hand frames to appsink.
            self.tee = Gst.ElementFactory.make('tee')
            self.rtpQueue = Gst.ElementFactory.make('queue', 'rtp_queue')
            self.frameQueue = Gst.ElementFactory.make('queue', 'frame_queue')
            if video == VIDEO_H264:
                if useOMX:
                    decoderName = 'omxh264dec' # works well; ~200% CPU load
                else:
                    decoderName = 'avdec_h264' # works poorly; ~120% CPU load
                # decoder = Gst.ElementFactory.make('avdec_h264_mmal') # did not work
            else:
                if useOMX:
                    decoderName = 'omxmjpegdec'
                else:
                    decoderName = 'avdec_mjpeg'
                # decoder = Gst.ElementFactory.make('jpegdec')
            self.decoder = Gst.ElementFactory.make(decoderName)
            self.videoconvert = Gst.ElementFactory.make('videoconvert')
            if scale != 1:
                self.videoscale = Gst.ElementFactory.make('videoscale')
                self.videoscaleFilter = Gst.ElementFactory.make('capsfilter', 'scalefilter')
                videoscaleCaps = Gst.caps_from_string('video/x-raw, width=%d, height=%d' % (
                    self._scaleWidth, self._scaleHeight)) # frame format after resizing
                self.videoscaleFilter.set_property('caps', videoscaleCaps)
            ### create our own sink that hands frames from GST over to OpenCV
            self.appsink = Gst.ElementFactory.make('appsink')
            cvCaps = Gst.caps_from_string('video/x-raw, format=RGB') # accepted frame format
            self.appsink.set_property('caps', cvCaps)
            self.appsink.set_property('sync', False)
            # appsink.set_property('async', False)
            self.appsink.set_property('drop', True)
            self.appsink.set_property('max-buffers', 5)
            self.appsink.set_property('emit-signals', True)
            self.appsink.connect('new-sample', self._newSample)
        # add all elements to the pipeline
        elemList = [self.appsrc, self.rtpbin, self.parser, self.payloader, self.udpsink_rtpout,
                    self.udpsink_rtcpout, self.udpsrc_rtcpin]
        if video == VIDEO_RAW:
            elemList.extend([self.videoconvertRAW, self.videoconvertRAWFilter, self.jpegenc])
        if not self._onFrameCallback is None:
            elemList.extend([self.tee, self.rtpQueue, self.frameQueue, self.decoder, self.videoconvert, self.appsink])
            if scale != 1:
                elemList.extend([self.videoscale, self.videoscaleFilter])
        for elem in elemList:
            if elem is None:
                logging.critical('GST elements could not be null')
                sys.exit(1)
            self.pipeline.add(elem)
        # link the elements
        if video == VIDEO_RAW:
            ret = self.appsrc.link(self.videoconvertRAW)
            ret = ret and self.videoconvertRAW.link(self.videoconvertRAWFilter)
            ret = ret and self.videoconvertRAWFilter.link(self.jpegenc)
            ret = ret and self.jpegenc.link(self.parser)
        else:
            ret = self.appsrc.link(self.parser)
        # link the rtpbin pads
        ret = ret and self.payloader.link_pads('src', self.rtpbin, 'send_rtp_sink_0')
        ret = ret and self.rtpbin.link_pads('send_rtp_src_0', self.udpsink_rtpout, 'sink')
        ret = ret and self.rtpbin.link_pads('send_rtcp_src_0', self.udpsink_rtcpout, 'sink')
        ret = ret and self.udpsrc_rtcpin.link_pads('src', self.rtpbin, 'recv_rtcp_sink_0')
        if self._onFrameCallback is None: # streaming without onFrameCallback: a single branch
            ret = ret and self.parser.link(self.payloader)
        else: # streaming plus frame delivery to onFrameCallback: two branches
            ret = ret and self.parser.link(self.tee)
            # branch 1: RTP
            ret = ret and self.rtpQueue.link(self.payloader)
            # branch 2: onFrame
            ret = ret and self.frameQueue.link(self.decoder)
            if scale != 1:
                ret = ret and self.decoder.link(self.videoscale)
                ret = ret and self.videoscale.link(self.videoscaleFilter)
                ret = ret and self.videoscaleFilter.link(self.videoconvert)
            else:
                ret = ret and self.decoder.link(self.videoconvert)
            ret = ret and self.videoconvert.link(self.appsink)
            # connect the tee to rtpQueue
            teeSrcPadTemplate = self.tee.get_pad_template('src_%u')
            rtpTeePad = self.tee.request_pad(teeSrcPadTemplate, None, None)
            rtpQueuePad = self.rtpQueue.get_static_pad('sink')
            ret = ret and (rtpTeePad.link(rtpQueuePad) == Gst.PadLinkReturn.OK)
            # connect the tee to frameQueue
            frameTeePad = self.tee.request_pad(teeSrcPadTemplate, None, None)
            frameQueuePad = self.frameQueue.get_static_pad('sink')
            ret = ret and (frameTeePad.link(frameQueuePad) == Gst.PadLinkReturn.OK)
        if not ret:
            logging.critical('GST elements could not be linked')
            sys.exit(1)
    def setHost(self, host):
        """Points both RTP and RTCP output sinks at <host>."""
        self._host = host
        self.udpsink_rtpout.set_property('host', host)
        self.udpsink_rtcpout.set_property('host', host)
    def setPort(self, port):
        """RTP goes to <port>, RTCP out to <port>+1, RTCP in on <port>+5."""
        self._port = port
        self.udpsink_rtpout.set_property('port', port)
        self.udpsink_rtcpout.set_property('port', port + 1)
        self.udpsrc_rtcpin.set_property('port', port + 5)
    def _newSample(self, sink): # callback invoked for every incoming decoded frame
        if self._needFrame.is_set(): # only act when a frame has been requested
            self._needFrame.clear() # reset the request flag
            sample = sink.emit('pull-sample')
            sampleBuff = sample.get_buffer()
            data = np.ndarray(
                (self._scaleHeight, self._scaleWidth, 3),
                buffer=sampleBuff.extract_dup(0, sampleBuff.get_size()), dtype=np.uint8)
            # invoke the handler with the pixel array plus frame width and
            # height; color format is RGB
            self._onFrameCallback(data, self._scaleWidth, self._scaleHeight)
            del sample
        return Gst.FlowReturn.OK
    def _onMessage(self, bus, message):
        """Bus handler: stop on EOS, tear down to NULL on errors."""
        # print('Message: %s' % str(message.type))
        t = message.type
        if t == Gst.MessageType.EOS:
            logging.info('Received EOS-Signal')
            self.stop_pipeline()
        elif t == Gst.MessageType.ERROR:
            error, debug = message.parse_error()
            logging.error('Received Error-Signal #%u: %s', error.code, debug)
            self.null_pipeline()
        # else:
        # print('Message: %s' % str(t))
    def play_pipeline(self):
        self.pipeline.set_state(Gst.State.PLAYING)
        logging.info('GST pipeline PLAYING')
        logging.info('Streaming RTP on %s:%d', self._host, self._port)
    def stop_pipeline(self):
        self.pause_pipeline()
        self.ready_pipeline()
    def ready_pipeline(self):
        self.pipeline.set_state(Gst.State.READY)
        logging.info('GST pipeline READY')
    def pause_pipeline(self):
        self.pipeline.set_state(Gst.State.PAUSED)
        logging.info('GST pipeline PAUSED')
    def null_pipeline(self):
        self.pipeline.set_state(Gst.State.NULL)
        logging.info('GST pipeline NULL')
    def write(self, s):
        """File-like hook: wraps <s> in a Gst buffer and pushes it to appsrc."""
        gstBuff = Gst.Buffer.new_wrapped(s)
        if not (gstBuff is None):
            self.appsrc.emit('push-buffer', gstBuff)
    def flush(self):
        """File-like hook: stopping the pipeline flushes pending data."""
        self.stop_pipeline()
    def frameRequest(self): # set the frame-request flag; True if the request was accepted
        if not self._needFrame.is_set():
            self._needFrame.set()
            return True
        return False
| [
"[email protected]"
] | |
c2835b1f8a3632284eca779d2dc1f17bfaf30295 | 6d501ea43b1a52bf4af44ae5677eba8b928ffec3 | /directory/signals.py | e1d22e0a309d7321f2db634715374ef5fabc6e4f | [] | no_license | mozilla/hive-django | 78d5e7bf687e2311a41d2b6d555b9671c4270b4d | bf95dce0af0148ecacde2256d235788fd79c7d5e | refs/heads/master | 2023-08-27T12:47:36.977377 | 2016-05-04T21:12:47 | 2016-05-04T21:12:47 | 55,106,672 | 0 | 2 | null | 2016-05-04T21:12:47 | 2016-03-31T00:12:58 | Python | UTF-8 | Python | false | false | 1,684 | py | from django.dispatch import receiver
from django.contrib.sites.models import Site
from django.db.models.signals import post_save
from django.contrib.auth.signals import user_logged_in
from django.contrib import messages
from registration.signals import user_activated
from .models import City, User, Organization, Membership, is_user_vouched_for
@receiver(post_save, sender=City)
def clear_site_cache_when_city_changes(**kwargs):
    """Invalidate Django's Site cache whenever a City is saved."""
    # It's possible that the site may be associated with a different
    # city now, so clear the site cache.
    Site.objects.clear_cache()
@receiver(post_save, sender=User)
def create_membership_for_user(sender, raw, instance, **kwargs):
    """Ensure every saved User has a Membership row (created lazily here)."""
    # Skip fixture loading (raw=True): related rows may not exist yet.
    if raw: return
    # Only create a Membership if this user has none.
    if not len(Membership.objects.filter(user=instance)):
        membership = Membership(user=instance)
        membership.save()
@receiver(user_activated)
def auto_register_user_with_organization(sender, user, request, **kwargs):
    """On account activation, join the user's organization automatically
    when exactly one possible affiliation exists.

    NOTE(review): affiliation matching is delegated to
    Organization.objects.possible_affiliations_for() -- presumably based
    on the user's email domain; confirm in the manager.
    """
    if user.membership.organization: return
    orgs = Organization.objects.possible_affiliations_for(user)
    # Ambiguous (or zero) matches: leave the membership untouched.
    if orgs.count() != 1: return
    org = orgs[0]
    user.membership.organization = org
    user.membership.save()
@receiver(user_logged_in)
def tell_user_to_update_their_profile(sender, user, request, **kwargs):
    """On login, nudge vouched-for members without a bio to write one."""
    if not is_user_vouched_for(user): return
    if not user.membership.bio:
        # fail_silently: don't break login if the messages framework
        # is unavailable.
        messages.info(request,
                      'You don\'t have a bio! You should write one '
                      'so community members can learn more about you. '
                      'Just visit your user profile by accessing the '
                      'user menu at the top-right corner of this page.',
                      fail_silently=True)
| [
"[email protected]"
] | |
98616241fbdcb931bae105f55cdfe34251a2d974 | 26408f11b938a00f8b97a3e195095a45a12dc2c7 | /sneeze/Player.py | e1aecbdfcc7127f7257b5d20b68035b164acb822 | [] | no_license | cz-fish/sneeze-dodger | a7ea25e9267d408e8f46a9fb7a988d52dca8bd8e | 4fd333345d1f7d82c92ddcb15f18077362766844 | refs/heads/master | 2022-07-07T01:39:06.162125 | 2020-05-18T08:01:57 | 2020-05-18T08:01:57 | 255,090,774 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 849 | py | from sneeze.Actor import Actor
from sneeze.Sprite import Sprite
from sneeze.Types import *
class Player(Actor):
    """The player-controlled actor: maps input axes to movement and
    advances the idle/walk animation accordingly."""
    def __init__(self):
        super().__init__()
        # Visual representation; 'guy' is the player sprite sheet.
        self.sprite = Sprite.load('guy')
    def move(self, inputs: Inputs, collision) -> None:
        """Advance one tick: apply inputs to speed, resolve collision,
        and update the animation state.

        collision(pos, speed_vec) returns the new position; returning the
        current position unchanged signals that movement is blocked.
        """
        self.update_speed(inputs.xvalue, inputs.yvalue)
        new_pos = collision(self.pos, self.speed_vec)
        if new_pos == self.pos:
            # Blocked: zero the velocity so we stop pushing into the obstacle.
            self.speed_vec = Pos(0, 0)
        self.move_to(new_pos)
        # walk phase; reset if not moving
        if abs(self.speed_vec.x) < 2 and abs(self.speed_vec.y) < 2:
            self.animation = Animation('idle', 0)
        else:
            key, phase = self.animation
            if key == 'walk':
                self.animation = Animation(key, phase + 1)
            else:
                self.animation = Animation('walk', 0)
| [
"[email protected]"
] | |
c3b2ccf3279e3d6c131b50d1a8a089fc8ee00b32 | 5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d | /alipay/aop/api/domain/BizListDataInfo.py | 5f874dfae528b4b6592ad1306c025ec59eb0239e | [
"Apache-2.0"
] | permissive | alipay/alipay-sdk-python-all | 8bd20882852ffeb70a6e929038bf88ff1d1eff1c | 1fad300587c9e7e099747305ba9077d4cd7afde9 | refs/heads/master | 2023-08-27T21:35:01.778771 | 2023-08-23T07:12:26 | 2023-08-23T07:12:26 | 133,338,689 | 247 | 70 | Apache-2.0 | 2023-04-25T04:54:02 | 2018-05-14T09:40:54 | Python | UTF-8 | Python | false | false | 1,206 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class BizListDataInfo(object):
    """Alipay domain object carrying a ``code``/``name`` pair.

    Follows the SDK convention: private backing fields exposed through
    properties, ``to_alipay_dict`` for serialization and the static
    ``from_alipay_dict`` for deserialization.
    """
    def __init__(self):
        self._code = None
        self._name = None
    @property
    def code(self):
        return self._code
    @code.setter
    def code(self, value):
        self._code = value
    @property
    def name(self):
        return self._name
    @name.setter
    def name(self, value):
        self._name = value
    def to_alipay_dict(self):
        """Serialize to a plain dict; unset (falsy) fields are omitted."""
        params = {}
        for field in ('code', 'name'):
            value = getattr(self, field)
            if not value:
                continue
            if hasattr(value, 'to_alipay_dict'):
                # Nested SDK objects serialize themselves.
                params[field] = value.to_alipay_dict()
            else:
                params[field] = value
        return params
    @staticmethod
    def from_alipay_dict(d):
        """Build an instance from a dict; empty or None input yields None."""
        if not d:
            return None
        obj = BizListDataInfo()
        for field in ('code', 'name'):
            if field in d:
                setattr(obj, field, d[field])
        return obj
| [
"[email protected]"
] | |
0df491aaf04bd5efd3e1d19660af119f72bb10a1 | 93a959b0458bcdb60d33a4504f483078a78a56b6 | /CwnGraph/cwn_annotator.py | 7b58fe2e65d190fd9571cc70e6a5695b91cfcc2f | [] | no_license | kylecomtw/CwnGraph | a82d763a645c3342502274e6760cb63593f23d42 | 86ddb17de548a61c57f925fb2d783467431db18b | refs/heads/master | 2021-10-24T10:00:19.913420 | 2019-03-25T04:45:36 | 2019-03-25T04:45:36 | 84,843,165 | 3 | 2 | null | null | null | null | UTF-8 | Python | false | false | 5,050 | py | import os
import json
from datetime import datetime
from . import cwnio
from . import annot_merger
from .cwn_types import *
from .cwn_graph_utils import CwnGraphUtils
class CwnAnnotator:
    """An annotation session layered over a base CWN graph image.

    Locally created or edited nodes (``V``) and edges (``E``) shadow the
    parent graph; queries consult both layers and merge the results with
    ``annot_merger``.  Sessions persist as JSON files under ``annot/``.
    """

    # Filename prefix for persisted session files.
    PREFIX = "annot/cwn_annot"

    def __init__(self, cgu, session_name):
        """Create (or resume) the annotation session ``session_name``.

        Args:
            cgu: the base CwnGraphUtils image being annotated.
            session_name: identifier used in node ids and file names.
        """
        self.parent_cgu = cgu
        self.name = session_name
        self.V = {}
        self.E = {}
        self.meta = {
            "session_name": session_name,
            "timestamp": "",
            "serial": 0,
            "base_hash": cgu.get_hash()
        }
        self.load(session_name)

    def load(self, name):
        """Resume a previously saved session; returns True if one existed."""
        fpath = f"{CwnAnnotator.PREFIX}_{name}.json"
        if os.path.exists(fpath):
            print("loading saved session from ", fpath)
            self.meta, self.V, self.E = \
                cwnio.load_annot_json(fpath)
            base_hash = self.meta.get("base_hash", "")
            # Warn (but proceed) when the saved session was built on a
            # different base image than the one supplied now.
            if base_hash and base_hash != self.parent_cgu.get_hash():
                print("WARNING: loading with a different base image")
            return True
        else:
            print("Creating new session", name)
            return False

    def save(self, with_timestamp=False):
        """Persist the session, optionally to a timestamped snapshot file."""
        name = self.meta["session_name"]
        timestamp = datetime.now().strftime("%y%m%d%H%M%S")
        self.meta["snapshot"] = timestamp
        cwnio.ensure_dir("annot")
        if with_timestamp:
            cwnio.dump_annot_json(self.meta, self.V, self.E,
                f"{CwnAnnotator.PREFIX}_{name}_{timestamp}.json")
        else:
            cwnio.dump_annot_json(self.meta, self.V, self.E,
                f"{CwnAnnotator.PREFIX}_{name}.json")

    def new_node_id(self):
        """Allocate the next serial node id, e.g. ``<session>_000001``."""
        serial = self.meta.get("serial", 0) + 1
        session_name = self.meta.get("session_name", "")
        self.meta["serial"] = serial
        return f"{session_name}_{serial:06d}"

    def create_lemma(self, lemma):
        """Create, register and return a new CwnLemma node."""
        node_id = self.new_node_id()
        new_lemma = CwnLemma(node_id, self)
        new_lemma.lemma = lemma
        self.set_lemma(new_lemma)
        return new_lemma

    def create_sense(self, definition):
        """Create, register and return a new CwnSense node."""
        node_id = self.new_node_id()
        new_sense = CwnSense(node_id, self)
        new_sense.definition = definition
        self.set_sense(new_sense)
        return new_sense

    def create_relation(self, src_id, tgt_id, rel_type):
        """Create a typed edge src->tgt; raises ValueError on unknown ids."""
        if not self.get_node_data(src_id):
            raise ValueError(f"{src_id} not found")
        if not self.get_node_data(tgt_id):
            raise ValueError(f"{tgt_id} not found")
        edge_id = (src_id, tgt_id)
        new_rel = CwnRelation(edge_id, self)
        new_rel.relation_type = rel_type
        self.set_relation(new_rel)
        return new_rel

    def set_lemma(self, cwn_lemma):
        self.V[cwn_lemma.id] = cwn_lemma.data()

    def set_sense(self, cwn_sense):
        self.V[cwn_sense.id] = cwn_sense.data()

    def set_relation(self, cwn_relation):
        self.E[cwn_relation.id] = cwn_relation.data()

    def remove_lemma(self, cwn_lemma):
        # Deletions are recorded as tombstones (action == "delete") rather
        # than removed, so they can shadow entries in the base image.
        cwn_lemma.action = "delete"
        self.set_lemma(cwn_lemma)

    def remove_sense(self, cwn_sense):
        cwn_sense.action = "delete"
        self.set_sense(cwn_sense)

    def remove_relation(self, cwn_relation):
        cwn_relation.action = "delete"
        self.set_relation(cwn_relation)

    def find_glyph(self, instr):
        """Glyph lookup is delegated to the base image (no local glyphs)."""
        return self.parent_cgu.find_glyph(instr)

    def find_senses(self, lemma="", definition="", examples=""):
        """Search senses in the local layer and the base image, merged."""
        cgu = CwnGraphUtils(self.V, self.E)
        # BUG FIX: the local query previously passed the undefined name
        # `defintion`, raising NameError whenever this method was called.
        senses = cgu.find_senses(lemma, definition, examples)
        parent_senses = self.parent_cgu.find_senses(lemma, definition, examples)
        ret = annot_merger.merge(senses, parent_senses, self)
        return ret

    def find_lemmas(self, instr_regex):
        """Regex search over lemmas in both layers, merged."""
        cgu = CwnGraphUtils(self.V, self.E)
        lemmas = cgu.find_lemma(instr_regex)
        parent_lemmas = self.parent_cgu.find_lemma(instr_regex)
        ret = annot_merger.merge(lemmas, parent_lemmas, self)
        return ret

    def find_edges(self, node_id, is_directed=True):
        """Edges incident to ``node_id`` from both layers, merged."""
        cgu = CwnGraphUtils(self.V, self.E)
        edges = cgu.find_edges(node_id, is_directed)
        parent_edges = self.parent_cgu.find_edges(node_id, is_directed)
        ret = annot_merger.merge(edges, parent_edges, self)
        return ret

    def get_node_data(self, node_id):
        """Node payload; the local layer takes precedence over the base."""
        node_data = self.V.get(node_id, {})
        if not node_data:
            node_data = self.parent_cgu.get_node_data(node_id)
        return node_data

    def get_edge_data(self, edge_id):
        """Edge payload; the local layer takes precedence over the base."""
        edge_data = self.E.get(edge_id, {})
        if not edge_data:
            edge_data = self.parent_cgu.get_edge_data(edge_id)
        return edge_data

    def connected(self, node_id, is_directed=True, maxConn=100, sense_only=True):
        raise NotImplementedError("connected() is not implemented in CwnAnnotator")
| [
"[email protected]"
] | |
127f14137ff8c69323cb99a5ec67d900927cca5e | 4b17225bc3860419edb6a8818bbac82e6b36e79d | /employee_tracker/settings.py | ce1b5d600785fc29625c723fdb419d1d986f35e8 | [] | no_license | argon2008-aiti/employee_tracker | 8ab45ee727e07b242d6ac3fb446ca5c1b9649bb0 | 5be7c3bb323f3b350d26df4d4813b6b071324277 | refs/heads/master | 2021-01-15T13:00:03.644233 | 2016-10-06T16:42:28 | 2016-10-06T16:42:28 | 35,000,045 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,444 | py | """
Django settings for employee_tracker project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'cdkxms9u50qs@ig3j3s771u55ntlvxp2h8pijlx2rr83ms)#7q'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = [".herokuapp.com"]
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'south',
'monitor',
'leaflet',
'djgeojson',
'django_ajax',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
#'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'employee_tracker.urls'
WSGI_APPLICATION = 'employee_tracker.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# for graphviz
GRAPH_MODELS = {
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
# static file directories
STATICFILES_DIRS = (
('assets', 'static'),
)
# base url at which static files are served
STATIC_URL = '/assets/'
STATIC_ROOT = os.path.join(BASE_DIR,'assets')
LOGIN_URL = '/login'
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
'django.template.loaders.eggs.Loader',
)
# Template files (html+django templates)
TEMPLATE_DIRS = (
os.path.join(BASE_DIR, "templates"),
)
# Production code
if DEBUG==False:
#parse database configuration from $DATABASE_URL
import dj_database_url
DATABASES['default'] = dj_database_url.config()
# Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# Allow all host headers
ALLOWED_HOSTS = ['*']
# Static asset configuration
import os
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
STATIC_ROOT = 'staticfiles'
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
| [
"[email protected]"
] | |
619dff3fc4f5f34f2ea4f843e80b2f4d472b30d0 | b6bcfd935f7876fc65416e7340fda1c9b0516fd7 | /pyscf/tdscf/test/test_tduks.py | 091c6b43834c92a378e73acb5b9bd073aa22e37b | [
"Apache-2.0"
] | permissive | lzypotato/pyscf | 62f849b9a3ec8480c3da63a5822ea780608796b2 | 94c21e2e9745800c7efc7256de0d628fc60afc36 | refs/heads/master | 2020-09-06T22:45:04.191935 | 2019-06-18T06:04:48 | 2019-06-18T06:04:48 | 220,578,540 | 1 | 0 | Apache-2.0 | 2019-11-09T02:13:16 | 2019-11-09T02:13:15 | null | UTF-8 | Python | false | false | 13,906 | py | #!/usr/bin/env python
# Copyright 2014-2018 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: Qiming Sun <[email protected]>
#
import unittest
import numpy
import copy
from pyscf import lib, gto, scf, dft
from pyscf import tdscf
mol = gto.Mole()
mol.verbose = 5
mol.output = '/dev/null'
mol.atom = [
["O" , (0. , 0. , 0.)],
[1 , (0. , -0.757 , 0.587)],
[1 , (0. , 0.757 , 0.587)] ]
mol.spin = 2
mol.basis = '631g'
mol.build()
mf = scf.UHF(mol).run()
td_hf = tdscf.TDHF(mf).run(conv_tol=1e-12)
mf_lda = dft.UKS(mol).set(xc='lda', conv_tol=1e-12)
mf_lda.grids.prune = None
mf_lda = mf_lda.newton().run()
mf_bp86 = dft.UKS(mol).set(xc='b88,p86', conv_tol=1e-12)
mf_bp86.grids.prune = None
mf_bp86 = mf_bp86.newton().run()
mf_b3lyp = dft.UKS(mol).set(xc='b3lyp', conv_tol=1e-12)
mf_b3lyp.grids.prune = None
mf_b3lyp = mf_b3lyp.newton().run()
def diagonalize(a, b, nroots=4):
a_aa, a_ab, a_bb = a
b_aa, b_ab, b_bb = b
nocc_a, nvir_a, nocc_b, nvir_b = a_ab.shape
a_aa = a_aa.reshape((nocc_a*nvir_a,nocc_a*nvir_a))
a_ab = a_ab.reshape((nocc_a*nvir_a,nocc_b*nvir_b))
a_bb = a_bb.reshape((nocc_b*nvir_b,nocc_b*nvir_b))
b_aa = b_aa.reshape((nocc_a*nvir_a,nocc_a*nvir_a))
b_ab = b_ab.reshape((nocc_a*nvir_a,nocc_b*nvir_b))
b_bb = b_bb.reshape((nocc_b*nvir_b,nocc_b*nvir_b))
a = numpy.bmat([[ a_aa , a_ab],
[ a_ab.T, a_bb]])
b = numpy.bmat([[ b_aa , b_ab],
[ b_ab.T, b_bb]])
e = numpy.linalg.eig(numpy.bmat([[a , b ],
[-b.conj(),-a.conj()]]))[0]
lowest_e = numpy.sort(e[e.real > 0].real)[:nroots]
lowest_e = lowest_e[lowest_e > 1e-3]
return lowest_e
def tearDownModule():
global mol, mf, td_hf, mf_lda, mf_bp86, mf_b3lyp
mol.stdout.close()
del mol, mf, td_hf, mf_lda, mf_bp86, mf_b3lyp
class KnownValues(unittest.TestCase):
def test_nohbrid_lda(self):
td = tdscf.uks.TDDFTNoHybrid(mf_lda).set(conv_tol=1e-12)
es = td.kernel(nstates=4)[0]
a,b = td.get_ab()
e_ref = diagonalize(a, b, 6)
self.assertAlmostEqual(abs(es[:3]-e_ref[:3]).max(), 0, 8)
self.assertAlmostEqual(lib.finger(es[:3]*27.2114), 1.2946309669294163, 6)
def test_nohbrid_b88p86(self):
td = tdscf.uks.TDDFTNoHybrid(mf_bp86).set(conv_tol=1e-12)
es = td.kernel(nstates=4)[0]
a,b = td.get_ab()
e_ref = diagonalize(a, b, 6)
self.assertAlmostEqual(abs(es[:3]-e_ref[:3]).max(), 0, 8)
self.assertAlmostEqual(lib.finger(es[:3]*27.2114), 1.4624730971221087, 6)
def test_tddft_lda(self):
td = tdscf.uks.TDDFT(mf_lda).set(conv_tol=1e-12)
es = td.kernel(nstates=4)[0] * 27.2114
self.assertAlmostEqual(lib.finger(es[:3]), 1.2946309669294163, 6)
def test_tddft_b88p86(self):
td = tdscf.uks.TDDFT(mf_bp86).set(conv_tol=1e-12)
es = td.kernel(nstates=4)[0] * 27.2114
self.assertAlmostEqual(lib.finger(es[:3]), 1.4624730971221087, 6)
def test_tddft_b3lyp(self):
td = tdscf.uks.TDDFT(mf_b3lyp).set(conv_tol=1e-12)
es = td.kernel(nstates=4)[0] * 27.2114
self.assertAlmostEqual(lib.finger(es[:3]), 1.2984822994759448, 6)
def test_tda_b3lyp(self):
td = tdscf.TDA(mf_b3lyp).set(conv_tol=1e-12)
es = td.kernel(nstates=4)[0] * 27.2114
self.assertAlmostEqual(lib.finger(es[:3]), 1.4303636271767162, 6)
def test_tda_lda(self):
td = tdscf.TDA(mf_lda).set(conv_tol=1e-12)
es = td.kernel(nstates=4)[0] * 27.2114
self.assertAlmostEqual(lib.finger(es[:3]), 1.4581538269747121, 6)
def test_ab_hf(self):
mf = scf.UHF(mol).run()
a, b = tdscf.TDDFT(mf).get_ab()
ftda = tdscf.uhf.gen_tda_operation(mf)[0]
ftdhf = tdscf.uhf.gen_tdhf_operation(mf)[0]
nocc_a = numpy.count_nonzero(mf.mo_occ[0] == 1)
nvir_a = numpy.count_nonzero(mf.mo_occ[0] == 0)
nocc_b = numpy.count_nonzero(mf.mo_occ[1] == 1)
nvir_b = numpy.count_nonzero(mf.mo_occ[1] == 0)
numpy.random.seed(2)
xa, ya = numpy.random.random((2,nocc_a,nvir_a))
xb, yb = numpy.random.random((2,nocc_b,nvir_b))
x = numpy.hstack((xa.ravel(), xb.ravel()))
y = numpy.hstack((ya.ravel(), yb.ravel()))
xy = numpy.hstack((x, y))
ax_a = numpy.einsum('iajb,jb->ia', a[0], xa)
ax_a+= numpy.einsum('iajb,jb->ia', a[1], xb)
ax_b = numpy.einsum('jbia,jb->ia', a[1], xa)
ax_b+= numpy.einsum('iajb,jb->ia', a[2], xb)
ax = numpy.hstack((ax_a.ravel(), ax_b.ravel()))
self.assertAlmostEqual(abs(ax - ftda([x])).max(), 0, 9)
ay_a = numpy.einsum('iajb,jb->ia', a[0], ya)
ay_a+= numpy.einsum('iajb,jb->ia', a[1], yb)
ay_b = numpy.einsum('jbia,jb->ia', a[1], ya)
ay_b+= numpy.einsum('iajb,jb->ia', a[2], yb)
ay = numpy.hstack((ay_a.ravel(), ay_b.ravel()))
bx_a = numpy.einsum('iajb,jb->ia', b[0], xa)
bx_a+= numpy.einsum('iajb,jb->ia', b[1], xb)
bx_b = numpy.einsum('jbia,jb->ia', b[1], xa)
bx_b+= numpy.einsum('iajb,jb->ia', b[2], xb)
bx = numpy.hstack((bx_a.ravel(), bx_b.ravel()))
by_a = numpy.einsum('iajb,jb->ia', b[0], ya)
by_a+= numpy.einsum('iajb,jb->ia', b[1], yb)
by_b = numpy.einsum('jbia,jb->ia', b[1], ya)
by_b+= numpy.einsum('iajb,jb->ia', b[2], yb)
by = numpy.hstack((by_a.ravel(), by_b.ravel()))
ab1 = ax + by
ab2 =-bx - ay
ab12 = numpy.hstack((ab1.ravel(),ab2.ravel()))
abxy_ref = ftdhf([xy])
self.assertAlmostEqual(abs(ab12 - abxy_ref).max(), 0, 9)
def test_ab_lda(self):
mf = mf_lda
a, b = tdscf.TDDFT(mf).get_ab()
ftda = tdscf.uhf.gen_tda_operation(mf)[0]
ftdhf = tdscf.uhf.gen_tdhf_operation(mf)[0]
nocc_a = numpy.count_nonzero(mf.mo_occ[0] == 1)
nvir_a = numpy.count_nonzero(mf.mo_occ[0] == 0)
nocc_b = numpy.count_nonzero(mf.mo_occ[1] == 1)
nvir_b = numpy.count_nonzero(mf.mo_occ[1] == 0)
numpy.random.seed(2)
xa, ya = numpy.random.random((2,nocc_a,nvir_a))
xb, yb = numpy.random.random((2,nocc_b,nvir_b))
x = numpy.hstack((xa.ravel(), xb.ravel()))
y = numpy.hstack((ya.ravel(), yb.ravel()))
xy = numpy.hstack((x, y))
ax_a = numpy.einsum('iajb,jb->ia', a[0], xa)
ax_a+= numpy.einsum('iajb,jb->ia', a[1], xb)
ax_b = numpy.einsum('jbia,jb->ia', a[1], xa)
ax_b+= numpy.einsum('iajb,jb->ia', a[2], xb)
ax = numpy.hstack((ax_a.ravel(), ax_b.ravel()))
self.assertAlmostEqual(abs(ax - ftda([x])).max(), 0, 9)
ay_a = numpy.einsum('iajb,jb->ia', a[0], ya)
ay_a+= numpy.einsum('iajb,jb->ia', a[1], yb)
ay_b = numpy.einsum('jbia,jb->ia', a[1], ya)
ay_b+= numpy.einsum('iajb,jb->ia', a[2], yb)
ay = numpy.hstack((ay_a.ravel(), ay_b.ravel()))
bx_a = numpy.einsum('iajb,jb->ia', b[0], xa)
bx_a+= numpy.einsum('iajb,jb->ia', b[1], xb)
bx_b = numpy.einsum('jbia,jb->ia', b[1], xa)
bx_b+= numpy.einsum('iajb,jb->ia', b[2], xb)
bx = numpy.hstack((bx_a.ravel(), bx_b.ravel()))
by_a = numpy.einsum('iajb,jb->ia', b[0], ya)
by_a+= numpy.einsum('iajb,jb->ia', b[1], yb)
by_b = numpy.einsum('jbia,jb->ia', b[1], ya)
by_b+= numpy.einsum('iajb,jb->ia', b[2], yb)
by = numpy.hstack((by_a.ravel(), by_b.ravel()))
ab1 = ax + by
ab2 =-bx - ay
ab12 = numpy.hstack((ab1.ravel(),ab2.ravel()))
abxy_ref = ftdhf([xy])
self.assertAlmostEqual(abs(ab12 - abxy_ref).max(), 0, 9)
def test_ab_b3lyp(self):
mf = mf_b3lyp
a, b = tdscf.TDDFT(mf).get_ab()
ftda = tdscf.uhf.gen_tda_operation(mf)[0]
ftdhf = tdscf.uhf.gen_tdhf_operation(mf)[0]
nocc_a = numpy.count_nonzero(mf.mo_occ[0] == 1)
nvir_a = numpy.count_nonzero(mf.mo_occ[0] == 0)
nocc_b = numpy.count_nonzero(mf.mo_occ[1] == 1)
nvir_b = numpy.count_nonzero(mf.mo_occ[1] == 0)
numpy.random.seed(2)
xa, ya = numpy.random.random((2,nocc_a,nvir_a))
xb, yb = numpy.random.random((2,nocc_b,nvir_b))
x = numpy.hstack((xa.ravel(), xb.ravel()))
y = numpy.hstack((ya.ravel(), yb.ravel()))
xy = numpy.hstack((x, y))
ax_a = numpy.einsum('iajb,jb->ia', a[0], xa)
ax_a+= numpy.einsum('iajb,jb->ia', a[1], xb)
ax_b = numpy.einsum('jbia,jb->ia', a[1], xa)
ax_b+= numpy.einsum('iajb,jb->ia', a[2], xb)
ax = numpy.hstack((ax_a.ravel(), ax_b.ravel()))
self.assertAlmostEqual(abs(ax - ftda([x])).max(), 0, 9)
ay_a = numpy.einsum('iajb,jb->ia', a[0], ya)
ay_a+= numpy.einsum('iajb,jb->ia', a[1], yb)
ay_b = numpy.einsum('jbia,jb->ia', a[1], ya)
ay_b+= numpy.einsum('iajb,jb->ia', a[2], yb)
ay = numpy.hstack((ay_a.ravel(), ay_b.ravel()))
bx_a = numpy.einsum('iajb,jb->ia', b[0], xa)
bx_a+= numpy.einsum('iajb,jb->ia', b[1], xb)
bx_b = numpy.einsum('jbia,jb->ia', b[1], xa)
bx_b+= numpy.einsum('iajb,jb->ia', b[2], xb)
bx = numpy.hstack((bx_a.ravel(), bx_b.ravel()))
by_a = numpy.einsum('iajb,jb->ia', b[0], ya)
by_a+= numpy.einsum('iajb,jb->ia', b[1], yb)
by_b = numpy.einsum('jbia,jb->ia', b[1], ya)
by_b+= numpy.einsum('iajb,jb->ia', b[2], yb)
by = numpy.hstack((by_a.ravel(), by_b.ravel()))
ab1 = ax + by
ab2 =-bx - ay
ab12 = numpy.hstack((ab1.ravel(),ab2.ravel()))
abxy_ref = ftdhf([xy])
self.assertAlmostEqual(abs(ab12 - abxy_ref).max(), 0, 9)
def test_nto(self):
mf = scf.UHF(mol).run()
td = tdscf.TDA(mf).run()
w, nto = td.get_nto(state=1)
self.assertAlmostEqual(w[0][0], 0.00018520143461015, 9)
self.assertAlmostEqual(w[1][0], 0.99963372674044326, 9)
self.assertAlmostEqual(lib.finger(w[0]), 0.00027305600430816, 9)
self.assertAlmostEqual(lib.finger(w[1]), 0.99964370569529093, 9)
pmol = copy.copy(mol)
pmol.symmetry = True
pmol.build(0, 0)
mf = scf.UHF(pmol).run()
td = tdscf.TDA(mf).run(nstates=3)
w, nto = td.get_nto(state=0)
self.assertAlmostEqual(w[0][0], 0.00018520143461016, 9)
self.assertAlmostEqual(w[1][0], 0.99963372674044326, 9)
self.assertAlmostEqual(lib.finger(w[0]), 0.00027305600430816, 9)
self.assertAlmostEqual(lib.finger(w[1]), 0.99964370569529093, 9)
w, nto = td.get_nto(state=-1)
self.assertAlmostEqual(w[0][0], 0.00236940007134660, 9)
self.assertAlmostEqual(w[1][0], 0.99759687228056182, 9)
def test_analyze(self):
f = td_hf.oscillator_strength(gauge='length')
self.assertAlmostEqual(lib.finger(f), 0.16147450863004867, 7)
f = td_hf.oscillator_strength(gauge='velocity', order=2)
self.assertAlmostEqual(lib.finger(f), 0.19750347627735745, 6)
td_hf.analyze()
def test_init(self):
hf = scf.UHF(mol)
ks = scf.UKS(mol)
kshf = scf.UKS(mol).set(xc='HF')
self.assertTrue(isinstance(tdscf.TDA(hf), tdscf.uhf.TDA))
self.assertTrue(isinstance(tdscf.TDA(ks), tdscf.uks.TDA))
self.assertTrue(isinstance(tdscf.TDA(kshf), tdscf.uks.TDA))
self.assertTrue(isinstance(tdscf.RPA(hf), tdscf.uhf.TDHF))
self.assertTrue(isinstance(tdscf.RPA(ks), tdscf.uks.TDDFTNoHybrid))
self.assertTrue(isinstance(tdscf.RPA(kshf), tdscf.uks.TDDFT))
self.assertTrue(isinstance(tdscf.TDDFT(hf), tdscf.uhf.TDHF))
self.assertTrue(isinstance(tdscf.TDDFT(ks), tdscf.uks.TDDFTNoHybrid))
self.assertTrue(isinstance(tdscf.TDDFT(kshf), tdscf.uks.TDDFT))
self.assertRaises(RuntimeError, tdscf.dRPA, hf)
self.assertTrue(isinstance(tdscf.dRPA(kshf), tdscf.uks.dRPA))
self.assertTrue(isinstance(tdscf.dRPA(ks), tdscf.uks.dRPA))
self.assertRaises(RuntimeError, tdscf.dTDA, hf)
self.assertTrue(isinstance(tdscf.dTDA(kshf), tdscf.uks.dTDA))
self.assertTrue(isinstance(tdscf.dTDA(ks), tdscf.uks.dTDA))
def test_tda_with_wfnsym(self):
pmol = mol.copy()
pmol.symmetry = True
pmol.build(0, 0)
mf = dft.UKS(pmol).run()
td = tdscf.uks.TDA(mf)
td.wfnsym = 'B1'
es = td.kernel(nstates=3)[0]
self.assertAlmostEqual(lib.finger(es), 0.16350926466999033, 6)
td.analyze()
def test_tdhf_with_wfnsym(self):
pmol = mol.copy()
pmol.symmetry = True
pmol.build()
mf = scf.UHF(pmol).run()
td = tdscf.uhf.TDHF(mf)
td.wfnsym = 'B1'
td.nroots = 3
es = td.kernel()[0]
self.assertAlmostEqual(lib.finger(es), 0.11306948533259675, 6)
td.analyze()
def test_tddft_with_wfnsym(self):
pmol = mol.copy()
pmol.symmetry = True
pmol.build()
mf = dft.UKS(pmol).run()
td = tdscf.uks.TDDFTNoHybrid(mf)
td.wfnsym = 'B1'
td.nroots = 3
es = td.kernel()[0]
self.assertAlmostEqual(lib.finger(es), 0.15403661700414412, 6)
td.analyze()
if __name__ == "__main__":
print("Full Tests for TD-UKS")
unittest.main()
| [
"[email protected]"
] | |
0c448d5d9533485b666d5f11510eb4bdf0e13294 | 9fa07ba96a5330712bb1f1d0874375e6f4923ce7 | /wait/www/387.py | 3d6ab8263419dea2fd32e7413af8f4570a1f4842 | [] | no_license | Ajatars/Ajatar | cf4460d881b18095ce968c883e68500d44f90570 | 943b71285e6b74ae38861aa305d26b0a9bef4050 | refs/heads/master | 2020-06-02T02:14:05.989075 | 2019-06-10T02:48:10 | 2019-06-10T02:48:10 | 191,002,958 | 11 | 2 | null | null | null | null | UTF-8 | Python | false | false | 981 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
POC Name : Mvmmall search.php SQL Injection
Reference : http://www.wooyun.org/bugs/wooyun-2011-01732
Author : NoName
"""
import re
from urllib.parse import urlparse
def assign(service, arg):
if service == "www":
r = urlparse(arg)
return True, '%s://%s/' % (r.scheme, r.netloc)
def audit(arg):
payload = "search.php?tag_ids[goods_id]=uid))%20and(select%201%20from(select%20count(*),concat((select%20(select%20md5(12345))%20from%20information_schema.tables%20limit%200,1),floor(rand(0)*2))x%20from%20information_schema.tables%20group%20by%20x)a)%20and%201=1%23"
code, head, res, errcode, _ = curl.curl(arg + payload)
if code == 200:
m = re.search("827ccb0eea8a706c4c34a16891f84e7b1",res)
if m:
security_hole('Mvmmall search.php SQL Injection exists.')
if __name__ == '__main__':
from dummy import *
audit(assign('www', 'http://dajiamai.com/')[1]) | [
"[email protected]"
] | |
707062ffa62600fed5892717cfc5efb6677b3277 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/nouns/_plough.py | 8524ffbb0f26cf406e78e16dbed5ed7ccee77fc1 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 427 | py |
#calss header
class _PLOUGH():
def __init__(self,):
self.name = "PLOUGH"
self.definitions = [u'a large farming tool with blades that digs the soil in fields so that seeds can be planted', u'If land is under the plough, crops are grown on it: ']
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.specie = 'nouns'
def run(self, obj1 = [], obj2 = []):
return self.jsondata
| [
"[email protected]"
] | |
816403dc9d93b4276bffb4d8e162c51ea13231b8 | 0be45470f15f12872d81f98c72e3b8528100ad27 | /pointCollection/tools/RDE.py | 563e437d633d241e661519931619d6cf3b3cf410 | [
"MIT"
] | permissive | SmithB/pointCollection | 19a43bb19b1753542f693645fe4f537c2dbf7af9 | 026a60eb7e2fbe5333c7a30bd8299dda44c5878e | refs/heads/master | 2023-08-23T18:56:49.943934 | 2023-08-18T16:41:12 | 2023-08-18T16:41:12 | 220,045,965 | 4 | 8 | MIT | 2023-07-03T15:47:58 | 2019-11-06T16:51:04 | Jupyter Notebook | UTF-8 | Python | false | false | 584 | py | # -*- coding: utf-8 -*-
"""
Created on Mon Oct 23 16:31:30 2017
@author: ben
"""
import numpy as np
def RDE(x):
xs=x.copy()
xs=np.isfinite(xs) # this changes xs from values to a boolean
if np.sum(xs)<2 :
return np.nan
ind=np.arange(0.5, np.sum(xs))
LH=np.interp(np.array([0.16, 0.84])*np.sum(xs), ind, np.sort(x[xs]))
#print('LH =',LH)
return (LH[1]-LH[0])/2. # trying to get some kind of a width of the data ~variance
#import scipy.stats as stats
#def RDE(x):
# return (stats.scoreatpercentile(x, 84 )-stats.scoreatpercentile(x, 16))/2. | [
"[email protected]"
] | |
ec81f69f8b35b27ca38c0fabe125ba6ef4bc3a1d | 1975ee674b36084366b1bbe2c091d8f0f8795dc0 | /demo/class_views.py | 49ac0086b684256a0215318d23d4992296ad6f5e | [] | no_license | srikanthpragada/PYTHON_03_JULY_2018_WEBDEMO | f193213788deadcab7ac7b183328269ba1334488 | 56e076ad30703117cafc56d6d95449c6ec8eebb2 | refs/heads/master | 2020-03-25T11:45:53.128704 | 2018-08-23T15:29:05 | 2018-08-23T15:29:05 | 143,747,408 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 877 | py | from django.views.generic import TemplateView, ListView
from django.shortcuts import render
from .forms import LoginForm
from .models import Course
class ClassView1(TemplateView):
template_name = 'class_view1.html'
class LoginView(TemplateView):
template_name = 'login.html'
def get(self, request):
form = LoginForm()
return render(request, self.template_name, {'form': form})
def post(self, request):
form = LoginForm(request.POST)
if form.is_valid():
print(form.cleaned_data['username'], form.cleaned_data['password'])
return render(request, self.template_name, {'form': form})
# Generic View - ListView demo
class ListCourseView(ListView):
model = Course
template_name = "courses.html" # default is demo/course_list.html
context_object_name = 'courses' # default is object_list
| [
"[email protected]"
] | |
275aa3e362920aae1e2af84fe0380f36fa448f39 | 55c250525bd7198ac905b1f2f86d16a44f73e03a | /Python/pygame/pygameweb/pygameweb/db.py | 57c70ca70133b811d4447037d0df7cd54b72e632 | [
"BSD-2-Clause"
] | permissive | NateWeiler/Resources | 213d18ba86f7cc9d845741b8571b9e2c2c6be916 | bd4a8a82a3e83a381c97d19e5df42cbababfc66c | refs/heads/master | 2023-09-03T17:50:31.937137 | 2023-08-28T23:50:57 | 2023-08-28T23:50:57 | 267,368,545 | 2 | 1 | null | 2022-09-08T15:20:18 | 2020-05-27T16:18:17 | null | UTF-8 | Python | false | false | 129 | py | version https://git-lfs.github.com/spec/v1
oid sha256:95c026dc0e7051336cd999158979e81f159d4470489660469d0e0175c66400da
size 1274
| [
"[email protected]"
] | |
e7e2e35e74f6f746945d6189c17e6e7c5bf68ec4 | 4c852fab792606580acb3f3a61b7f86ae25930b0 | /Python/MIT-CompThinking/MITx600.1x/ProblemSets/wk3/L5PROBLEM5.py | 5fc93127f17702a2607600df981bd5e7b2f929a5 | [] | no_license | hmchen47/Programming | a9767a78a35c0844a1366391f48b205ff1588591 | 9637e586eee5c3c751c96bfc5bc1d098ea5b331c | refs/heads/master | 2022-05-01T01:57:46.573136 | 2021-08-09T04:29:40 | 2021-08-09T04:29:40 | 118,053,509 | 2 | 1 | null | 2021-09-20T19:54:02 | 2018-01-19T00:06:04 | Python | UTF-8 | Python | false | false | 259 | py | #!/usr/bin/python
# _*_ coding = UTF-8 _*_
def gcdRecur(a, b):
'''
a, b: positive integers
returns: a positive integer, the greatest common divisor of a & b.
'''
if b == 0:
return a
else:
return gcdRecur(b, a % b) | [
"[email protected]"
] | |
055aabb9ef9a32291d0e6edb97d8a581f7df3962 | 2509936d814fb6cdd283c2549c518c8dfad9450c | /api/staticdata/regimes/migrations/0010_merge_20221214_1035.py | 81daedc733cfa1f2e70025a26480bb78e0acf8fd | [
"MIT"
] | permissive | uktrade/lite-api | 19f829119fa96de3f4862eb233845508b0fef7eb | b35792fc981220285ed9a7b3659aba460f1b207a | refs/heads/dev | 2023-08-25T10:11:17.594001 | 2023-08-24T14:24:43 | 2023-08-24T14:24:43 | 172,914,199 | 4 | 3 | MIT | 2023-09-14T17:36:47 | 2019-02-27T12:46:22 | Python | UTF-8 | Python | false | false | 277 | py | # Generated by Django 3.2.16 on 2022-12-14 10:35
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("regimes", "0009_update_cwc_shortened_names"),
("regimes", "0009_update_nsg_regimes"),
]
operations = []
| [
"[email protected]"
] | |
946eaac05979a4f663b7fefeba08d4f1dd8efb16 | d21c924fc23b812aaedeb2cfa3dfb108535a507f | /tw2/jqplugins/fg/defaults.py | 310f64fb53f192fa733e55b3ba04ea7270501562 | [] | no_license | toscawidgets/tw2.jqplugins.fg | eba3a90949c59dd7c6b3740ab09faa9b5d824a6d | 8317f3bec82364b95e86aa3655c7f787b25d715f | refs/heads/master | 2020-05-17T12:13:14.385977 | 2011-11-04T15:41:50 | 2011-11-04T15:41:50 | 954,861 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 56 | py | #jQuery.ui
_fg_dirname_ = 'jquery/fg/%(subdir)s'
| [
"[email protected]"
] | |
8158442771c431dd35672a9edc586edd0fe33d1d | e23a4f57ce5474d468258e5e63b9e23fb6011188 | /125_algorithms/_exercises/templates/_algorithms_challenges/leetcode/leetCode/BreadthFirstSearch/103_BinaryTreeZigzagLevelOrderTraversal.py | 4445a0088162de197a6843a1be5b63a07388215c | [] | no_license | syurskyi/Python_Topics | 52851ecce000cb751a3b986408efe32f0b4c0835 | be331826b490b73f0a176e6abed86ef68ff2dd2b | refs/heads/master | 2023-06-08T19:29:16.214395 | 2023-05-29T17:09:11 | 2023-05-29T17:09:11 | 220,583,118 | 3 | 2 | null | 2023-02-16T03:08:10 | 2019-11-09T02:58:47 | Python | UTF-8 | Python | false | false | 797 | py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
c.. Solution o..
___ zigzagLevelOrder root
__ n.. root:
r_ []
left2right = 1
# 1. scan the level from left to right. -1 reverse.
ans, stack, temp # list, [root], []
_____ stack:
temp = [node.val ___ node __ stack]
stack = [child ___ node __ stack
___ child __ (node.left, node.right) __ child]
ans += [temp[::left2right]] # Pythonic way
left2right *= -1
r_ ans
"""
[]
[1]
[1,2,3]
[0,1,2,3,4,5,6,null,null,7,null,8,9,null,10]
"""
| [
"[email protected]"
] | |
79848a0117879783d1f2f0c37b6a8586c18147c6 | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp/IPV6-TCP-MIB.py | ae7c821868888b0850cd5394fcb2bb61fbdbaeb3 | [
"Apache-2.0"
] | permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 5,095 | py | #
# PySNMP MIB module IPV6-TCP-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/IPV6-TCP-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 19:45:44 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, SingleValueConstraint, ConstraintsUnion, ValueRangeConstraint, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "SingleValueConstraint", "ConstraintsUnion", "ValueRangeConstraint", "ValueSizeConstraint")
Ipv6Address, Ipv6IfIndexOrZero = mibBuilder.importSymbols("IPV6-TC", "Ipv6Address", "Ipv6IfIndexOrZero")
ModuleCompliance, ObjectGroup, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "ObjectGroup", "NotificationGroup")
MibScalar, MibTable, MibTableRow, MibTableColumn, experimental, ObjectIdentity, Gauge32, Counter64, Counter32, Bits, NotificationType, IpAddress, ModuleIdentity, Integer32, iso, TimeTicks, Unsigned32, mib_2, MibIdentifier = mibBuilder.importSymbols("SNMPv2-SMI", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "experimental", "ObjectIdentity", "Gauge32", "Counter64", "Counter32", "Bits", "NotificationType", "IpAddress", "ModuleIdentity", "Integer32", "iso", "TimeTicks", "Unsigned32", "mib-2", "MibIdentifier")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
ipv6TcpMIB = ModuleIdentity((1, 3, 6, 1, 3, 86))
ipv6TcpMIB.setRevisions(('2017-02-22 00:00', '1998-01-29 00:00',))
if mibBuilder.loadTexts: ipv6TcpMIB.setLastUpdated('201702220000Z')
if mibBuilder.loadTexts: ipv6TcpMIB.setOrganization('IETF IPv6 MIB Working Group')
tcp = MibIdentifier((1, 3, 6, 1, 2, 1, 6))
ipv6TcpConnTable = MibTable((1, 3, 6, 1, 2, 1, 6, 16), )
if mibBuilder.loadTexts: ipv6TcpConnTable.setStatus('obsolete')
ipv6TcpConnEntry = MibTableRow((1, 3, 6, 1, 2, 1, 6, 16, 1), ).setIndexNames((0, "IPV6-TCP-MIB", "ipv6TcpConnLocalAddress"), (0, "IPV6-TCP-MIB", "ipv6TcpConnLocalPort"), (0, "IPV6-TCP-MIB", "ipv6TcpConnRemAddress"), (0, "IPV6-TCP-MIB", "ipv6TcpConnRemPort"), (0, "IPV6-TCP-MIB", "ipv6TcpConnIfIndex"))
if mibBuilder.loadTexts: ipv6TcpConnEntry.setStatus('obsolete')
ipv6TcpConnLocalAddress = MibTableColumn((1, 3, 6, 1, 2, 1, 6, 16, 1, 1), Ipv6Address())
if mibBuilder.loadTexts: ipv6TcpConnLocalAddress.setStatus('obsolete')
ipv6TcpConnLocalPort = MibTableColumn((1, 3, 6, 1, 2, 1, 6, 16, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)))
if mibBuilder.loadTexts: ipv6TcpConnLocalPort.setStatus('obsolete')
ipv6TcpConnRemAddress = MibTableColumn((1, 3, 6, 1, 2, 1, 6, 16, 1, 3), Ipv6Address())
if mibBuilder.loadTexts: ipv6TcpConnRemAddress.setStatus('obsolete')
ipv6TcpConnRemPort = MibTableColumn((1, 3, 6, 1, 2, 1, 6, 16, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)))
if mibBuilder.loadTexts: ipv6TcpConnRemPort.setStatus('obsolete')
ipv6TcpConnIfIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 6, 16, 1, 5), Ipv6IfIndexOrZero())
if mibBuilder.loadTexts: ipv6TcpConnIfIndex.setStatus('obsolete')
ipv6TcpConnState = MibTableColumn((1, 3, 6, 1, 2, 1, 6, 16, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12))).clone(namedValues=NamedValues(("closed", 1), ("listen", 2), ("synSent", 3), ("synReceived", 4), ("established", 5), ("finWait1", 6), ("finWait2", 7), ("closeWait", 8), ("lastAck", 9), ("closing", 10), ("timeWait", 11), ("deleteTCB", 12)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipv6TcpConnState.setStatus('obsolete')
ipv6TcpConformance = MibIdentifier((1, 3, 6, 1, 3, 86, 2))
ipv6TcpCompliances = MibIdentifier((1, 3, 6, 1, 3, 86, 2, 1))
ipv6TcpGroups = MibIdentifier((1, 3, 6, 1, 3, 86, 2, 2))
ipv6TcpCompliance = ModuleCompliance((1, 3, 6, 1, 3, 86, 2, 1, 1)).setObjects(("IPV6-TCP-MIB", "ipv6TcpGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ipv6TcpCompliance = ipv6TcpCompliance.setStatus('obsolete')
ipv6TcpGroup = ObjectGroup((1, 3, 6, 1, 3, 86, 2, 2, 1)).setObjects(("IPV6-TCP-MIB", "ipv6TcpConnState"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ipv6TcpGroup = ipv6TcpGroup.setStatus('obsolete')
mibBuilder.exportSymbols("IPV6-TCP-MIB", ipv6TcpConnTable=ipv6TcpConnTable, ipv6TcpConnEntry=ipv6TcpConnEntry, ipv6TcpMIB=ipv6TcpMIB, ipv6TcpGroups=ipv6TcpGroups, ipv6TcpConnIfIndex=ipv6TcpConnIfIndex, tcp=tcp, ipv6TcpConnRemPort=ipv6TcpConnRemPort, ipv6TcpConformance=ipv6TcpConformance, PYSNMP_MODULE_ID=ipv6TcpMIB, ipv6TcpConnState=ipv6TcpConnState, ipv6TcpConnRemAddress=ipv6TcpConnRemAddress, ipv6TcpConnLocalPort=ipv6TcpConnLocalPort, ipv6TcpCompliances=ipv6TcpCompliances, ipv6TcpConnLocalAddress=ipv6TcpConnLocalAddress, ipv6TcpCompliance=ipv6TcpCompliance, ipv6TcpGroup=ipv6TcpGroup)
| [
"[email protected]"
] | |
351ef3112a8105eea8a02b98a6ff6303a19eee43 | d2c4934325f5ddd567963e7bd2bdc0673f92bc40 | /tests/artificial/transf_Integration/trend_LinearTrend/cycle_30/ar_/test_artificial_128_Integration_LinearTrend_30__100.py | 7a5e907e035774475c35332c1022bd9fc95546df | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | jmabry/pyaf | 797acdd585842474ff4ae1d9db5606877252d9b8 | afbc15a851a2445a7824bf255af612dc429265af | refs/heads/master | 2020-03-20T02:14:12.597970 | 2018-12-17T22:08:11 | 2018-12-17T22:08:11 | 137,104,552 | 0 | 0 | BSD-3-Clause | 2018-12-17T22:08:12 | 2018-06-12T17:15:43 | Python | UTF-8 | Python | false | false | 275 | py | import pyaf.Bench.TS_datasets as tsds
import pyaf.tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 128 , FREQ = 'D', seed = 0, trendtype = "LinearTrend", cycle_length = 30, transform = "Integration", sigma = 0.0, exog_count = 100, ar_order = 0); | [
"[email protected]"
] | |
42f0deaf250627b10751156d712d786cdc96ee26 | 6bf1b595a7f4d3cbf0995455869d438a7d0e0624 | /lingvo/tasks/milan/score_functions.py | 9c4ce867b372dfed657bec15a96096952923b006 | [
"Apache-2.0"
] | permissive | huaxz1986/lingvo | 889abc82b1bab6f37ba861c41eb480b7e89362c0 | b83984577610423e3b1c6b04ca248cd23f2842f7 | refs/heads/master | 2022-05-15T03:29:56.903688 | 2022-04-02T01:41:25 | 2022-04-02T01:41:25 | 173,536,461 | 1 | 0 | Apache-2.0 | 2019-03-03T05:52:01 | 2019-03-03T05:52:01 | null | UTF-8 | Python | false | false | 1,664 | py | # Lint as: python3
# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Implementation of combination functions for dual-encoder models."""
from lingvo import compat as tf
from lingvo.core import base_layer
class DotProductScoreFunction(base_layer.BaseLayer):
"""Performs dot product combination between two encoded vectors."""
@classmethod
def Params(cls):
p = super().Params()
p.name = 'dot_product_score_function'
return p
def FProp(self, theta, x, y):
"""Computes pair-wise dot product similarity.
Args:
theta: NestedMap of variables belonging to this layer and its children.
x: batch of encoded representations from modality x. A float32 Tensor of
shape [x_batch_size, encoded_dim]
y: batch of encoded representations from modality y. A float32 Tensor of
shape [y_batch_size, encoded_dim]
Returns:
Pairwise dot products. A float32 Tensor with shape
`[x_batch_size, y_batch_size]`.
"""
return tf.matmul(x, y, transpose_b=True)
| [
"[email protected]"
] | |
31068cd2c89faea0c9efdff5214f7c0d9abac707 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/otherforms/_suffered.py | f5ba9fb4722605fcd51182e2e5bcc1348faf8603 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 224 | py |
#calss header
class _SUFFERED():
def __init__(self,):
self.name = "SUFFERED"
self.definitions = suffer
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.basic = ['suffer']
| [
"[email protected]"
] | |
231a0e1fcc8967f9072dfe360b036cfcdba74643 | c105797a5b6f5aca0b892ccdadbb2697f80fb3ab | /python_base/base7/base7_3.py | 7a29be88d6785d292d6f115f65d970948129502d | [] | no_license | jj1165922611/SET_hogwarts | 6f987c4672bac88b021069c2f947ab5030c84982 | fbc8d7363af0a4ac732d603e2bead51c91b3f1f7 | refs/heads/master | 2023-01-31T19:41:27.525245 | 2020-12-15T13:43:45 | 2020-12-15T13:43:45 | 258,734,624 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,144 | py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
# @Time : 2020-07-21
# @Author : Joey Jiang
# @File : base7_3.py
# @Software : PyCharm
# @Description: python控制流语法
# 1.1、分支结构
import random
a = 0
if a == 0:
print("a=0")
else:
print("a!=0")
# 1.2、多重分支
a = 1
if a == 1:
print("a=1")
elif a == 2:
print("a=2")
elif a == 3:
print("a==3")
else:
print("a!=1、2、3")
# 1.3、练习
# 分别使用分支嵌套以及多重分支去实现分段函数求值
# 3x - 5 (x>1)
# f(x)= x + 2 (-1<=x<=1)
# 5x + 3(x<-1)
# 1.3.1分支嵌套
x = -2
if x > 1:
print(3 * x - 5)
else:
if x >= -1:
print(x + 2)
else:
print(5 * x + 3)
# 1.3.2多重分支
if x > 1:
print(3 * x - 5)
elif x >= -1:
print(x + 2)
else:
print(5 * x + 3)
# 2.1练习
# 计算1~100的和
sum1 = 0
for i in range(1, 101):
sum1 = sum1 + i
print(sum1)
# 2.2练习
# 加入分支结构实现1~100之间偶数的求和
sum2 = 0
for i in range(1, 101):
if i % 2 == 0:
sum2 = sum2 + i
print(sum2)
# 2.3练习
# 使用python实现1~100之间偶数求和
sum3 = 0
for i in range(2, 101):
if i % 2 == 0:
sum3 = sum3 + i
print(sum3)
# 3、While循环
# 3.1、While Else
while_a = 1
while while_a == 1:
print("while_a=1")
while_a = while_a + 1
else:
print("while_a!=1")
print(while_a)
# 3.2、简单语句组
flag = 10
while flag == 10:
flag = flag + 1
else:
print(flag)
# 4、break语句
for i in range(4):
if i == 2:
break
print("i=", i)
# 5、continue语句
for j in range(4):
if j == 2:
continue
print("j=", j)
# 6、练习
"""
猜数字游戏,计算机出一个1~100之间的随机数由人来猜,
计算机根据人猜的数字分别给出提示大一点/小一点/猜对了
"""
guess_number = random.randint(1, 100)
print(guess_number)
while True:
number = int(input("请输入一个1~100之间的整数>"))
if number == guess_number:
print("猜对了")
break
elif number > guess_number:
print("大一点")
else:
print("小一点")
| [
"[email protected]"
] | |
a4ff973f1cfc97ff556afebe59b954dffd24c381 | 5d2041f8e03fba17af04494b84947528a623852d | /tools/onnx-script.py | 52a2e146b04ce549ebd06ff3f0449d9224f6bded | [
"MIT"
] | permissive | nihui/netron | e60ddf9f64de0d57d1b691502db18edb2aa6b511 | 3963751827f30dd0955b9ad5e80d316aae807272 | refs/heads/master | 2022-01-19T18:26:57.059856 | 2021-12-30T03:44:11 | 2021-12-30T03:44:11 | 249,875,598 | 9 | 4 | MIT | 2020-03-25T03:13:18 | 2020-03-25T03:13:17 | null | UTF-8 | Python | false | false | 9,563 | py |
from __future__ import unicode_literals
import io
import json
import os
import re
import sys
import onnx
from onnx.backend.test.case import collect_snippets
snippets = collect_snippets()
categories = {
'Constant': 'Constant',
'Conv': 'Layer',
'ConvInteger': 'Layer',
'ConvTranspose': 'Layer',
'FC': 'Layer',
'RNN': 'Layer',
'LSTM': 'Layer',
'GRU': 'Layer',
'Gemm': 'Layer',
'Dropout': 'Dropout',
'Elu': 'Activation',
'HardSigmoid': 'Activation',
'LeakyRelu': 'Activation',
'PRelu': 'Activation',
'ThresholdedRelu': 'Activation',
'Relu': 'Activation',
'Selu': 'Activation',
'Sigmoid': 'Activation',
'Tanh': 'Activation',
'LogSoftmax': 'Activation',
'Softmax': 'Activation',
'Softplus': 'Activation',
'Softsign': 'Activation',
'BatchNormalization': 'Normalization',
'InstanceNormalization': 'Normalization',
'LpNormalization': 'Normalization',
'LRN': 'Normalization',
'Flatten': 'Shape',
'Reshape': 'Shape',
'Tile': 'Shape',
'Xor': 'Logic',
'Not': 'Logic',
'Or': 'Logic',
'Less': 'Logic',
'And': 'Logic',
'Greater': 'Logic',
'Equal': 'Logic',
'AveragePool': 'Pool',
'GlobalAveragePool': 'Pool',
'GlobalLpPool': 'Pool',
'GlobalMaxPool': 'Pool',
'LpPool': 'Pool',
'MaxPool': 'Pool',
'MaxRoiPool': 'Pool',
'Concat': 'Tensor',
'Slice': 'Tensor',
'Split': 'Tensor',
'Pad': 'Tensor',
'ImageScaler': 'Data',
'Crop': 'Data',
'Upsample': 'Data',
'Transpose': 'Transform',
'Gather': 'Transform',
'Unsqueeze': 'Transform',
'Squeeze': 'Transform',
}
attribute_type_table = {
'undefined': None,
'float': 'float32', 'int': 'int64', 'string': 'string', 'tensor': 'tensor', 'graph': 'graph',
'floats': 'float32[]', 'ints': 'int64[]', 'strings': 'string[]', 'tensors': 'tensor[]', 'graphs': 'graph[]',
}
def generate_json_attr_type(attribute_type, attribute_name, op_type, op_domain):
assert isinstance(attribute_type, onnx.defs.OpSchema.AttrType)
key = op_domain + ':' + op_type + ':' + attribute_name
if key == ':Cast:to' or key == ':EyeLike:dtype' or key == ':RandomNormal:dtype':
return 'DataType'
s = str(attribute_type)
s = s[s.rfind('.')+1:].lower()
if s in attribute_type_table:
return attribute_type_table[s]
return None
def generate_json_attr_default_value(attr_value):
if not str(attr_value):
return None
if attr_value.HasField('i'):
return attr_value.i
if attr_value.HasField('s'):
return attr_value.s.decode('utf8')
if attr_value.HasField('f'):
return attr_value.f
return None
def generate_json_support_level_name(support_level):
assert isinstance(support_level, onnx.defs.OpSchema.SupportType)
s = str(support_level)
return s[s.rfind('.')+1:].lower()
def generate_json_types(types):
r = []
for type in types:
r.append(type)
r = sorted(r)
return r
def format_range(value):
if value == 2147483647:
return '∞'
return str(value)
def format_description(description):
def replace_line(match):
link = match.group(1)
url = match.group(2)
if not url.startswith("http://") and not url.startswith("https://"):
url = "https://github.com/onnx/onnx/blob/master/docs/" + url
return "[" + link + "](" + url + ")"
description = re.sub("\\[(.+)\\]\\(([^ ]+?)( \"(.+)\")?\\)", replace_line, description)
return description
def generate_json(schemas, json_file):
json_root = []
for schema in schemas:
json_schema = {}
json_schema['name'] = schema.name
if schema.domain:
json_schema['module'] = schema.domain
else:
json_schema['module'] = 'ai.onnx'
json_schema['version'] = schema.since_version
json_schema['support_level'] = generate_json_support_level_name(schema.support_level)
if schema.doc:
json_schema['description'] = format_description(schema.doc.lstrip())
if schema.attributes:
json_schema['attributes'] = []
for _, attribute in sorted(schema.attributes.items()):
json_attribute = {}
json_attribute['name'] = attribute.name
attribute_type = generate_json_attr_type(attribute.type, attribute.name, schema.name, schema.domain)
if attribute_type:
json_attribute['type'] = attribute_type
elif 'type' in json_attribute:
del json_attribute['type']
json_attribute['required'] = attribute.required
default_value = generate_json_attr_default_value(attribute.default_value)
if default_value:
json_attribute['default'] = default_value
json_attribute['description'] = format_description(attribute.description)
json_schema['attributes'].append(json_attribute)
if schema.inputs:
json_schema['inputs'] = []
for input in schema.inputs:
json_input = {}
json_input['name'] = input.name
json_input['type'] = input.typeStr
if input.option == onnx.defs.OpSchema.FormalParameterOption.Optional:
json_input['option'] = 'optional'
elif input.option == onnx.defs.OpSchema.FormalParameterOption.Variadic:
json_input['list'] = True
json_input['description'] = format_description(input.description)
json_schema['inputs'].append(json_input)
json_schema['min_input'] = schema.min_input
json_schema['max_input'] = schema.max_input
if schema.outputs:
json_schema['outputs'] = []
for output in schema.outputs:
json_output = {}
json_output['name'] = output.name
json_output['type'] = output.typeStr
if output.option == onnx.defs.OpSchema.FormalParameterOption.Optional:
json_output['option'] = 'optional'
elif output.option == onnx.defs.OpSchema.FormalParameterOption.Variadic:
json_output['list'] = True
json_output['description'] = format_description(output.description)
json_schema['outputs'].append(json_output)
json_schema['min_output'] = schema.min_output
json_schema['max_output'] = schema.max_output
if schema.min_input != schema.max_input:
json_schema['inputs_range'] = format_range(schema.min_input) + ' - ' + format_range(schema.max_input)
if schema.min_output != schema.max_output:
json_schema['outputs_range'] = format_range(schema.min_output) + ' - ' + format_range(schema.max_output)
if schema.type_constraints:
json_schema['type_constraints'] = []
for type_constraint in schema.type_constraints:
json_schema['type_constraints'].append({
'description': type_constraint.description,
'type_param_str': type_constraint.type_param_str,
'allowed_type_strs': type_constraint.allowed_type_strs
})
if schema.name in snippets:
def update_code(code):
lines = code.splitlines()
while len(lines) > 0 and re.search("\\s*#", lines[-1]):
lines.pop()
if len(lines) > 0 and len(lines[-1]) == 0:
lines.pop()
return '\n'.join(lines)
json_schema['examples'] = []
for summary, code in sorted(snippets[schema.name]):
json_schema['examples'].append({
'summary': summary,
'code': update_code(code)
})
if schema.name in categories:
json_schema['category'] = categories[schema.name]
json_root.append(json_schema);
json_root = sorted(json_root, key=lambda item: item['name'] + ':' + str(item['version'] if 'version' in item else 0).zfill(4))
with io.open(json_file, 'w', newline='') as fout:
json_root = json.dumps(json_root, indent=2)
for line in json_root.splitlines():
line = line.rstrip()
if sys.version_info[0] < 3:
line = str(line)
fout.write(line)
fout.write('\n')
def metadata():
json_file = os.path.join(os.path.dirname(__file__), '../source/onnx-metadata.json')
all_schemas_with_history = onnx.defs.get_all_schemas_with_history()
generate_json(all_schemas_with_history, json_file)
def optimize():
import onnx
from onnx import optimizer
file = sys.argv[2]
base = os.path.splitext(file)
onnx_model = onnx.load(file)
passes = optimizer.get_available_passes()
optimized_model = optimizer.optimize(onnx_model, passes)
onnx.save(optimized_model, base + '.optimized.onnx')
def infer():
import onnx
import onnx.shape_inference
from onnx import shape_inference
file = sys.argv[2]
base = os.path.splitext(file)[0]
onnx_model = onnx.load(base + '.onnx')
onnx_model = onnx.shape_inference.infer_shapes(onnx_model)
onnx.save(onnx_model, base + '.shape.onnx')
if __name__ == '__main__':
command_table = { 'metadata': metadata, 'optimize': optimize, 'infer': infer }
command = sys.argv[1]
command_table[command]()
| [
"[email protected]"
] | |
2b6b3d0ed44ecf20e0b302e6ccd0aa6574a753fa | 22cbb7cffc3e5cf53fe87d2db216fdb88c8b7a8c | /stems/gis/convert.py | e26ac0443e6bd20f52888999784f13231793fecd | [
"BSD-3-Clause"
] | permissive | ceholden/stems | 838eb496978f7b68ae72988e0469c60e8730cb9c | 2e219eb76a44d6897881642635103b3353fc5539 | refs/heads/master | 2022-02-12T21:56:41.939073 | 2019-08-19T23:09:49 | 2019-08-19T23:09:49 | 164,480,487 | 3 | 2 | null | null | null | null | UTF-8 | Python | false | false | 5,186 | py | """ GIS variable conversion library
Functions here are convenient ways of going from various representations
of GIS information used in this stack (e.g., WKT) to the following
representations:
* Coordinate Reference System
* :py:class:`rasterio.crs.CRS`
* Geotransform
* :py:class:`affine.Affine`
* Bounding Box
* :py:class:`rasterio.coords.BoundingBox`
* Bounds
* :py:class:`shapely.geom.Polygon`
"""
from functools import singledispatch
import logging
from affine import Affine
import numpy as np
from osgeo import osr
from rasterio.coords import BoundingBox
from rasterio.crs import CRS
from rasterio.errors import CRSError
import shapely.geometry
from ..utils import (find_subclasses,
register_multi_singledispatch)
logger = logging.getLogger()
LIST_TYPE = (tuple, list, np.ndarray, )
# XARRAY_TYPE = (xr.Dataset, xr.DataArray)
GEOM_TYPE = find_subclasses(shapely.geometry.base.BaseGeometry)
# ============================================================================
# Affine geotransform
@singledispatch
def to_transform(value, from_gdal=False):
""" Convert input into an :py:class:`affine.Affine` transform
Parameters
----------
value : Affine or iterable
6 numbers representing affine transform
from_gdal : bool, optional
If `value` is a tuple or list, specifies if transform
is GDAL variety (True) or rasterio/affine (False)
Returns
-------
affine.Affine
Affine transform
"""
raise _CANT_CONVERT(value)
@to_transform.register(Affine)
def _to_transform_affine(value, from_gdal=False):
return value
@register_multi_singledispatch(to_transform, LIST_TYPE)
def _to_transform_iter(value, from_gdal=False):
if from_gdal:
return Affine.from_gdal(*value[:6])
else:
return Affine(*value[:6])
@to_transform.register(str)
def _to_transform_str(value, from_gdal=False, sep=','):
return _to_transform_iter([float(v) for v in value.split(sep)])
# ============================================================================
# CRS
# TODO: Dispatch function for Cartopy
@singledispatch
def to_crs(value):
""" Convert a CRS representation to a :py:class:`rasterio.crs.CRS`
Parameters
----------
value : str, int, dict, or osr.SpatialReference
Coordinate reference system as WKT, Proj.4 string, EPSG code,
rasterio-compatible proj4 attributes in a dict, or OSR definition
Returns
-------
rasterio.crs.CRS
CRS
"""
raise _CANT_CONVERT(value)
@to_crs.register(CRS)
def _to_crs_crs(value):
return value
@to_crs.register(str)
def _to_crs_str(value):
# After rasterio=1.0.14 WKT is backbone so try it first
try:
crs_ = CRS.from_wkt(value)
crs_.is_valid
except CRSError as err:
logger.debug('Could not parse CRS as WKT', err)
try:
crs_ = CRS.from_string(value)
crs_.is_valid
except CRSError as err:
logger.debug('Could not parse CRS as Proj4', err)
raise CRSError('Could not interpret CRS input as '
'either WKT or Proj4')
return crs_
@to_crs.register(int)
def _to_crs_epsg(value):
return CRS.from_epsg(value)
@to_crs.register(dict)
def _to_crs_dict(value):
return CRS(value)
@to_crs.register(osr.SpatialReference)
def _to_crs_osr(value):
return CRS.from_wkt(value.ExportToWkt())
# ============================================================================
# BoundingBox
@singledispatch
def to_bounds(value):
""" Convert input to a :py:class:`rasterio.coords.BoundingBox`
Parameters
----------
value : iterable, or Polygon
Input containing some geographic information
Returns
-------
BoundingBox
Bounding box (left, bottom, right, top). Also described as
(minx, miny, maxx, maxy)
"""
raise _CANT_CONVERT(value)
@to_bounds.register(BoundingBox)
def _to_bounds_bounds(value):
return value
@register_multi_singledispatch(to_bounds, LIST_TYPE)
def _to_bounds_iter(value):
return BoundingBox(*value)
@register_multi_singledispatch(to_bounds, GEOM_TYPE)
def _to_bounds_geom(value):
return BoundingBox(*value.bounds)
# ============================================================================
# Polygon
@singledispatch
def to_bbox(value):
""" Convert input a bounding box :py:class:`shapely.geometry.Polygon`
Parameters
----------
value : BoundingBox
Object representing a bounding box, or an xarray object with coords
we can use to calculate one from
Returns
-------
shapely.geometry.Polygon
BoundingBox as a polygon
"""
raise _CANT_CONVERT(value)
@register_multi_singledispatch(to_bbox, GEOM_TYPE)
def _to_bbox_geom(value):
return _to_bbox_bounds(BoundingBox(*value.bounds))
@to_bbox.register(BoundingBox)
def _to_bbox_bounds(value):
return shapely.geometry.box(*value)
# ============================================================================
# UTILITIES
def _CANT_CONVERT(obj):
return TypeError(f"Don't know how to convert this type: {type(obj)}")
| [
"[email protected]"
] | |
7f9a2d07182faa806f9337f02a6a0ce4035514fd | 0676f6e4d3510a0305d29aa0b1fe740d538d3b63 | /Python/SImplifyPline/CleanUpPolyline.py | 1ce7d7116eb272886ed20d4186ae8a3b571c98fb | [
"LicenseRef-scancode-warranty-disclaimer"
] | no_license | pgolay/PG_Scripts | f70ffe7e5ca07acd6f4caedc9a9aec566542da7c | 796704a7daa6ac222a40bb02afdb599f74a6b0d4 | refs/heads/master | 2021-01-19T16:53:41.525879 | 2017-02-07T18:26:10 | 2017-02-07T18:26:10 | 2,730,362 | 9 | 1 | null | 2016-12-30T17:58:08 | 2011-11-08T00:04:33 | Python | UTF-8 | Python | false | false | 1,898 | py | import Rhino
import scriptcontext as sc
"""
Cleans up by collapsing tiny segments in a polyline.
"""
def CleanUpPolyline():
while True:
tol = sc.doc.ModelAbsoluteTolerance
if sc.sticky.has_key("PLineSimplifyTol"):
tol = sc.sticky["PLineSimplifyTol"]
go = Rhino.Input.Custom.GetObject()
go.AcceptNumber(True, False)
go.GeometryFilter = Rhino.DocObjects.ObjectType.Curve
opDblTol = Rhino.Input.Custom.OptionDouble(tol)
go.AddOptionDouble("SegmentTolerance",opDblTol)
result = go.Get()
if( go.CommandResult() != Rhino.Commands.Result.Success ):
return
if result == Rhino.Input.GetResult.Object:
if type(go.Object(0).Geometry()) == Rhino.Geometry.PolylineCurve:
curve = go.Object(0).Geometry()
rc, pLine = curve.TryGetPolyline()
pLineId = go.Object(0).ObjectId
else:
sc.doc.Objects.UnselectAll()
sc.doc.Views.Redraw()
print "Sorry, that was not a polyline."
continue
break
elif result == Rhino.Input.GetResult.Option:
tol = opDblTol.CurrentValue
sc.sticky["PLineSimplifyTol"] = tol
continue
elif result == Rhino.Input.GetResult.Number:
tol = go.Number()
sc.sticky["PLineSimplifyTol"] = tol
continue
break
count = pLine.CollapseShortSegments(tol)
if count !=0:
sc.doc.Objects.Replace(pLineId, pLine)
sc.doc.Views.Redraw()
print str(count) + " short segments were collapsed."
else:
print "No short segments were collapsed."
pass
if __name__ == "__main__":
CleanUpPolyline() | [
"[email protected]"
] | |
a33b2f9f3cd62ddd7189114556f08b0144aad7c6 | b08d42933ac06045905d7c005ca9c114ed3aecc0 | /src/coefSubset/evaluate/ranks/tenth/rank_2p49_Q.py | c80b9b7c96acce81b347d895d8286c78c576e7d8 | [] | no_license | TanemuraKiyoto/PPI-native-detection-via-LR | d148d53f5eb60a4dda5318b371a3048e3f662725 | 897e7188b0da94e87126a4acc0c9a6ff44a64574 | refs/heads/master | 2022-12-05T11:59:01.014309 | 2020-08-10T00:41:17 | 2020-08-10T00:41:17 | 225,272,083 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,204 | py | # 9 July 2019
# Kiyoto Aramis Tanemura
# Several metrics are used to assess the performance of the trained RF model, notably native ranking. This script returns a ranking of the native protein-protein complex among a decoy set. For convenience, I will define as a function and will call in a general performance assessment script.
# Modified 11 July 2019 by Kiyoto Aramis Tanemura. To parallelize the process, I will replace the for loop for the testFileList to a multiprocessing pool.
# Modified 9 September 2019 by Kiyoto Aramis Tanemura. I will use the function to perform the calculation on one CSV file only. Thus instead of a function to import in other scripts, they will be individual jobs parallelized as individual jobs in the queue.
import os
import pandas as pd
import numpy as np
import pickle
os.chdir('/mnt/scratch/tanemur1/')
# Read the model and trainFile
testFile = '2p49.csv'
identifier = 'Q'
thresholdCoef = 0.1
testFilePath = '/mnt/scratch/tanemur1/CASF-PPI/nonb_descriptors/complete/'
modelPath = '/mnt/home/tanemur1/6May2019/2019-11-11/results/coefSubset/tenth/'
outputPath = '/mnt/home/tanemur1/6May2019/2019-11-11/results/coefSubset/evaluate/tenth/ranks/'
pdbID = testFile[:4]
with open(modelPath + 'model' + identifier + '.pkl', 'rb') as f:
clf = pickle.load(f)
result = pd.DataFrame()
scoreList = []
df1 = pd.read_csv(testFilePath + testFile)
dropList = ['Unnamed: 0', 'Unnamed: 0.1', 'ref']
df1 = df1.drop(dropList, axis = 1)
df1 = df1.set_index('Pair_name')
df1 = pd.DataFrame(df1.values.T, columns = df1.index, index = df1.columns)
df1.fillna(0.0, inplace = True)
df1 = df1.reindex(sorted(df1.columns), axis = 1)
# Drop features with coefficients below threshold
coefs = pd.read_csv('/mnt/home/tanemur1/6May2019/2019-11-11/results/medianCoefs.csv', index_col = 0, header = None, names = ['coefficients'])
coefs = coefs[np.abs(coefs['coefficients']) < thresholdCoef]
dropList = list(coefs.index)
del coefs
df1.drop(dropList, axis = 1, inplace = True)
with open(modelPath + 'standardScaler' + identifier + '.pkl', 'rb') as g:
scaler = pickle.load(g)
for i in range(len(df1)):
# subtract from one row each row of the dataframe, then remove the trivial row[[i]] - row[[i]]. Also some input files have 'class' column. This is erroneous and is removed.
df2 = pd.DataFrame(df1.iloc[[i]].values - df1.values, index = df1.index, columns = df1.columns)
df2 = df2.drop(df1.iloc[[i]].index[0], axis = 0)
# Standardize inut DF using the standard scaler used for training data.
df2 = scaler.transform(df2)
# Predict class of each comparison descriptor and sum the classes to obtain score. Higher score corresponds to more native-like complex
predictions = clf.predict(df2)
score = sum(predictions)
scoreList.append(score)
# Make a new DataFrame to store the score and corresponding descriptorID. Add rank as column. Note: lower rank corresponds to more native-like complex
result = pd.DataFrame(data = {'score': scoreList}, index = df1.index.tolist()).sort_values(by = 'score', ascending = False)
result['rank'] = range(1, len(result) + 1)
with open(outputPath + pdbID + identifier + '.csv', 'w') as h:
result.to_csv(h)
| [
"[email protected]"
] | |
2532b165031ee7384c512a86e01653fe3015a922 | a0c53168a4bdcfb0aa917d6d2c602f0999443a10 | /projex/docgen/document.py | 7ee7ccaa34818e7e66f325153fd66981982afda1 | [] | no_license | kanooshka/DPS_PIPELINE | 8067154c59ca5c8c9c09740969bb6e8537021903 | df2fcdecda5bce98e4235ffddde1e99f334562cc | refs/heads/master | 2021-05-24T04:32:03.457648 | 2018-09-07T13:25:11 | 2018-09-07T13:25:11 | 29,938,064 | 3 | 2 | null | 2020-07-23T23:06:37 | 2015-01-27T22:26:01 | Python | UTF-8 | Python | false | false | 41,392 | py | #!/usr/bin/python
""" Defines the document class that is used with the docgen system. """
# define authorship information
__authors__ = ['Eric Hulser']
__author__ = ','.join(__authors__)
__credits__ = []
__copyright__ = 'Copyright (c) 2011, Projex Software'
__license__ = 'LGPL'
# maintanence information
__maintainer__ = 'Projex Software'
__email__ = '[email protected]'
#------------------------------------------------------------------------------
import inspect
import logging
import new
import os
import re
import xml.sax.saxutils
from projex import text
from projex import wikitext
from projex.docgen import templates
from projex.docgen import commands
logger = logging.getLogger(__name__)
DATA_TYPE_ORDER = [
'module',
'class',
'variable',
'member',
'property',
'enum',
'function',
'method',
'signal',
'slot',
'abstract method',
'class method',
'static method',
'deprecated method',
'built-in',
]
DATA_PRIVACY_ORDER = [
'public',
'imported public',
'protected',
'imported protected',
'private',
'imported private',
'built-in',
'imported built-in',
]
DATA_ORDER = []
for privacy in DATA_PRIVACY_ORDER:
for typ in DATA_TYPE_ORDER:
DATA_ORDER.append('%s %s' % (privacy, typ))
class Attribute(tuple):
""" Used to map tuple returns to support different python versions. """
def __init__( self, member_tuple ):
super(Attribute, self).__init__(member_tuple)
self.name = member_tuple[0]
self.kind = member_tuple[1]
self.defining_class = member_tuple[2]
self.object = member_tuple[3]
if ( hasattr(self.object, 'func_type') ):
self.kind = self.object.func_type
#------------------------------------------------------------------------------
class DocumentData(object):
""" Struct to hold data about a document object. """
name = None
value = None
dataType = None
privacy = None
def section( self ):
"""
Returns the section type for this data by joining the privacy and \
type information.
:return <str>
"""
return (self.privacy + ' ' + self.dataType)
@staticmethod
def create( name,
value,
kind = 'data',
defaultVarType = 'variable',
defaultFuncType ='function' ):
"""
Creates a new document data instance.
:return <DocumentData>
"""
# look for private members
results = re.match('^(_\w+)__.+', name)
if ( results ):
name = name.replace(results.group(1), '')
# determine the privacy level for this data
privacy = 'public'
if ( name.startswith('__') and name.endswith('__') ):
privacy = 'built-in'
elif ( name.startswith('__') ):
privacy = 'private'
elif ( name.startswith('_') ):
privacy = 'protected'
docdata = DocumentData()
docdata.name = name
docdata.value = value
# look for specific kinds of methods
if ( kind == 'method' ):
type_name = type(value).__name__
if ( type_name == 'pyqtSignal' ):
kind = 'signal'
elif ( type_name == 'pyqtSlot' ):
kind = 'slot'
elif ( type_name == 'pyqtProperty' ):
kind = 'property'
elif ( hasattr(value, 'func_type') ):
kind = getattr(value, 'func_type')
if ( kind != 'data' ):
docdata.dataType = kind
else:
docdata.dataType = commands.defaultValueType( value,
defaultVarType,
defaultFuncType )
docdata.privacy = privacy
return docdata
#------------------------------------------------------------------------------
class Document(object):
"""
Defines the class that collects all documentation for a python
object.
"""
cache = {}
aliases = {}
reverseAliases = {}
def __init__( self ):
self._object = None
self._parent = None
self._objectName = ''
self._html = ''
self._allMembersHtml = ''
self._title = ''
self._data = {}
self._sourceHtml = {}
self._children = []
# protected methods
def _bases( self, cls, recursive = False ):
"""
Looks up the bases for the inputed obj instance.
:param obj | <object>
:param recursive | <bool>
:return [<cls>, ..]
"""
if ( not inspect.isclass( cls ) ):
return []
output = list(cls.__bases__[:])
if ( not recursive ):
return output
for basecls in output:
output += self._bases(basecls, recursive = recursive)
return list(set(output))
def _collectMembers( self, obj ):
if ( not inspect.isclass( obj ) ):
return []
try:
members = inspect.classify_class_attrs(self._object)
except AttributeError:
members = []
# support python25-
if ( members and type(members[0]) == tuple ):
members = [ Attribute(member) for member in members ]
return members
def _generateAllMemberSummary( self, member ):
"""
Generates the member summary documentation.
:param member <Attribute>
:return <str>
"""
try:
obj = getattr(member.defining_class, member.name)
except AttributeError:
return ''
key = member.name
cls = member.defining_class
if ( 'method' in member.kind ):
docname = cls.__module__ + '-' + cls.__name__
doc = Document.cache.get(docname)
if ( doc ):
opts = (doc.url(relativeTo = self), key, key)
href = '<a href="%s#%s">%s</a>' % opts
else:
href = key
kind = member.kind
if ( hasattr(obj, 'func_type') ):
kind = obj.func_type
templ = '%s::%s%s'
if ( 'static' in kind ):
templ += ' [static]'
elif ( 'class' in kind ):
templ += ' [class]'
elif ( 'abstract' in kind ):
templ += ' [abstract]'
elif ( 'deprecated' in kind ):
templ += ' [deprecated]'
return templ % (cls.__name__, href, self._generateArgs(obj))
else:
opts = (cls.__name__, key, type(member.object).__name__)
return '%s::%s : %s' % opts
def _generateAllMembersDocs(self):
"""
Generates the all members documentation for this document.
:return <str>
"""
if ( not inspect.isclass(self._object) ):
return ''
members = self._collectMembers(self._object)
member_docs = []
members.sort( lambda x, y: cmp( x.name, y.name ) )
for member in members:
if ( member.name.startswith('__') and member.name.endswith('__') ):
continue
member_doc = self._generateAllMemberSummary(member)
if ( member_doc ):
member_docs.append('<li>%s</li>' % member_doc)
environ = commands.ENVIRON.copy()
environ['members_left'] = '\n'.join( member_docs[:len(member_docs)/2])
environ['members_right'] = '\n'.join( member_docs[len(member_docs)/2:])
environ['title'] = self.title()
environ['base_url'] = self.baseurl()
environ['static_url'] = environ['base_url'] + '/_static'
environ['navigation'] %= environ
return templates.template('allmembers.html') % environ
def _generateArgs(self, obj):
"""
Generates the argument information for the inputed object.
:param obj | <variant>
:return <str>
"""
try:
return inspect.formatargspec( *inspect.getargspec( obj ) )
except TypeError:
try:
return self._generateArgs( obj.im_func )
except AttributeError:
pass
if ( isinstance( obj, new.instancemethod ) and
hasattr( obj.im_func, 'func_args' ) ):
return obj.im_func.func_args
return '(*args, **kwds) [unknown]'
def _generateHtml( self ):
"""
Generates the HTML documentation for this document.
:return <str>
"""
if ( self.isNull() or self._html ):
return self._html
# generate module docs
if ( inspect.ismodule( self._object ) ):
return self._generateModuleDocs()
# generate class docs
elif ( inspect.isclass( self._object ) ):
return self._generateClassDocs()
# not sure what this is
return ''
def _generateClassDocs( self ):
"""
Generates class documentation for this object.
"""
html = []
self.parseData()
# determine the inheritance
bases = []
for base in self._bases( self._object ):
doc = commands.findDocument(base)
if ( doc ):
opt = {}
opt['text'] = base.__name__
opt['url'] = doc.url( relativeTo = self )
bases.append( templates.template('link_standard.html') % opt )
else:
bases.append( base.__name__ )
if ( len(bases) > 1 ):
basestxt = ', '.join(bases[:-1])
inherits = 'Inherits %s and %s.' % (basestxt, bases[-1])
elif (len(bases) == 1):
inherits = 'Inherits %s.' % bases[0]
else:
inherits = ''
# determine the subclasses
subclasses = []
for subcls in self._subclasses( self._object ):
doc = commands.findDocument(subcls)
if ( doc ):
opt = {}
opt['text'] = subcls.__name__
opt['url'] = doc.url( relativeTo = self )
subclasses.append( templates.template('link_standard.html') % opt )
else:
subclasses.append( subcls.__name__ )
if ( len(subclasses) > 1 ):
subs = ', '.join(subclasses[:-1])
inherited_by = 'Inherited by %s and %s.' % (subs, subclasses[-1])
elif ( len(subclasses) == 1 ):
inherited_by = 'Inherited by %s.' % (subclasses[0])
else:
inherited_by = ''
allmembers = self.objectName().split('.')[-1] + '-allmembers.html'
# generate the module environ
environ = commands.ENVIRON.copy()
environ['title'] = self.title()
environ['allmembers'] = './' + allmembers
environ['breadcrumbs'] = self.breadcrumbs()
environ['url'] = self.url()
environ['doctype'] = 'Class'
environ['inherits'] = inherits
environ['inherited_by'] = inherited_by
modname = self._object.__module__
moddoc = Document.cache.get(modname)
if ( moddoc ):
modurl = moddoc.url(relativeTo = self)
environ['module'] = '<a href="%s">%s</a>' % (modurl, modname)
else:
environ['module'] = modname
html.append( templates.template('header_class.html') % environ )
# generate the summary report
gdata = self.groupedData()
keys = [key for key in gdata.keys() if key in DATA_ORDER]
keys.sort(lambda x, y: cmp(DATA_ORDER.index(x), DATA_ORDER.index(y)))
for key in keys:
html.append( self._generateSummary( key, gdata[key] ) )
# generate the main documentation
maindocs = self._generateObjectDocs( self._object )
if ( maindocs ):
environ = commands.ENVIRON.copy()
environ['type'] = 'Class'
environ['contents'] = maindocs
html.append( templates.template('docs_main.html') % environ )
# generate the member documentation
funcs = self.data().values()
html.append( self._generateMemberDocs( 'Member Documentation',
funcs))
# generate the document environ
return '\n'.join(html)
    def _generateMemberDocs( self, title, data ):
        """
        Generates the per-member html documentation for the inputed set of
        parsed data, rendering each function/method and enum through its
        template and wrapping them all in the 'docs_members.html' template.
        
        :param      title | <str> | heading for the member section
        :param      data  | [ <DocumentData>, .. ]
        
        :return     <str> html
        """
        if ( not data ):
            return ''
        
        # NOTE(review): these are never populated, so the reimplement
        # lookups below never fire -- looks like an unfinished refactor;
        # presumably they were meant to come from self._bases /
        # self._subclasses.  Confirm before fixing.
        bases = []
        subclasses = []
        
        # generate the html
        html = []
        
        # py2-style cmp sort: members are documented alphabetically by name
        data.sort(lambda x, y: cmp(x.name, y.name))
        for entry in data:
            # generate function information
            if ( 'function' in entry.dataType or 'method' in entry.dataType ):
                # lookup base methods for reimplimintation
                reimpliments = []
                for base in bases:
                    if ( entry.name in base.__dict__ ):
                        doc = commands.findDocument(base)
                        if ( doc ):
                            opt = {}
                            opt['text'] = base.__name__
                            opt['url'] = doc.url( relativeTo = self )
                            # deep-link to the member anchor on the base page
                            opt['url'] += '#' + entry.name
                            href = templates.template('link_standard.html') % opt
                            reimpliments.append( href )
                        else:
                            reimpliments.append( entry.name )
                
                reimpliment_doc = ''
                if ( reimpliments ):
                    urls = ','.join(reimpliments)
                    reimpliment_doc = 'Reimpliments from %s.' % urls
                
                # lookup submodules for reimplimentation
                reimplimented = []
                for subcls in subclasses:
                    if ( entry.name in subcls.__dict__ ):
                        doc = commands.findDocument(subcls)
                        if ( doc ):
                            opt = {}
                            opt['text'] = subcls.__name__
                            opt['url'] = doc.url( relativeTo = self )
                            opt['url'] += '#' + entry.name
                            href = templates.template('link_standard.html') % opt
                            reimplimented.append( href )
                        else:
                            reimplimented.append( entry.name )
                
                reimplimented_doc = ''
                if ( reimplimented ):
                    urls = ','.join(reimplimented)
                    reimplimented_doc = 'Reimplimented by %s.' % urls
                
                # dataType may be qualified like 'static method'; surface the
                # qualifier as a bracketed description
                func_split = entry.dataType.split(' ')
                desc = ''
                if ( len(func_split) > 1 ):
                    desc = '[%s]' % func_split[0]
                
                # add the function to the documentation
                environ = commands.ENVIRON.copy()
                environ['type'] = entry.dataType
                environ['name'] = entry.name
                environ['args'] = self._generateArgs( entry.value )
                environ['desc'] = desc
                environ['contents'] = self._generateObjectDocs(entry.value)
                environ['reimpliments'] = reimpliment_doc
                environ['reimplimented'] = reimplimented_doc
                
                html.append( templates.template('docs_function.html') % environ )
            
            elif ( entry.dataType == 'enum' ):
                environ = commands.ENVIRON.copy()
                environ['name'] = entry.name
                
                value_contents = []
                # assumes entry.value maps both key->value and value->key
                # (bidirectional enum mapping) -- TODO confirm
                values = entry.value.values()
                values.sort()
                
                for value in values:
                    value_opts = {}
                    value_opts['key'] = entry.value[value]
                    value_opts['value'] = value
                    
                    value_templ = templates.template('docs_enum_value.html')
                    value_item = value_templ % value_opts
                    value_contents.append( value_item )
                
                environ['contents'] = '\n'.join(value_contents)
                html.append( templates.template('docs_enum.html') % environ )
        
        environ = {}
        environ['title'] = title
        environ['contents'] = '\n'.join( html )
        
        return templates.template('docs_members.html') % environ
    def _generateModuleDocs( self ):
        """
        Generates the module-level html page for this document: header,
        per-type summaries, the rendered module docstring, and the
        documentation for each module-level function.
        
        :return     <str> html
        """
        html = []
        
        # generate the module environ
        environ = commands.ENVIRON.copy()
        environ['title'] = self.title()
        environ['base_url'] = self.baseurl()
        environ['static_url'] = environ['base_url'] + '/_static'
        environ['breadcrumbs'] = self.breadcrumbs()
        environ['url'] = self.url()
        environ['doctype'] = 'Module'
        
        # a module backed by an __init__ file is a package
        if ( '__init__' in self._object.__file__ ):
            environ['doctype'] = 'Package'
        
        # link to the generated '<name>-source.html' page for this module
        url_split = environ['url'].split('/')
        sources_url = './%s-source.html' % url_split[-1].split('.')[0]
        environ['sources'] = sources_url
        
        # the navigation snippet itself contains %-placeholders that are
        # filled in from this same environ
        environ['navigation'] %= environ
        
        html.append( templates.template('header_module.html') % environ )
        
        # generate the summary report, in the canonical DATA_ORDER sequence
        gdata = self.groupedData()
        for key in sorted( gdata.keys(), key = lambda x: DATA_ORDER.index(x)):
            value = gdata[key]
            html.append( self._generateSummary( key, gdata[key] ) )
        
        # generate the main documentation from the module docstring
        maindocs = self._generateObjectDocs( self._object )
        if ( maindocs ):
            environ = commands.ENVIRON.copy()
            environ['type'] = 'Module'
            environ['contents'] = maindocs
            html.append( templates.template('docs_main.html') % environ )
        
        # generate the member documentation
        html.append( self._generateMemberDocs('Module Function Documentation',
                                              self.data().values()))
        
        return '\n'.join(html)
def _generateObjectDocs( self, obj ):
"""
Generates documentation based on the inputed object's docstring and
member variable information.
:param obj | <str>
:return <str> html
"""
# get the documentation
try:
docs = inspect.getdoc(obj)
except AttributeError:
pass
if ( docs == None ):
try:
docs = inspect.getcomments(obj)
except AttributeError:
docs = ''
return wikitext.render(docs,
commands.url_handler,
options=commands.RENDER_OPTIONS)
def _generateSourceDocs( self ):
"""
Return the documentation containing the source code.
:return <str>
"""
if ( not inspect.ismodule(self._object) ):
return ''
# load the code file
codefilename = os.path.splitext( self._object.__file__ )[0]
codefilename += '.py'
codefile = open(codefilename, 'r')
code = codefile.read()
codefile.close()
environ = commands.ENVIRON.copy()
environ['code'] = xml.sax.saxutils.escape(code)
environ['title'] = self.title()
environ['base_url'] = self.baseurl()
environ['static_url'] = environ['base_url'] + '/_static'
environ['breadcrumbs'] = self.breadcrumbs(includeSelf = True)
environ['navigation'] %= environ
return templates.template('source.html') % environ
    def _generateSummary( self, section, values, columns = 1 ):
        """
        Generates the summary table html for the inputed section and its
        parsed data values, including a trailing list of members inherited
        from base classes.
        
        :param      section | <str> | privacy-qualified type, e.g. 'public method'
        :param      values  | [ <DocumentData>, .. ]
        :param      columns | <int> | NOTE(review): multi-column layout is
                      unimplemented -- any value > 1 produces NO output (see
                      the 'pass' branch below); verify intent before use
        
        :return     <str> html
        """
        # strip out built-in variables
        newvalues = []
        for value in values:
            if ( not (value.privacy == 'built-in' and
                      value.dataType == 'variable' )):
                newvalues.append(value)
        
        values = newvalues
        if ( not values ):
            return ''
        
        # split the data into columns (case-insensitive alphabetical order)
        values.sort( lambda x, y: cmp( x.name.lower(), y.name.lower() ) )
        
        url = self.url()
        coldata = []
        if ( columns > 1 ):
            # NOTE(review): unfinished branch -- coldata stays empty here
            pass
        else:
            coldata = [values]
        
        html = []
        # NOTE(review): 'processed' is collected but never read
        processed = []
        for colitem in coldata:
            for data in colitem:
                data_environ = {}
                data_environ['url'] = url
                data_environ['name'] = data.name
                data_environ['type'] = data.dataType
                
                processed.append( data.name )
                
                # pick the summary template by member kind
                if ( 'function' in data.dataType or
                     'method' in data.dataType ):
                    data_environ['args'] = self._generateArgs( data.value )
                    templ = templates.template('summary_function.html')
                    html.append( templ % data_environ )
                
                elif ( data.dataType == 'enum' ):
                    templ = templates.template('summary_enum.html')
                    html.append( templ % data_environ )
                
                elif ( 'variable' in data.dataType or
                       'member' in data.dataType ):
                    try:
                        value = getattr(self._object, data.name)
                    except AttributeError:
                        value = None
                    
                    data_environ['value_type'] = type(value).__name__
                    templ = templates.template('summary_variable.html')
                    html.append( templ % data_environ )
                
                else:
                    # anything else links out to its own document when one
                    # exists in the cache
                    datadoc = commands.findDocument(data.value)
                    if ( datadoc ):
                        opts = {}
                        opts['text'] = data.name
                        opts['url'] = datadoc.url( relativeTo = self )
                        contents = templates.template('link_standard.html') % opts
                    else:
                        contents = data.name
                    
                    data_environ['contents'] = contents
                    templ = templates.template('summary_item.html')
                    html.append( templ % data_environ )
        
        # update the bases environ: count members of this section that are
        # defined on a base class rather than on this object
        members = self._collectMembers(self._object)
        
        inherited_members = {}
        for member in members:
            mem_name = member.name
            mem_kind = member.kind
            mem_cls = member.defining_class
            mem_value = member.object
            
            # wrapped callables may carry their real kind on 'func_type'
            if ( hasattr(member.object, 'func_type') ):
                mem_kind = member.object.func_type
            
            if ( mem_cls == self._object ):
                continue
            
            data = DocumentData.create( mem_name,
                                        mem_value,
                                        mem_kind,
                                        'member',
                                        'method' )
            
            # only count members belonging to the section being summarized
            if ( section != data.section() ):
                continue
            
            inherited_members.setdefault( mem_cls, 0 )
            inherited_members[mem_cls] += 1
        
        inherit_summaries = []
        templ = templates.template('summary_inherit.html')
        bases = self._bases( self._object, True )
        
        # present inherited counts in method-resolution (base) order
        inherits = inherited_members.keys()
        inherits.sort( lambda x, y: cmp( bases.index(x), bases.index(y) ) )
        
        for inherited in inherits:
            count = inherited_members[inherited]
            doc = commands.findDocument( inherited )
            if ( not doc ):
                continue
            
            opt = {}
            opt['count'] = count
            opt['base'] = inherited.__name__
            opt['url'] = doc.url( relativeTo = self )
            opt['type'] = section
            
            inherit_summaries.append( templ % opt )
        
        # generate the summary information; heading is the capitalized,
        # pluralized form of the section name
        words = [word.capitalize() for word in text.words(section)]
        words[-1] = text.pluralize(words[-1])
        
        summary_environ = {}
        summary_environ['contents'] = '\n'.join(html)
        summary_environ['section'] = ' '.join(words)
        summary_environ['inherits'] = '\n'.join(inherit_summaries)
        
        return templates.template('summary.html') % summary_environ
def _subclasses( self, obj ):
"""
Looks up all the classes that inherit from this object.
:param obj | <object>
:return [<cls>, ..]
"""
output = []
for doc in Document.cache.values():
doc_obj = doc.object()
if ( inspect.isclass( doc_obj ) and
obj in doc_obj.__bases__ ):
output.append( doc_obj )
return output
#------------------------------------------------------------------------------
# public methods
def addChild( self, child ):
"""
Adds the inputed document as a sub-child for this document.
:param child | <Document>
"""
child._parent = self
self._children.append(child)
def allMembersHtml( self ):
"""
Returns the documentation for all the members linked to this document.
This method only applies to class objects.
:return <str>
"""
if ( not inspect.isclass( self._object ) ):
return ''
if ( not self._allMembersHtml ):
self._allMembersHtml = self._generateAllMembersDocs()
return self._allMembersHtml
def baseurl( self ):
"""
Returns the relative url to get back to the root of the documentation
api.
:return <str>
"""
baseurl = self.url()
count = len(baseurl.split('/'))
return ('../' * count).strip('/')
    def breadcrumbs(self,
                    relativeTo = None,
                    first = True,
                    includeSelf = False):
        """
        Creates the breadcrumb trail of links from Home/API down through the
        parent modules of this document, recursing up the module hierarchy.
        
        :param      relativeTo  | <Document> || None | resolve urls relative
                      to another document (defaults to this one)
        :param      first       | <bool> | True only on the outermost call;
                      adds the Home and API links
        :param      includeSelf | <bool> | link this document's own crumb
                      rather than rendering it as plain text
        
        :return     <str> html
        """
        basecrumbs = ''
        # only the document the trail is rooted at renders its own crumb
        if ( not relativeTo ):
            relativeTo = self
            basecrumbs = self.title().split('.')[-1]
            
            if ( includeSelf ):
                opts = {
                    'url': './' + os.path.split(self.url())[1],
                    'text': self.title().split('.')[-1]
                }
                basecrumbs = templates.template('link_breadcrumbs.html') % opts
        
        # find the parent document: defining module for a class, parent
        # package for a module
        if ( inspect.isclass( self._object ) ):
            doc = Document.cache.get( self._object.__module__ )
        
        elif ( inspect.ismodule( self._object ) ):
            parent_mod = '.'.join( self._object.__name__.split('.')[:-1] )
            doc = Document.cache.get( parent_mod )
        
        else:
            doc = None
        
        if ( doc ):
            opts = {}
            opts['url'] = doc.url(relativeTo)
            opts['text' ] = doc.title().split('.')[-1]
            link = templates.template('link_breadcrumbs.html') % opts
            # recurse upward; only the outermost call emits Home/API
            subcrumbs = doc.breadcrumbs(relativeTo, first = False)
        else:
            subcrumbs = ''
            link = ''
        
        parts = []
        
        if ( first ):
            # add the home url
            baseurl = self.baseurl()
            home_url = '%s/index.html' % baseurl
            home_opts = { 'text': 'Home', 'url': home_url }
            home_part = templates.template('link_breadcrumbs.html') % home_opts
            
            parts.append(home_part)
            
            # add the api url
            api_url = '%s/api/index.html' % baseurl
            api_opts = { 'text': 'API', 'url': api_url }
            api_part = templates.template('link_breadcrumbs.html') % api_opts
            
            parts.append(api_part)
        
        if ( subcrumbs ):
            parts.append( subcrumbs )
        
        if ( link ):
            parts.append( link )
        
        if ( basecrumbs ):
            parts.append( basecrumbs )
        
        return ''.join( parts )
    def children( self ):
        """
        Returns the list of child documents registered on this instance
        via addChild.  The internal list itself is returned, not a copy.
        
        :return     [ <Document>, .. ]
        """
        return self._children
    def data( self ):
        """
        Returns the mapping of member name to parsed DocumentData for this
        document (populated by parseData).
        
        :return     <dict>
        """
        return self._data
def export( self, basepath, page = None ):
"""
Exports the html files for this document and its children to the
given basepath.
:param basepath | <str>
:param page | <str> || None
:return <bool> success
"""
# make sure the base path exists
if ( not os.path.exists( basepath ) ):
return False
basepath = os.path.normpath(basepath)
url = self.url()
filename = os.path.join(basepath, url)
docpath = os.path.dirname(filename)
# add the doc path
if ( not os.path.exists(docpath) ):
os.makedirs(docpath)
if ( not page ):
page = templates.template('page.html')
# setup the default environ
commands.url_handler.setRootUrl(self.baseurl())
doc_environ = commands.ENVIRON.copy()
doc_environ['title'] = self.title()
doc_environ['base_url'] = self.baseurl()
doc_environ['static_url'] = doc_environ['base_url'] + '/_static'
doc_environ['contents'] = self.html()
doc_environ['breadcrumbs'] = self.breadcrumbs(includeSelf = True)
doc_environ['navigation'] %= doc_environ
# generate the main html file
exportfile = open(filename, 'w')
exportfile.write( page % doc_environ )
exportfile.close()
# generate the all members html file
allmember_html = self.allMembersHtml()
if ( allmember_html ):
fpath, fname = os.path.split(filename)
fname = fname.split('.')[0] + '-allmembers.html'
afilesource = os.path.join(fpath, fname)
doc_environ['contents'] = allmember_html
# create the crumbs
crumbs = self.breadcrumbs(includeSelf = True)
opts = {'url': '#', 'text': 'All Members'}
crumbs += templates.template('link_breadcrumbs.html') % opts
doc_environ['breadcrumbs'] = crumbs
# save the all members file
membersfile = open(afilesource, 'w')
membersfile.write( page % doc_environ )
membersfile.close()
# generate the source code file
source_html = self.sourceHtml()
if ( source_html ):
fpath, fname = os.path.split(filename)
fname = fname.split('.')[0] + '-source.html'
sfilesource = os.path.join(fpath, fname)
doc_environ['contents'] = source_html
# create the crumbs
crumbs = self.breadcrumbs(includeSelf = True)
opts = {'url': '#', 'text': 'Source Code'}
crumbs += templates.template('link_breadcrumbs.html') % opts
doc_environ['breadcrumbs'] = crumbs
# save the source file
sourcefile = open(sfilesource, 'w')
sourcefile.write( page % doc_environ )
sourcefile.close()
# generate the children
for child in self.children():
child.export(basepath, page)
def findData( self, dtype ):
"""
Looks up the inputed data objects based on the given data type.
:param dataType | <str>
:return <str>
"""
self.parseData()
output = []
for data in self._data.values():
if ( data.dataType == dtype or
(data.privacy + ' ' + data.dataType) == dtype ):
output.append(data)
return output
def groupedData( self ):
"""
Groups the data together based on their data types and returns it.
:return { <str> grp: [ <DocumentData>, .. ], .. }
"""
output = {}
values = self._data.values()
values.sort( lambda x, y: cmp(x.name, y.name) )
for data in values:
dtype = '%s %s' % (data.privacy, data.dataType)
output.setdefault(dtype, [])
output[dtype].append(data)
return output
def html( self ):
"""
Returns the generated html for this document.
:return <str>
"""
if ( not self._html ):
self._html = self._generateHtml()
return self._html
def isNull( self ):
"""
Returns whether or not this document has any data associated with it.
:return <bool>
"""
return self._object == None
    def object( self ):
        """
        Returns the module, package, class, or enum instance that this
        document wraps (None for a null document).
        
        :return     <object> || None
        """
        return self._object
    def objectName( self ):
        """
        Returns the object name that this object will represent.  This will
        be similar to a URL, should be unique per document.
        
        :return     <str>
        """
        return self._objectName
    def parent( self ):
        """
        Returns the parent document of this instance, as assigned by
        addChild (None when this is a root document).
        
        :return     <Document> || None
        """
        return self._parent
def parseData( self ):
"""
Parses out all the information that is part of this item's object.
This is the method that does the bulk of the processing for the
documents.
:return <bool> success
"""
if ( self.isNull() or self._data ):
return False
class_attrs = []
obj = self.object()
# parse out class information
cls_kind_map = {}
if ( inspect.isclass( obj ) ):
contents = self._collectMembers(obj)
for const in contents:
if ( const[2] == obj ):
class_attrs.append( const[0] )
cls_kind_map[const.name] = const.kind
# try to load all the items
try:
members = dict(inspect.getmembers(obj))
except AttributeError:
members = {}
for key in dir(obj):
if ( not key in members ):
try:
members[key] = getattr(obj, key)
except AttributeError:
pass
modname = ''
if ( inspect.ismodule(obj) ):
modname = obj.__name__
for name, value in members.items():
# ignore inherited items
if ( class_attrs and not name in class_attrs ):
continue
varType = 'variable'
funcType = 'function'
kind = 'data'
if ( inspect.isclass( self._object ) ):
varType = 'member'
funcType = 'static method'
kind = cls_kind_map.get(name, 'data')
docdata = DocumentData.create( name,
value,
kind,
varType,
funcType )
if ( modname and hasattr(value, '__module__') and
modname != getattr(value, '__module__') ):
docdata.privacy = 'imported ' + docdata.privacy
self._data[name] = docdata
def setObject( self, obj ):
"""
Sets the object instance for this document to the inputed object. This
will be either a module, package, class, or enum instance. This will
clear the html information and title data.
:param obj | <variant>
"""
self._object = obj
self._html = ''
self._allMembersHtml = ''
self._title = str(obj.__name__)
if ( inspect.isclass( obj ) ):
self.setObjectName( '%s-%s' % (obj.__module__, obj.__name__) )
else:
self.setObjectName( obj.__name__ )
    def setObjectName( self, objectName ):
        """
        Sets the object name (the url-like unique key) for this document
        to the given name.
        
        :param      objectName | <str>
        """
        self._objectName = objectName
    def setTitle( self, title ):
        """
        Sets the title string for this document to the inputed string.
        
        :param      title | <str>
        """
        self._title = title
def sourceHtml( self ):
"""
Returns the source file html for this document. This method only
applies to module documents.
:return <str>
"""
if ( not inspect.ismodule(self._object) ):
return ''
if ( not self._sourceHtml ):
self._sourceHtml = self._generateSourceDocs()
return self._sourceHtml
    def title( self ):
        """
        Returns the title string for this document.
        
        :return     <str>
        """
        return self._title
def url( self, relativeTo = None ):
"""
Returns the path to this document's html file. If the optional
relativeTo keyword is specified, then the generated url will be made
in relation to the local path for the current document.
:param relativeTo <Document> || None
:return <str>
"""
modname = self.objectName()
if ( inspect.ismodule( self._object ) ):
if ( '__init__' in self._object.__file__ ):
modname += '.__init__'
if ( not relativeTo ):
return modname.replace('.','/') + '.html'
relmodule = relativeTo.objectName()
relobject = relativeTo.object()
if ( inspect.ismodule( relobject ) ):
if ( '__init__' in relobject.__file__ ):
relmodule += '.__init__'
relpath = relmodule.split('.')
mypath = modname.split('.')
go_up = '/..' * (len(relpath)-1)
go_down = '/'.join([ part for part in mypath if part ])
return (go_up + '/' + go_down + '.html').strip('/') | [
"[email protected]"
] | |
d81d76e9d8b22c664357e05b002bbb03f28bb514 | bdbd35f1d2ac6a303fbf68b54b4c9c7d5c5f2568 | /static_frame/test/unit/test_frame_iter.py | 15ed042b08bb9864afe3e6f3b2baae453318789e | [
"MIT"
] | permissive | leemit/static-frame | 3d6818c67e71a701ec93f439d3b16c40813e1540 | 2191ff2e05947851ef929fbaf49a81f75920483f | refs/heads/master | 2023-03-28T06:19:06.231726 | 2021-03-26T20:45:40 | 2021-03-26T20:45:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 37,260 | py | import unittest
import typing as tp
import numpy as np
import frame_fixtures as ff
import static_frame as sf
# from static_frame import Index
from static_frame import IndexHierarchy
# from static_frame import IndexHierarchyGO
# from static_frame import IndexYearMonth
# from static_frame import IndexYearGO
# from static_frame import IndexYear
from static_frame import IndexDate
# from static_frame import IndexDateGO
from static_frame import Series
from static_frame import Frame
from static_frame import FrameGO
from static_frame import TypeBlocks
# from static_frame import mloc
# from static_frame import ILoc
from static_frame import HLoc
# from static_frame import DisplayConfig
# from static_frame import IndexAutoFactory
from static_frame.test.test_case import TestCase
# from static_frame.test.test_case import skip_win
# from static_frame.test.test_case import skip_linux_no_display
# from static_frame.test.test_case import skip_pylt37
# from static_frame.test.test_case import temp_file
# from static_frame.core.exception import ErrorInitFrame
# from static_frame.core.exception import ErrorInitIndex
from static_frame.core.exception import AxisInvalid
nan = np.nan
class TestUnit(TestCase):
#---------------------------------------------------------------------------
def test_frame_iter_a(self) -> None:
records = (
(1, 2, 'a', False, True),
(30, 50, 'b', True, False))
f1 = Frame.from_records(records,
columns=('p', 'q', 'r', 's', 't'),
index=('x','y'))
self.assertEqual((f1.keys() == f1.columns).all(), True)
self.assertEqual([x for x in f1.columns], ['p', 'q', 'r', 's', 't'])
self.assertEqual([x for x in f1], ['p', 'q', 'r', 's', 't'])
def test_frame_iter_array_a(self) -> None:
records = (
(1, 2, 'a', False, True),
(30, 50, 'b', True, False))
f1 = Frame.from_records(records,
columns=('p', 'q', 'r', 's', 't'),
index=('x','y'))
self.assertEqual(
next(iter(f1.iter_array(axis=0))).tolist(),
[1, 30])
self.assertEqual(
next(iter(f1.iter_array(axis=1))).tolist(),
[1, 2, 'a', False, True])
def test_frame_iter_array_b(self) -> None:
arrays = list(np.random.rand(1000) for _ in range(100))
f1 = Frame.from_items(
zip(range(100), arrays)
)
# iter columns
post = f1.iter_array(axis=0).apply_pool(np.sum, max_workers=4, use_threads=True)
self.assertEqual(post.shape, (100,))
self.assertAlmostEqual(f1.sum().sum(), post.sum())
post = f1.iter_array(axis=0).apply_pool(np.sum, max_workers=4, use_threads=False)
self.assertEqual(post.shape, (100,))
self.assertAlmostEqual(f1.sum().sum(), post.sum())
def test_frame_iter_array_c(self) -> None:
arrays = []
for _ in range(8):
arrays.append(list(range(8)))
f1 = Frame.from_items(
zip(range(8), arrays)
)
func = {x: chr(x+65) for x in range(8)}
# iter columns
post = f1.iter_element().apply_pool(func, max_workers=4, use_threads=True)
self.assertEqual(post.to_pairs(0),
((0, ((0, 'A'), (1, 'B'), (2, 'C'), (3, 'D'), (4, 'E'), (5, 'F'), (6, 'G'), (7, 'H'))), (1, ((0, 'A'), (1, 'B'), (2, 'C'), (3, 'D'), (4, 'E'), (5, 'F'), (6, 'G'), (7, 'H'))), (2, ((0, 'A'), (1, 'B'), (2, 'C'), (3, 'D'), (4, 'E'), (5, 'F'), (6, 'G'), (7, 'H'))), (3, ((0, 'A'), (1, 'B'), (2, 'C'), (3, 'D'), (4, 'E'), (5, 'F'), (6, 'G'), (7, 'H'))), (4, ((0, 'A'), (1, 'B'), (2, 'C'), (3, 'D'), (4, 'E'), (5, 'F'), (6, 'G'), (7, 'H'))), (5, ((0, 'A'), (1, 'B'), (2, 'C'), (3, 'D'), (4, 'E'), (5, 'F'), (6, 'G'), (7, 'H'))), (6, ((0, 'A'), (1, 'B'), (2, 'C'), (3, 'D'), (4, 'E'), (5, 'F'), (6, 'G'), (7, 'H'))), (7, ((0, 'A'), (1, 'B'), (2, 'C'), (3, 'D'), (4, 'E'), (5, 'F'), (6, 'G'), (7, 'H'))))
)
def test_frame_iter_array_d(self) -> None:
arrays = []
for _ in range(8):
arrays.append(list(range(8)))
f1 = Frame.from_items(
zip(range(8), arrays)
)
# when called with a pool, values are gien the func as a single argument, which for an element iteration is a tuple of coord, value
func = lambda arg: arg[0][1]
# iter columns
post = f1.iter_element_items().apply_pool(func, max_workers=4, use_threads=True)
self.assertEqual(post.to_pairs(0),
((0, ((0, 0), (1, 0), (2, 0), (3, 0), (4, 0), (5, 0), (6, 0), (7, 0))), (1, ((0, 1), (1, 1), (2, 1), (3, 1), (4, 1), (5, 1), (6, 1), (7, 1))), (2, ((0, 2), (1, 2), (2, 2), (3, 2), (4, 2), (5, 2), (6, 2), (7, 2))), (3, ((0, 3), (1, 3), (2, 3), (3, 3), (4, 3), (5, 3), (6, 3), (7, 3))), (4, ((0, 4), (1, 4), (2, 4), (3, 4), (4, 4), (5, 4), (6, 4), (7, 4))), (5, ((0, 5), (1, 5), (2, 5), (3, 5), (4, 5), (5, 5), (6, 5), (7, 5))), (6, ((0, 6), (1, 6), (2, 6), (3, 6), (4, 6), (5, 6), (6, 6), (7, 6))), (7, ((0, 7), (1, 7), (2, 7), (3, 7), (4, 7), (5, 7), (6, 7), (7, 7))))
)
def test_frame_iter_array_e(self) -> None:
f = sf.Frame.from_dict(
dict(diameter=(12756, 6792, 142984),
mass=(5.97, 0.642, 1898)),
index=('Earth', 'Mars', 'Jupiter'),
dtypes=dict(diameter=np.int64))
post = f.iter_array(axis=0).apply(np.sum)
self.assertTrue(post.dtype == float)
def test_frame_iter_array_f(self) -> None:
f = sf.Frame(np.arange(12).reshape(3,4),
index=IndexDate.from_date_range('2020-01-01', '2020-01-03'))
post = f.iter_array(axis=0).apply(np.sum, name='foo')
self.assertEqual(post.name, 'foo')
self.assertEqual(
f.iter_array(axis=0).apply(np.sum).to_pairs(),
((0, 12), (1, 15), (2, 18), (3, 21))
)
self.assertEqual(
f.iter_array(axis=1).apply(np.sum).to_pairs(),
((np.datetime64('2020-01-01'), 6), (np.datetime64('2020-01-02'), 22), (np.datetime64('2020-01-03'), 38))
)
def test_frame_iter_array_g(self) -> None:
f = sf.FrameGO(index=IndexDate.from_date_range('2020-01-01', '2020-01-03'))
post = list(f.iter_array(axis=0))
self.assertEqual(post, [])
post = list(f.iter_array(axis=1))
self.assertEqual([x.tolist() for x in post], [[], [], []])
#---------------------------------------------------------------------------
def test_frame_iter_tuple_a(self) -> None:
post = tuple(sf.Frame.from_elements(range(5)).iter_tuple(axis=0, constructor=tuple))
self.assertEqual(post, ((0, 1, 2, 3, 4),))
def test_frame_iter_tuple_b(self) -> None:
post = tuple(sf.Frame.from_elements(range(3), index=tuple('abc')).iter_tuple(axis=0))
self.assertEqual(post, ((0, 1, 2),))
self.assertEqual(tuple(post[0]._asdict().items()),
(('a', 0), ('b', 1), ('c', 2))
)
def test_frame_iter_tuple_c(self) -> None:
with self.assertRaises(AxisInvalid):
post = tuple(sf.Frame.from_elements(range(5)).iter_tuple(axis=2))
def test_frame_iter_tuple_d(self) -> None:
f = sf.FrameGO(index=IndexDate.from_date_range('2020-01-01', '2020-01-03'))
post = list(f.iter_tuple(constructor=tuple, axis=0))
self.assertEqual(post, [])
post = list(f.iter_tuple(axis=1))
self.assertEqual([len(x) for x in post], [0, 0, 0])
def test_frame_iter_tuple_e(self) -> None:
records = (
(1, 2, 'a', False, True),
(30, 50, 'b', True, False))
f1 = FrameGO.from_records(records,
columns=('p', 'q', 'r', 's', 't'),
index=('x','y'))
class Record(tp.NamedTuple):
x: object
y: object
post1 = list(f1.iter_tuple(constructor=Record))
self.assertTrue(all(isinstance(x, Record) for x in post1))
post2 = list(f1.iter_tuple(constructor=tuple))
self.assertEqual(post2,
[(1, 30), (2, 50), ('a', 'b'), (False, True), (True, False)])
#---------------------------------------------------------------------------
def test_frame_iter_series_a(self) -> None:
f1 = ff.parse('f(Fg)|s(2,8)|i(I,str)|c(Ig,str)|v(int)')
post1 = tuple(f1.iter_series(axis=0))
self.assertEqual(len(post1), 8)
self.assertEqual(post1[0].to_pairs(),
(('zZbu', -88017), ('ztsv', 92867)))
post2 = tuple(f1.iter_series(axis=1))
self.assertEqual(len(post2), 2)
self.assertEqual(post2[0].to_pairs(),
(('zZbu', -88017), ('ztsv', 162197), ('zUvW', -3648), ('zkuW', 129017), ('zmVj', 58768), ('z2Oo', 84967), ('z5l6', 146284), ('zCE3', 137759)))
#---------------------------------------------------------------------------
def test_frame_iter_tuple_items_a(self) -> None:
records = (
(1, 2, 'a', False, True),
(30, 50, 'b', True, False))
f1 = FrameGO.from_records(records,
columns=('p', 'q', 'r', 's', 't'),
index=('x','y'))
post1 = list(f1.iter_tuple_items(constructor=list))
self.assertEqual(post1, [('p', [1, 30]), ('q', [2, 50]), ('r', ['a', 'b']), ('s', [False, True]), ('t', [True, False])])
#---------------------------------------------------------------------------
    def test_frame_iter_element_a(self) -> None:
        # reindex both axis
        records = (
                (2, 2, 'a', False, False),
                (30, 34, 'b', True, False),
                (2, 95, 'c', False, False),
                (30, 73, 'd', True, True),
                )

        f1 = Frame.from_records(records,
                columns=('p', 'q', 'r', 's', 't'),
                index=('w', 'x', 'y', 'z'))

        # axis 0 (default) traverses elements row by row
        self.assertEqual(
                [x for x in f1.iter_element()],
                [2, 2, 'a', False, False, 30, 34, 'b', True, False, 2, 95, 'c', False, False, 30, 73, 'd', True, True])

        # axis 1 traverses elements column by column
        self.assertEqual(list(f1.iter_element(axis=1)),
                [2, 30, 2, 30, 2, 34, 95, 73, 'a', 'b', 'c', 'd', False, True, False, True, False, False, False, True])

        # items pairs each element with its (row, column) coordinate
        self.assertEqual([x for x in f1.iter_element_items()],
                [(('w', 'p'), 2), (('w', 'q'), 2), (('w', 'r'), 'a'), (('w', 's'), False), (('w', 't'), False), (('x', 'p'), 30), (('x', 'q'), 34), (('x', 'r'), 'b'), (('x', 's'), True), (('x', 't'), False), (('y', 'p'), 2), (('y', 'q'), 95), (('y', 'r'), 'c'), (('y', 's'), False), (('y', 't'), False), (('z', 'p'), 30), (('z', 'q'), 73), (('z', 'r'), 'd'), (('z', 's'), True), (('z', 't'), True)])

        # element-wise apply yields the same frame regardless of axis
        post1 = f1.iter_element().apply(lambda x: '_' + str(x) + '_')
        self.assertEqual(post1.to_pairs(0),
                (('p', (('w', '_2_'), ('x', '_30_'), ('y', '_2_'), ('z', '_30_'))), ('q', (('w', '_2_'), ('x', '_34_'), ('y', '_95_'), ('z', '_73_'))), ('r', (('w', '_a_'), ('x', '_b_'), ('y', '_c_'), ('z', '_d_'))), ('s', (('w', '_False_'), ('x', '_True_'), ('y', '_False_'), ('z', '_True_'))), ('t', (('w', '_False_'), ('x', '_False_'), ('y', '_False_'), ('z', '_True_')))))

        post2 = f1.iter_element(axis=1).apply(lambda x: '_' + str(x) + '_')
        self.assertEqual(post2.to_pairs(0),
                (('p', (('w', '_2_'), ('x', '_30_'), ('y', '_2_'), ('z', '_30_'))), ('q', (('w', '_2_'), ('x', '_34_'), ('y', '_95_'), ('z', '_73_'))), ('r', (('w', '_a_'), ('x', '_b_'), ('y', '_c_'), ('z', '_d_'))), ('s', (('w', '_False_'), ('x', '_True_'), ('y', '_False_'), ('z', '_True_'))), ('t', (('w', '_False_'), ('x', '_False_'), ('y', '_False_'), ('z', '_True_')))))
def test_frame_iter_element_b(self) -> None:
# reindex both axis
records = (
(2, 2, 'a', False, False),
(30, 34, 'b', True, False),
(2, 95, 'c', False, False),
(30, 73, 'd', True, True),
)
f1 = Frame.from_records(records,
columns=('p', 'q', 'r', 's', 't'),
index=('w', 'x', 'y', 'z'))
# support working with mappings
post = f1.iter_element().map_any({2: 200, False: 200})
self.assertEqual(post.to_pairs(0),
(('p', (('w', 200), ('x', 30), ('y', 200), ('z', 30))), ('q', (('w', 200), ('x', 34), ('y', 95), ('z', 73))), ('r', (('w', 'a'), ('x', 'b'), ('y', 'c'), ('z', 'd'))), ('s', (('w', 200), ('x', True), ('y', 200), ('z', True))), ('t', (('w', 200), ('x', 200), ('y', 200), ('z', True))))
)
def test_frame_iter_element_c(self) -> None:
a2 = np.array([
[None, None],
[None, 1],
[None, 5]
], dtype=object)
a1 = np.array([True, False, True])
a3 = np.array([['a'], ['b'], ['c']])
tb1 = TypeBlocks.from_blocks((a3, a1, a2))
f1 = Frame(tb1,
index=self.get_letters(None, tb1.shape[0]),
columns=IndexHierarchy.from_product(('i', 'ii'), ('a', 'b'))
)
values = list(f1.iter_element())
self.assertEqual(values,
['a', True, None, None, 'b', False, None, 1, 'c', True, None, 5]
)
f2 = f1.iter_element().apply(lambda x: str(x).lower().replace('e', ''))
self.assertEqual(f1.columns.__class__, f2.columns.__class__,)
self.assertEqual(f2.to_pairs(0),
((('i', 'a'), (('a', 'a'), ('b', 'b'), ('c', 'c'))), (('i', 'b'), (('a', 'tru'), ('b', 'fals'), ('c', 'tru'))), (('ii', 'a'), (('a', 'non'), ('b', 'non'), ('c', 'non'))), (('ii', 'b'), (('a', 'non'), ('b', '1'), ('c', '5'))))
)
def test_frame_iter_element_d(self) -> None:
f1 = sf.Frame.from_elements(['I', 'II', 'III'], columns=('A',))
f2 = sf.Frame.from_elements([67, 28, 99], columns=('B',), index=('I', 'II', 'IV'))
post = f1['A'].iter_element().map_any(f2['B'])
# if we do not match the mapping, we keep the value.
self.assertEqual(post.to_pairs(),
((0, 67), (1, 28), (2, 'III')))
def test_frame_iter_element_e(self) -> None:
f1 = Frame.from_records(np.arange(9).reshape(3, 3))
self.assertEqual(list(f1.iter_element(axis=1)),
[0, 3, 6, 1, 4, 7, 2, 5, 8])
mapping = {x: x*3 for x in range(9)}
f2 = f1.iter_element(axis=1).map_all(mapping)
self.assertEqual([d.kind for d in f2.dtypes.values],
['i', 'i', 'i'])
#---------------------------------------------------------------------------
def test_frame_iter_group_a(self) -> None:
columns = tuple('pqrst')
index = tuple('zxwy')
records = (('A', 1, 'a', False, False),
('A', 2, 'b', True, False),
('B', 1, 'c', False, False),
('B', 2, 'd', True, True))
f = Frame.from_records(
records, columns=columns, index=index,name='foo')
f = f.set_index_hierarchy(('p', 'q'), drop=True)
with self.assertRaises(AxisInvalid):
_ = f.iter_group('s', axis=-1).apply(lambda x: x.shape)
post = f.iter_group('s').apply(lambda x: x.shape)
self.assertEqual(post.to_pairs(),
((False, (2, 3)), (True, (2, 3)))
)
def test_frame_iter_group_b(self) -> None:
columns = tuple('pqrst')
index = tuple('zxwy')
records = (('A', 1, 'a', False, False),
('A', 2, 'b', True, False),
('B', 1, 'c', False, False),
('B', 2, 'd', True, True))
f = Frame.from_records(
records, columns=columns, index=index, name='foo')
post = f.iter_group(['p', 'q']).apply(len)
self.assertEqual(post.to_pairs(),
((('A', 1), 1), (('A', 2), 1), (('B', 1), 1), (('B', 2), 1))
)
def test_frame_iter_group_c(self) -> None:
columns = tuple('pqrst')
index = tuple('zxwy')
records = (('A', 1, 'a', False, False),
('A', 2, 'b', True, False),
('B', 1, 'c', False, False),
('B', 2, 'd', True, True))
f = Frame.from_records(
records, columns=columns, index=index, name='foo')
with self.assertRaises(TypeError):
next(iter(f.iter_group(foo='x')))
with self.assertRaises(TypeError):
next(iter(f.iter_group(3, 5)))
self.assertEqual(next(iter(f.iter_group('q'))).to_pairs(0),
(('p', (('z', 'A'), ('w', 'B'))), ('q', (('z', 1), ('w', 1))), ('r', (('z', 'a'), ('w', 'c'))), ('s', (('z', False), ('w', False))), ('t', (('z', False), ('w', False))))
)
def test_frame_iter_group_d(self) -> None:
f = sf.Frame.from_element(1, columns=[1,2,3], index=['a'])
empty = f.reindex([])
self.assertEqual(list(empty.iter_element()), [])
self.assertEqual(list(empty.iter_group(key=1)), [])
def test_frame_iter_group_e(self) -> None:
f = sf.Frame.from_element(None, columns=[1,2,3], index=['a'])
empty = f.reindex([])
self.assertEqual(list(empty.iter_element()), [])
self.assertEqual(list(empty.iter_group(key=1)), [])
def test_frame_iter_group_f(self) -> None:
f = sf.Frame(np.arange(3).reshape(1,3), columns=tuple('abc'))
f = f.drop.loc[0]
post1 = tuple(f.iter_group(['b','c']))
self.assertEqual(post1, ())
post2 = tuple(f.iter_group('a'))
self.assertEqual(post2, ())
#---------------------------------------------------------------------------
def test_frame_iter_group_items_a(self) -> None:
# testing a hierarchical index and columns, selecting column with a tuple
records = (
('a', 999999, 0.1),
('a', 201810, 0.1),
('b', 999999, 0.4),
('b', 201810, 0.4))
f1 = Frame.from_records(records, columns=list('abc'))
f1 = f1.set_index_hierarchy(['a', 'b'], drop=False)
f1 = f1.relabel_level_add(columns='i')
groups = list(f1.iter_group_items(('i', 'a'), axis=0))
self.assertEqual(groups[0][0], 'a')
self.assertEqual(groups[0][1].to_pairs(0),
((('i', 'a'), ((('a', 999999), 'a'), (('a', 201810), 'a'))), (('i', 'b'), ((('a', 999999), 999999), (('a', 201810), 201810))), (('i', 'c'), ((('a', 999999), 0.1), (('a', 201810), 0.1)))))
self.assertEqual(groups[1][0], 'b')
self.assertEqual(groups[1][1].to_pairs(0),
((('i', 'a'), ((('b', 999999), 'b'), (('b', 201810), 'b'))), (('i', 'b'), ((('b', 999999), 999999), (('b', 201810), 201810))), (('i', 'c'), ((('b', 999999), 0.4), (('b', 201810), 0.4)))))
def test_frame_iter_group_items_b(self) -> None:
columns = tuple('pqrst')
index = tuple('zxwy')
records = (('A', 1, 'a', False, False),
('A', 2, 'b', True, False),
('B', 1, 'c', False, False),
('B', 2, 'd', True, True))
f = Frame.from_records(
records, columns=columns, index=index,name='foo')
f = f.set_index_hierarchy(('p', 'q'), drop=True)
post = f.iter_group_items('s').apply(
lambda k, x: f'{k}: {len(x)}')
self.assertEqual(post.to_pairs(),
((False, 'False: 2'), (True, 'True: 2'))
)
def test_frame_iter_group_items_c(self) -> None:
# Test optimized sorting approach. Data must have a non-object dtype and key must be single
data = np.array([[0, 1, 1, 3],
[3, 3, 2, 3],
[5, 5, 1, 3],
[7, 2, 2, 4]])
frame = sf.Frame(data, columns=tuple('abcd'), index=tuple('wxyz'))
# Column
groups = list(frame.iter_group_items('c', axis=0))
expected_pairs = [
(('a', (('w', 0), ('y', 5))),
('b', (('w', 1), ('y', 5))),
('c', (('w', 1), ('y', 1))),
('d', (('w', 3), ('y', 3)))),
(('a', (('x', 3), ('z', 7))),
('b', (('x', 3), ('z', 2))),
('c', (('x', 2), ('z', 2))),
('d', (('x', 3), ('z', 4))))]
self.assertEqual([1, 2], [group[0] for group in groups])
self.assertEqual(expected_pairs, [group[1].to_pairs(axis=0) for group in groups])
# Index
groups = list(frame.iter_group_items('w', axis=1))
expected_pairs = [
(('a', (('w', 0), ('x', 3), ('y', 5), ('z', 7))),), #type: ignore
(('b', (('w', 1), ('x', 3), ('y', 5), ('z', 2))), #type: ignore
('c', (('w', 1), ('x', 2), ('y', 1), ('z', 2)))),
(('d', (('w', 3), ('x', 3), ('y', 3), ('z', 4))),)] #type: ignore
self.assertEqual([0, 1, 3], [group[0] for group in groups])
self.assertEqual(expected_pairs, [group[1].to_pairs(axis=0) for group in groups])
def test_frame_iter_group_items_d(self) -> None:
# Test iterating with multiple key selection
data = np.array([[0, 1, 1, 3],
[3, 3, 2, 3],
[5, 5, 1, 3],
[7, 2, 2, 4]])
frame = sf.Frame(data, columns=tuple('abcd'), index=tuple('wxyz'))
# Column
groups = list(frame.iter_group_items(['c', 'd'], axis=0))
expected_pairs = [
(('a', (('w', 0), ('y', 5))),
('b', (('w', 1), ('y', 5))),
('c', (('w', 1), ('y', 1))),
('d', (('w', 3), ('y', 3)))),
(('a', (('x', 3),)),
('b', (('x', 3),)),
('c', (('x', 2),)),
('d', (('x', 3),))),
(('a', (('z', 7),)),
('b', (('z', 2),)),
('c', (('z', 2),)),
('d', (('z', 4),)))]
self.assertEqual([(1, 3), (2, 3), (2, 4)], [group[0] for group in groups])
self.assertEqual(expected_pairs, [group[1].to_pairs(axis=0) for group in groups])
# Index
groups = list(frame.iter_group_items(['x', 'y'], axis=1))
expected_pairs = [
(('c', (('w', 1), ('x', 2), ('y', 1), ('z', 2))),), #type: ignore
(('d', (('w', 3), ('x', 3), ('y', 3), ('z', 4))),), #type: ignore
(('a', (('w', 0), ('x', 3), ('y', 5), ('z', 7))), #type: ignore
('b', (('w', 1), ('x', 3), ('y', 5), ('z', 2)))),
]
self.assertEqual([(2, 1), (3, 3), (3, 5)], [group[0] for group in groups])
self.assertEqual(expected_pairs, [group[1].to_pairs(axis=0) for group in groups])
def test_frame_iter_group_items_e(self) -> None:
columns = tuple('pqrst')
index = tuple('zxwy')
records = (('A', 1, 'a', False, False),
('A', 2, 'b', True, False),
('B', 1, 'c', False, False),
('B', 2, 'd', True, True))
f = Frame.from_records(
records, columns=columns, index=index,name='foo')
# using an array to select
self.assertEqual(
tuple(k for k, v in f.iter_group_items(f.columns == 's')),
((False,), (True,))
)
self.assertEqual(
tuple(k for k, v in f.iter_group_items(f.columns.isin(('p', 't')))),
(('A', False), ('B', False), ('B', True))
)
self.assertEqual(
tuple(k for k, v in f.iter_group_items(['s', 't'])),
((False, False), (True, False), (True, True))
)
self.assertEqual(
tuple(k for k, v in f.iter_group_items(slice('s','t'))),
((False, False), (True, False), (True, True))
)
def test_frame_iter_group_items_f(self) -> None:
objs = [object() for _ in range(2)]
data = [[1, 2, objs[0]], [3, 4, objs[0]], [5, 6, objs[1]]]
f = sf.Frame.from_records(data, columns=tuple('abc'))
post1 = {k: v for k, v in f.iter_group_items('c')}
post2 = {k[0]: v for k, v in f.iter_group_items(['c'])} # as a list, this gets a multiple key
self.assertEqual(len(post1), 2)
self.assertEqual(len(post1), len(post2))
obj_a = objs[0]
obj_b = objs[1]
self.assertEqual(post1[obj_a].shape, (2, 3))
self.assertEqual(post1[obj_a].shape, post2[obj_a].shape)
self.assertEqual(post1[obj_a].to_pairs(0),
(('a', ((0, 1), (1, 3))), ('b', ((0, 2), (1, 4))), ('c', ((0, obj_a), (1, obj_a)))))
self.assertEqual(post2[obj_a].to_pairs(0),
(('a', ((0, 1), (1, 3))), ('b', ((0, 2), (1, 4))), ('c', ((0, obj_a), (1, obj_a)))))
self.assertEqual(post1[obj_b].shape, (1, 3))
self.assertEqual(post1[obj_b].shape, post2[obj_b].shape)
self.assertEqual(post1[obj_b].to_pairs(0),
(('a', ((2, 5),)), ('b', ((2, 6),)), ('c', ((2, obj_b),))))
self.assertEqual(post2[obj_b].to_pairs(0),
(('a', ((2, 5),)), ('b', ((2, 6),)), ('c', ((2, obj_b),))))
#---------------------------------------------------------------------------
def test_frame_iter_group_index_a(self) -> None:
records = (
(2, 2, 'a', False, False),
(30, 34, 'b', True, False),
(2, 95, 'c', False, False),
)
f1 = Frame.from_records(records,
columns=('p', 'q', 'r', 's', 't'),
index=('x', 'y', 'z'))
with self.assertRaises(TypeError):
f1.iter_group_labels(3, 4)
with self.assertRaises(TypeError):
f1.iter_group_labels(foo=4)
post = tuple(f1.iter_group_labels(0, axis=0))
self.assertEqual(len(post), 3)
self.assertEqual(
f1.iter_group_labels(0, axis=0).apply(lambda x: x[['p', 'q']].values.sum()).to_pairs(),
(('x', 4), ('y', 64), ('z', 97))
)
def test_frame_iter_group_index_b(self) -> None:
records = (
(2, 2, 'a', 'q', False, False),
(30, 34, 'b', 'c', True, False),
(2, 95, 'c', 'd', False, False),
)
f1 = Frame.from_records(records,
columns=IndexHierarchy.from_product((1, 2, 3), ('a', 'b')),
index=('x', 'y', 'z'))
# with axis 1, we are grouping based on columns while maintain the index
post_tuple = tuple(f1.iter_group_labels(1, axis=1))
self.assertEqual(len(post_tuple), 2)
post = f1[HLoc[f1.columns[0]]]
self.assertEqual(post.__class__, Series)
self.assertEqual(post.to_pairs(),
(('x', 2), ('y', 30), ('z', 2))
)
post = f1.loc[:, HLoc[f1.columns[0]]]
self.assertEqual(post.__class__, Series)
self.assertEqual(post.to_pairs(),
(('x', 2), ('y', 30), ('z', 2))
)
self.assertEqual(
f1.iter_group_labels(1, axis=1).apply(lambda x: x.iloc[:, 0].sum()).to_pairs(),
(('a', 34), ('b', 131))
)
def test_frame_iter_group_index_c(self) -> None:
columns = tuple('pqrst')
index = tuple('zxwy')
records = (('A', 1, 'a', False, False),
('A', 2, 'b', True, False),
('B', 1, 'c', False, False),
('B', 2, 'd', True, True))
f = Frame.from_records(
records, columns=columns, index=index,name='foo')
f = f.set_index_hierarchy(('p', 'q'), drop=True)
with self.assertRaises(AxisInvalid):
_ = f.iter_group_labels_items(0, axis=-1).apply(lambda k, x: f'{k}:{x.size}')
post = f.iter_group_labels_items(0).apply(lambda k, x: f'{k}:{x.size}')
self.assertEqual(post.to_pairs(),
(('A', 'A:6'), ('B', 'B:6'))
)
#---------------------------------------------------------------------------
def test_frame_reversed(self) -> None:
columns = tuple('pqrst')
index = tuple('zxwy')
records = ((2, 2, 'a', False, False),
(30, 34, 'b', True, False),
(2, 95, 'c', False, False),
(30, 73, 'd', True, True))
f = Frame.from_records(
records, columns=columns, index=index,name='foo')
self.assertTrue(tuple(reversed(f)) == tuple(reversed(columns)))
#---------------------------------------------------------------------------
def test_frame_axis_window_items_a(self) -> None:
base = np.array([1, 2, 3, 4])
records = (base * n for n in range(1, 21))
f1 = Frame.from_records(records,
columns=list('ABCD'),
index=self.get_letters(20))
post0 = tuple(f1._axis_window_items(size=2, axis=0))
self.assertEqual(len(post0), 19)
self.assertEqual(post0[0][0], 'b')
self.assertEqual(post0[0][1].__class__, Frame)
self.assertEqual(post0[0][1].shape, (2, 4))
self.assertEqual(post0[-1][0], 't')
self.assertEqual(post0[-1][1].__class__, Frame)
self.assertEqual(post0[-1][1].shape, (2, 4))
post1 = tuple(f1._axis_window_items(size=2, axis=1))
self.assertEqual(len(post1), 3)
self.assertEqual(post1[0][0], 'B')
self.assertEqual(post1[0][1].__class__, Frame)
self.assertEqual(post1[0][1].shape, (20, 2))
self.assertEqual(post1[-1][0], 'D')
self.assertEqual(post1[-1][1].__class__, Frame)
self.assertEqual(post1[-1][1].shape, (20, 2))
def test_frame_axis_window_items_b(self) -> None:
base = np.array([1, 2, 3, 4])
records = (base * n for n in range(1, 21))
f1 = Frame.from_records(records,
columns=list('ABCD'),
index=self.get_letters(20))
post0 = tuple(f1._axis_window_items(size=2, axis=0, as_array=True))
self.assertEqual(len(post0), 19)
self.assertEqual(post0[0][0], 'b')
self.assertEqual(post0[0][1].__class__, np.ndarray)
self.assertEqual(post0[0][1].shape, (2, 4))
self.assertEqual(post0[-1][0], 't')
self.assertEqual(post0[-1][1].__class__, np.ndarray)
self.assertEqual(post0[-1][1].shape, (2, 4))
post1 = tuple(f1._axis_window_items(size=2, axis=1, as_array=True))
self.assertEqual(len(post1), 3)
self.assertEqual(post1[0][0], 'B')
self.assertEqual(post1[0][1].__class__, np.ndarray)
self.assertEqual(post1[0][1].shape, (20, 2))
self.assertEqual(post1[-1][0], 'D')
self.assertEqual(post1[-1][1].__class__, np.ndarray)
self.assertEqual(post1[-1][1].shape, (20, 2))
def test_frame_iter_window_a(self) -> None:
base = np.array([1, 2, 3, 4])
records = (base * n for n in range(1, 21))
f1 = Frame.from_records(records,
columns=list('ABCD'),
index=self.get_letters(20))
self.assertEqual(
f1.iter_window(size=3).apply(lambda f: f['B'].sum()).to_pairs(),
(('c', 12), ('d', 18), ('e', 24), ('f', 30), ('g', 36), ('h', 42), ('i', 48), ('j', 54), ('k', 60), ('l', 66), ('m', 72), ('n', 78), ('o', 84), ('p', 90), ('q', 96), ('r', 102), ('s', 108), ('t', 114))
)
post = list(f1.iter_window(size=3))
self.assertEqual(len(post), 18)
self.assertTrue(all(f.shape == (3, 4) for f in post))
#---------------------------------------------------------------------------
def test_frame_axis_interface_a(self) -> None:
# reindex both axis
records = (
(1, 2, 'a', False, True),
(30, 34, 'b', True, False),
(54, 95, 'c', False, False),
(65, 73, 'd', True, True),
)
f1 = Frame.from_records(records,
columns=('p', 'q', 'r', 's', 't'),
index=('w', 'x', 'y', 'z'))
self.assertEqual(f1.to_pairs(1),
(('w', (('p', 1), ('q', 2), ('r', 'a'), ('s', False), ('t', True))), ('x', (('p', 30), ('q', 34), ('r', 'b'), ('s', True), ('t', False))), ('y', (('p', 54), ('q', 95), ('r', 'c'), ('s', False), ('t', False))), ('z', (('p', 65), ('q', 73), ('r', 'd'), ('s', True), ('t', True)))))
for x in f1.iter_tuple(axis=0):
self.assertTrue(len(x), 4)
for x in f1.iter_tuple(axis=1):
self.assertTrue(len(x), 5)
f2 = f1[['p', 'q']]
s1 = f2.iter_array(axis=0).apply(np.sum)
self.assertEqual(list(s1.items()), [('p', 150), ('q', 204)])
s2 = f2.iter_array(axis=1).apply(np.sum)
self.assertEqual(list(s2.items()),
[('w', 3), ('x', 64), ('y', 149), ('z', 138)])
def sum_if(idx: tp.Hashable, vals: tp.Iterable[int]) -> tp.Optional[int]:
if idx in ('x', 'z'):
return tp.cast(int, np.sum(vals))
return None
s3 = f2.iter_array_items(axis=1).apply(sum_if)
self.assertEqual(list(s3.items()),
[('w', None), ('x', 64), ('y', None), ('z', 138)])
#---------------------------------------------------------------------------
def test_frame_group_a(self) -> None:
# reindex both axis
records = (
(2, 2, 'a', False, False),
(30, 34, 'b', True, False),
(2, 95, 'c', False, False),
(30, 73, 'd', True, True),
)
f1 = Frame.from_records(records,
columns=('p', 'q', 'r', 's', 't'),
index=('w', 'x', 'y', 'z'))
with self.assertRaises(AxisInvalid):
post = tuple(f1._axis_group_iloc_items(4, axis=-1))
post = tuple(f1._axis_group_iloc_items(4, axis=0)) # row iter, group by column 4
group1, group_frame_1 = post[0]
group2, group_frame_2 = post[1]
self.assertEqual(group1, False)
self.assertEqual(group2, True)
self.assertEqual(group_frame_1.to_pairs(0),
(('p', (('w', 2), ('x', 30), ('y', 2))), ('q', (('w', 2), ('x', 34), ('y', 95))), ('r', (('w', 'a'), ('x', 'b'), ('y', 'c'))), ('s', (('w', False), ('x', True), ('y', False))), ('t', (('w', False), ('x', False), ('y', False)))))
self.assertEqual(group_frame_2.to_pairs(0),
(('p', (('z', 30),)), ('q', (('z', 73),)), ('r', (('z', 'd'),)), ('s', (('z', True),)), ('t', (('z', True),))))
def test_frame_group_b(self) -> None:
# reindex both axis
records = (
(2, 2, 'a', False, False),
(30, 34, 'b', True, False),
(2, 95, 'c', False, False),
(30, 73, 'd', True, True),
)
f1 = Frame.from_records(records,
columns=('p', 'q', 'r', 's', 't'),
index=('w', 'x', 'y', 'z'))
# column iter, group by row 0
post = list(f1._axis_group_iloc_items(0, axis=1))
self.assertEqual(post[0][0], 2)
self.assertEqual(post[0][1].to_pairs(0),
(('p', (('w', 2), ('x', 30), ('y', 2), ('z', 30))), ('q', (('w', 2), ('x', 34), ('y', 95), ('z', 73)))))
self.assertEqual(post[1][0], False)
self.assertEqual(post[1][1].to_pairs(0),
(('s', (('w', False), ('x', True), ('y', False), ('z', True))), ('t', (('w', False), ('x', False), ('y', False), ('z', True)))))
self.assertEqual(post[2][0], 'a')
self.assertEqual(post[2][1].to_pairs(0),
(('r', (('w', 'a'), ('x', 'b'), ('y', 'c'), ('z', 'd'))),))
def test_frame_axis_interface_b(self) -> None:
# reindex both axis
records = (
(2, 2, 'a', False, False),
(30, 34, 'b', True, False),
(2, 95, 'c', False, False),
(30, 73, 'd', True, True),
)
f1 = Frame.from_records(records,
columns=('p', 'q', 'r', 's', 't'),
index=('w', 'x', 'y', 'z'))
post = list(f1.iter_group_items('s', axis=0))
self.assertEqual(post[0][1].to_pairs(0),
(('p', (('w', 2), ('y', 2))), ('q', (('w', 2), ('y', 95))), ('r', (('w', 'a'), ('y', 'c'))), ('s', (('w', False), ('y', False))), ('t', (('w', False), ('y', False)))))
self.assertEqual(post[1][1].to_pairs(0),
(('p', (('x', 30), ('z', 30))), ('q', (('x', 34), ('z', 73))), ('r', (('x', 'b'), ('z', 'd'))), ('s', (('x', True), ('z', True))), ('t', (('x', False), ('z', True)))))
s1 = f1.iter_group('p', axis=0).apply(lambda f: f['q'].values.sum())
self.assertEqual(list(s1.items()), [(2, 97), (30, 107)])
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
6e1066a32d3b678c93a683c91c32ca9925549774 | 72d010d00355fc977a291c29eb18aeb385b8a9b0 | /MPK261/__init__.py | 1878e1129184af07da8510e9e370e01adae46916 | [] | no_license | maratbakirov/AbletonLive10_MIDIRemoteScripts | bf0749c5c4cce8e83b23f14f671e52752702539d | ed1174d9959b20ed05fb099f0461bbc006bfbb79 | refs/heads/master | 2021-06-16T19:58:34.038163 | 2021-05-09T11:46:46 | 2021-05-09T11:46:46 | 203,174,328 | 0 | 0 | null | 2019-08-19T13:04:23 | 2019-08-19T13:04:22 | null | UTF-8 | Python | false | false | 741 | py | # Embedded file name: /Users/versonator/Jenkins/live/output/mac_64_static/Release/python-bundle/MIDI Remote Scripts/MPK261/__init__.py
# Compiled at: 2018-04-23 20:27:04
from __future__ import absolute_import, print_function, unicode_literals
from .MPK261 import MPK261
from _Framework.Capabilities import controller_id, inport, outport, CONTROLLER_ID_KEY, PORTS_KEY, NOTES_CC, SCRIPT, REMOTE
def get_capabilities():
return {CONTROLLER_ID_KEY: controller_id(vendor_id=2536, product_ids=[
37], model_name='MPK261'),
PORTS_KEY: [
inport(props=[NOTES_CC, SCRIPT, REMOTE]),
outport(props=[SCRIPT, REMOTE])]}
def create_instance(c_instance):
return MPK261(c_instance)
| [
"[email protected]"
] | |
579153317b369ad77af1c66c5cb43036e863cc19 | 5be8b0f2ee392abeee6970e7a6364ac9a5b8ceaa | /xiaojian/second_phase/day12/http_sever2.0.py | 12ccde8198046391e24f9698efd843eacb0c011c | [] | no_license | Wellsjian/20180826 | 424b65f828f0174e4d568131da01dafc2a36050a | 0156ad4db891a2c4b06711748d2624080578620c | refs/heads/master | 2021-06-18T12:16:08.466177 | 2019-09-01T10:06:44 | 2019-09-01T10:06:44 | 204,462,572 | 0 | 1 | null | 2021-04-20T18:26:03 | 2019-08-26T11:38:09 | JavaScript | UTF-8 | Python | false | false | 3,467 | py | """
HTTP 2.0
接口设计:
1.提供句柄,通过句柄调用属性和方法
obj = open()
lock = Lock()
2.实例化对象,通过对象设置,启动服务
t = Thread()
p = Process()
3.根据功能需求,无法帮助用户决定的内容,通过参数传递
4.能够解决的问题,不要让用户去解决,需要用户解决的问题可以用重写的方法去解决
技术分析:
HTTP 协议
思路分析
1.使用类进行封装
2.从用户的角度决定代码的编写
"""
# 具体HTTP sever功能.
from socket import *
from select import *
class HTTPSever:
def __init__(self, host, port, dir):
self.addrss = (host, port)
self.host = host
self.port = port
self.dir = dir
self.rlist = []
self.wlist = []
self.xlist = []
self.create_socket()
self.bind()
# 创建套接字
def create_socket(self):
self.sockfd = socket()
self.sockfd.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1)
# 绑定地址
def bind(self):
self.sockfd.bind(self.addrss)
# 启动服务
def server_forver(self):
self.sockfd.listen(5)
print("listen the port %d" % self.port)
self.rlist.append(self.sockfd)
while True:
rs, ws, xs = select(self.rlist, self.wlist, self.xlist)
self.do_rlist(rs)
# 具体处理请求
def handle(self, connfd):
request = connfd.recv(1024)
if not request:
connfd.close()
self.rlist.remove(connfd)
return
# 提取请求内容
request_line = request.splitlines()[0]
info = request_line.decode().split(" ")[1]
print(connfd.getpeername(), ":", info)
if info == "/" or info[-5:] == ".html":
self.get_html(connfd, info)
else:
self.get_data(connfd,info)
def get_data(self,connfd,info):
response = "HTTP/1.1 200 ok\r\n"
response += "\r\n"
response += "<h1>Waiting for the HTTPSEVER 3.0<h1>"
connfd.send(response.encode())
def get_html(self,connfd,info):
if info == "/":
html_name = self.dir + "/index.html"
else:
html_name = self.dir + info
try:
obj = open(html_name)
except Exception:
response = "HTTP/1.1 404 not found\r\n"
response += "Content_Type:text/html\r\n"
response += "\r\n"
response += "<h1>sorry.....<h1>"
else:
response = "HTTP/1.1 200 OK\r\n"
response += "Content_Type:text/html\r\n"
response += "\r\n"
response += obj.read()
finally:
connfd.send(response.encode())
# 具体处理rlist里的监控信号
def do_rlist(self, rs):
for r in rs:
if r is self.sockfd:
connfd, addr = self.sockfd.accept()
print("Connect from ", addr)
self.rlist.append(connfd)
else:
self.handle(r)
if __name__ == "__main__":
# 希望通过HTTPSever类快速搭建http服务,用以展示自己的网页
# HOST = "0.0.0.0"
# PORT = 22222
# ADDR = (HOST, PORT)
# DIR = "./static"
HOST = "172.40.74.151"
PORT = 8888
DIR ="./hfklswn"
# 实例化对象
httpfd = HTTPSever(HOST, PORT, DIR)
# 启动HTTP服务
httpfd.server_forver()
| [
"[email protected]"
] | |
4d75a2fa3fbfcd227da641b06f2ce1f1a779e02e | 6a07912090214567f77e9cd941fb92f1f3137ae6 | /cs212/Unit 4/28.py | ae381957925468dc57906a2813b0cfd324dea8d0 | [] | no_license | rrampage/udacity-code | 4ab042b591fa3e9adab0183d669a8df80265ed81 | bbe968cd27da7cc453eada5b2aa29176b0121c13 | refs/heads/master | 2020-04-18T08:46:00.580903 | 2012-08-25T08:44:24 | 2012-08-25T08:44:24 | 5,352,942 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,983 | py | # cs212 ; Unit 4 ; 28
# -----------------
# User Instructions
#
# In this problem, you will generalize the bridge problem
# by writing a function bridge_problem3, that makes a call
# to lowest_cost_search.
def bridge_problem3(here):
"""Find the fastest (least elapsed time) path to
the goal in the bridge problem."""
# your code here
return lowest_cost_search() # <== your arguments here
# your code here if necessary
def lowest_cost_search(start, successors, is_goal, action_cost):
"""Return the lowest cost path, starting from start state,
and considering successors(state) => {state:action,...},
that ends in a state for which is_goal(state) is true,
where the cost of a path is the sum of action costs,
which are given by action_cost(action)."""
explored = set() # set of states we have visited
frontier = [ [start] ] # ordered list of paths we have blazed
while frontier:
path = frontier.pop(0)
state1 = final_state(path)
if is_goal(state1):
return path
explored.add(state1)
pcost = path_cost(path)
for (state, action) in successors(state1).items():
if state not in explored:
total_cost = pcost + action_cost(action)
path2 = path + [(action, total_cost), state]
add_to_frontier(frontier, path2)
return Fail
def final_state(path): return path[-1]
def path_cost(path):
"The total cost of a path (which is stored in a tuple with the final action)."
if len(path) < 3:
return 0
else:
action, total_cost = path[-2]
return total_cost
def add_to_frontier(frontier, path):
"Add path to frontier, replacing costlier path if there is one."
# (This could be done more efficiently.)
# Find if there is an old path to the final state of this path.
old = None
for i,p in enumerate(frontier):
if final_state(p) == final_state(path):
old = i
break
if old is not None and path_cost(frontier[old]) < path_cost(path):
return # Old path was better; do nothing
elif old is not None:
del frontier[old] # Old path was worse; delete it
## Now add the new path and re-sort
frontier.append(path)
frontier.sort(key=path_cost)
def bsuccessors2(state):
"""Return a dict of {state:action} pairs. A state is a (here, there) tuple,
where here and there are frozensets of people (indicated by their times) and/or
the light."""
here, there = state
if 'light' in here:
return dict(((here - frozenset([a, b, 'light']),
there | frozenset([a, b, 'light'])),
(a, b, '->'))
for a in here if a is not 'light'
for b in here if b is not 'light')
else:
return dict(((here | frozenset([a, b, 'light']),
there - frozenset([a, b, 'light'])),
(a, b, '<-'))
for a in there if a is not 'light'
for b in there if b is not 'light')
def bcost(action):
"Returns the cost (a number) of an action in the bridge problem."
# An action is an (a, b, arrow) tuple; a and b are times; arrow is a string
a, b, arrow = action
return max(a, b)
def test():
here = [1, 2, 5, 10]
assert bridge_problem3(here) == [
(frozenset([1, 2, 'light', 10, 5]), frozenset([])),
((2, 1, '->'), 2),
(frozenset([10, 5]), frozenset([1, 2, 'light'])),
((2, 2, '<-'), 4),
(frozenset(['light', 10, 2, 5]), frozenset([1])),
((5, 10, '->'), 14),
(frozenset([2]), frozenset([1, 10, 5, 'light'])),
((1, 1, '<-'), 15),
(frozenset([1, 2, 'light']), frozenset([10, 5])),
((2, 1, '->'), 17),
(frozenset([]), frozenset([1, 10, 2, 5, 'light']))]
return 'test passes'
print test()
| [
"[email protected]"
] | |
9abb3baada0faed6fe83d3c15b41aa7c7958cb80 | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_27357.py | 1163c19de3fb005d7b6fa68a6a453f6f2e63147f | [] | no_license | AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 46 | py | # pyplot.savefig with empty export
plt.show()
| [
"[email protected]"
] | |
2549b51f9b74bd83a48077d8573f285fddd9ebc2 | 70054615f56be28373b00c9df96544ec822be683 | /res/scripts/common/offers.py | d85a601ecaff58e94484a30537cc4c8545a98445 | [] | no_license | wanyancan/WOTDecompiled | c646ad700f5ec3fb81fb4e87862639ce0bdf0000 | 9ffb09007a61d723cdb28549e15db39c34c0ea1e | refs/heads/master | 2020-04-17T23:13:15.649069 | 2013-11-15T16:37:10 | 2013-11-15T16:37:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,339 | py | import time
from collections import namedtuple
import BigWorld
from constants import IS_BASEAPP
from debug_utils import *
ENTITY_TYPE_ACCOUNT = 0
ENTITY_TYPE_CLAN = 1
ENTITY_TYPE_NAMES_BY_IDS = ('Account', 'Clan')
ENTITY_TYPE_IDS_BY_NAMES = {'Account': ENTITY_TYPE_ACCOUNT,
'Clan': ENTITY_TYPE_CLAN}
ENTITY_TYPE_IDS = (ENTITY_TYPE_ACCOUNT, ENTITY_TYPE_CLAN)
OFFER_SELL = 0
_OFFER_KIND_MASK = 192
SRC_WARE_GOLD = 0
SRC_WARE_CREDITS = 256
SRC_WARE_ITEMS = 512
SRC_WARE_VEHICLE = 768
SRC_WARE_TANKMAN = 1024
SRC_WARE_KINDS = (SRC_WARE_GOLD,
SRC_WARE_CREDITS,
SRC_WARE_ITEMS,
SRC_WARE_VEHICLE,
SRC_WARE_TANKMAN)
SRC_WARE_MONEY_KINDS = (SRC_WARE_GOLD, SRC_WARE_CREDITS)
_SRC_WARE_KIND_MASK = 3840
DST_WARE_GOLD = 0
DST_WARE_CREDITS = 4096
DST_WARE_KINDS = (DST_WARE_GOLD, DST_WARE_CREDITS)
_DST_WARE_KIND_MASK = 61440
def makeOfferFlags(offerKind, srcWareKind, dstWareKind, srcEntityType, dstEntityType):
return offerKind | srcWareKind | dstWareKind | srcEntityType | dstEntityType << 3
ParsedOfferFlags = namedtuple('ParsedOfferFlags', 'offerKind srcWareKind dstWareKind srcEntityType dstEntityType')
def parseOfferFlags(flags):
raw = (flags & _OFFER_KIND_MASK,
flags & _SRC_WARE_KIND_MASK,
flags & _DST_WARE_KIND_MASK,
flags & 7,
flags >> 3 & 7)
return ParsedOfferFlags._make(raw)
def parseSrcEntityTypeFromFlags(flags):
return flags & 7
def parseDstEntityTypeFromFlags(flags):
return flags >> 3 & 7
class OutOffers(object):
Offer = namedtuple('Offer', 'flags dstDBID dstName srcWares dstWares validTill fee')
def __init__(self, offersDict, outWriterGetter = None):
offersDict.setdefault('nextID', 0)
offersDict.setdefault('done', {})
offersDict.setdefault('out', {})
self.__data = offersDict
self.__outWriter = outWriterGetter if outWriterGetter is not None else _WriterGetter(offersDict['out'])
return
def __getitem__(self, offerID):
return _makeOutOffer(self.__data['out'][offerID])
def get(self, offerID):
offer = self.__data['out'].get(offerID)
if offer is not None:
return _makeOutOffer(offer)
else:
return
def getExt(self, offerID, default = None):
outExt = self.__data.get('outExt')
if outExt is None:
return default
else:
return outExt.get(offerID, default)
def items(self):
return [ (id, _makeOutOffer(data)) for id, data in self.__data['out'].iteritems() ]
def clear(self):
self.__data['out'].clear()
self.__data['done'].clear()
self.__data.pop('outExt', None)
self.__data['nextID'] += 1
return
def count(self):
return len(self.__data['out'])
def doneOffers(self):
return self.__data['done']
def timedOutOffers(self):
res = []
currTime = int(time.time())
for offerID, offer in self.__data['out'].iteritems():
if offer[5] <= currTime:
res.append(offerID)
return res
def inventorySlots(self):
vehs = []
numTmen = 0
for offer in self.__data['out'].itervalues():
srcWareKind = offer[0] & _SRC_WARE_KIND_MASK
if srcWareKind == SRC_WARE_VEHICLE:
vehs.append(offer[3][0])
elif srcWareKind == SRC_WARE_TANKMAN:
numTmen += 1
return (vehs, numTmen)
def moveToDone(self, offerID):
data = self.__data
data['done'][offerID] = self.__outWriter().pop(offerID)
outExt = data.get('outExt')
if outExt is not None:
outExt.pop(offerID, None)
data['nextID'] += 1
return len(data['done'])
def remove(self, offerID):
if self.__outWriter().pop(offerID, None) is not None:
self.__data['nextID'] += 1
outExt = self.__data.get('outExt')
if outExt is not None:
outExt.pop(offerID, None)
return
def removeDone(self, offerID):
self.__data['done'].pop(offerID, None)
return
def updateDestination(self, offerID, dstEntityType, dstEntityDBID, dstEntityName):
raise self.__data['out'][offerID][1] == dstEntityDBID or AssertionError
def createOffer(self, flags, srcDBID, srcName, dstDBID, dstName, validSec, srcWares, srcFee, dstWares, dstFee, ext = None):
currTime = int(time.time())
validTill = currTime + int(validSec)
offer = (flags,
dstDBID,
dstName,
srcWares,
dstWares,
validTill,
srcFee)
data = self.__data
offerID = ((currTime & 1048575) << 12) + (data['nextID'] & 4095)
data['nextID'] += 1
if not (offerID not in data['out'] and offerID not in data['done']):
raise AssertionError
self.__outWriter()[offerID] = offer
data.setdefault('outExt', {})[offerID] = ext is not None and ext
return (offerID, (offerID,
flags,
srcDBID,
srcName,
srcWares,
dstWares,
validTill,
dstFee))
class InOffers(object):
Offer = namedtuple('Offer', 'srcOfferID flags srcDBID srcName srcWares dstWares validTill fee')
def __init__(self, offersDict, inWriterGetter = None):
offersDict.setdefault('nextID', 0)
offersDict.setdefault('in', {})
self.__data = offersDict
self.__inWriter = inWriterGetter if inWriterGetter is not None else _WriterGetter(offersDict['in'])
return
def __getitem__(self, offerID):
return _makeInOffer(self.__data['in'][offerID])
def get(self, offerID):
offer = self.__data['in'].get(offerID)
if offer is not None:
return _makeInOffer(offer)
else:
return
def items(self):
return [ (id, _makeOutOffer(data)) for id, data in self.__data['in'].iteritems() ]
def clear(self):
self.__data['in'].clear()
self.__data['nextID'] += 1
def count(self):
return len(self.__data['in'])
def timedOutOffers(self):
res = []
currTime = int(time.time())
for offerID, offer in self.__data['in'].iteritems():
if offer[6] <= currTime:
res.append(offerID)
return res
def findOfferBySource(self, srcEntityType, srcEntityDBID, srcOfferID):
for inOfferID, offer in self.__data['in'].iteritems():
if offer[0] == srcOfferID and offer[2] == srcEntityDBID and parseSrcEntityTypeFromFlags(offer[1]) == srcEntityType:
return inOfferID
return None
def add(self, offer):
data = self.__data
offerID = data['nextID']
data['nextID'] += 1
self.__inWriter()[offerID] = tuple(offer)
return offerID
def remove(self, offerID):
if self.__inWriter().pop(offerID, None) is not None:
self.__data['nextID'] += 1
return
def collectOutOfferResults(outOffer):
offerFlags = parseOfferFlags(outOffer.flags)
gold = 0
credits = 0
items = None
if offerFlags.srcWareKind == SRC_WARE_GOLD:
gold -= outOffer.srcWares + outOffer.fee
elif offerFlags.srcWareKind == SRC_WARE_CREDITS:
credits -= outOffer.srcWares + outOffer.fee
else:
items = outOffer.srcWares
if offerFlags.dstWareKind == DST_WARE_GOLD:
gold += outOffer.dstWares
else:
credits += outOffer.dstWares
return (offerFlags,
gold,
credits,
items)
def collectInOfferResults(inOffer):
    """Aggregate the gold/credits/items deltas implied by an incoming offer.

    Mirror image of collectOutOfferResults: the source wares are received
    (positive), while the destination wares plus the fee are paid out
    (negative).  Returns ``(offerFlags, gold, credits, items)``.
    """
    flags = parseOfferFlags(inOffer.flags)
    gold, credits, items = 0, 0, None
    # What we receive from the other party.
    if flags.srcWareKind == SRC_WARE_GOLD:
        gold = inOffer.srcWares
    elif flags.srcWareKind == SRC_WARE_CREDITS:
        credits = inOffer.srcWares
    else:
        items = inOffer.srcWares
    # What we hand over, plus the transaction fee.
    if flags.dstWareKind == DST_WARE_GOLD:
        gold -= inOffer.dstWares + inOffer.fee
    else:
        credits -= inOffer.dstWares + inOffer.fee
    return flags, gold, credits, items
# Pre-bound namedtuple factories that hydrate raw stored offer tuples into
# typed OutOffers.Offer / InOffers.Offer records; bound once at module level
# so the container classes above share them.
_makeOutOffer = OutOffers.Offer._make
_makeInOffer = InOffers.Offer._make
class _WriterGetter(object):
def __init__(self, dict):
self.__d = dict
def __call__(self):
return self.__d
| [
"[email protected]"
] | |
c37ff8cfcff227220d098069e2f3040dce7f56e8 | 9145d24e2517d7f3cea6e89158806b95919449b8 | /doc/conf.py | 37c50aca46644bd4ce262e466fa2696daa55957c | [
"LicenseRef-scancode-warranty-disclaimer",
"Apache-2.0"
] | permissive | pombredanne/coveragepy | b6de846694156581ee0b9a3348f4cfd48719855f | 2364947d7814a065cf2c05d930eda94203b20f1c | refs/heads/master | 2021-01-22T23:43:21.800229 | 2017-03-18T11:14:13 | 2017-03-18T11:14:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,618 | py | # -*- coding: utf-8 -*-
# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://bitbucket.org/ned/coveragepy/src/default/NOTICE.txt
#
# coverage.py documentation build configuration file, created by
# sphinx-quickstart on Wed May 13 22:18:33 2009.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.append(os.path.abspath('.'))
# on_rtd is whether we are on readthedocs.org
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.todo',
'sphinx.ext.ifconfig',
'sphinxcontrib.spelling',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Coverage.py'
copyright = u'2009\N{EN DASH}2017, Ned Batchelder' # CHANGEME
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '4.3.4' # CHANGEME
# The full version, including alpha/beta/rc tags.
release = '4.3.4' # CHANGEME
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
#html_theme = 'default'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# otherwise, readthedocs.org uses their theme by default, so no need to specify it
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
#html_style = "neds.css"
#html_add_permalinks = ""
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['_templates']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
html_use_modindex = False
# If false, no index is generated.
html_use_index = False
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
html_show_sourcelink = False
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = '.htm'
# Output file base name for HTML help builder.
htmlhelp_basename = 'coveragepydoc'
# -- Spelling ---
# Settings for the sphinxcontrib-spelling extension enabled above.
spelling_word_list_filename = 'dict.txt'
spelling_show_suggestions = False
# When auto-doc'ing a class, write the class' docstring and the __init__ docstring
# into the class docs.
autoclass_content = "class"
# True when the release string carries an alphabetic pre-release suffix
# (e.g. '4.3.4b1'): max() returns the largest character of the string, and
# any ASCII letter sorts above the digits and dots of a final release.
prerelease = bool(max(release).isalpha())
def setup(app):
    """Sphinx extension hook: register the custom stylesheet and expose the
    ``prerelease`` config value (presumably consumed by ``ifconfig``
    directives, since that extension is enabled above — verify)."""
    app.add_stylesheet('coverage.css')
    app.add_config_value('prerelease', False, 'env')
    message = "** Prerelease = %r" % prerelease
    app.info(message)
| [
"[email protected]"
] |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.