hexsha
stringlengths 40
40
| size
int64 5
2.06M
| ext
stringclasses 10
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 3
248
| max_stars_repo_name
stringlengths 5
125
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
248
| max_issues_repo_name
stringlengths 5
125
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
67k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
248
| max_forks_repo_name
stringlengths 5
125
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 5
2.06M
| avg_line_length
float64 1
1.02M
| max_line_length
int64 3
1.03M
| alphanum_fraction
float64 0
1
| count_classes
int64 0
1.6M
| score_classes
float64 0
1
| count_generators
int64 0
651k
| score_generators
float64 0
1
| count_decorators
int64 0
990k
| score_decorators
float64 0
1
| count_async_functions
int64 0
235k
| score_async_functions
float64 0
1
| count_documentation
int64 0
1.04M
| score_documentation
float64 0
1
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ddee87075899569423a1b5f9ff3b0f5185a1f91c
| 3,677 |
py
|
Python
|
noname.py
|
schellenberg/lyric-grabber-for-plex
|
f449b480d4c234ff9d358fc7d9a0e729e9ed45e5
|
[
"MIT"
] | 26 |
2019-09-30T02:29:05.000Z
|
2022-01-17T20:10:54.000Z
|
noname.py
|
schellenberg/lyric-grabber-for-plex
|
f449b480d4c234ff9d358fc7d9a0e729e9ed45e5
|
[
"MIT"
] | 5 |
2019-10-02T18:53:48.000Z
|
2020-05-06T14:07:25.000Z
|
noname.py
|
schellenberg/lyric-grabber-for-plex
|
f449b480d4c234ff9d358fc7d9a0e729e9ed45e5
|
[
"MIT"
] | 6 |
2019-09-30T13:04:22.000Z
|
2022-03-24T17:47:16.000Z
|
# -*- coding: utf-8 -*-
###########################################################################
## Python code generated with wxFormBuilder (version Oct 26 2018)
## http://www.wxformbuilder.org/
##
## PLEASE DO *NOT* EDIT THIS FILE!
###########################################################################
import wx
import wx.xrc
###########################################################################
## Class frameMain
###########################################################################
class frameMain ( wx.Frame ):
    """Main (and only) window of the Plex Lyric Grabber GUI.

    NOTE: layout code generated with wxFormBuilder (Oct 26 2018); prefer
    regenerating from the designer project over hand-editing this class.
    """

    def __init__( self, parent ):
        # Fixed 800x600, non-resizable frame (style omits wx.RESIZE_BORDER).
        wx.Frame.__init__ ( self, parent, id = wx.ID_ANY, title = u"Plex Lyric Grabber", pos = wx.DefaultPosition, size = wx.Size( 800,600 ), style = wx.CAPTION|wx.CLOSE_BOX|wx.SYSTEM_MENU|wx.TAB_TRAVERSAL )
        self.SetSizeHints( wx.DefaultSize, wx.DefaultSize )
        # Nested vertical sizers: frame -> main panel -> folder-select column.
        bSizerFrameMain = wx.BoxSizer( wx.VERTICAL )
        bSizerMainFrame = wx.BoxSizer( wx.VERTICAL )
        self.m_panelMain = wx.Panel( self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.TAB_TRAVERSAL )
        bSizerMainPanel = wx.BoxSizer( wx.VERTICAL )
        bSizerPanelMain = wx.BoxSizer( wx.VERTICAL )
        bSizerFolderSelect = wx.BoxSizer( wx.VERTICAL )
        # Folder-selection controls: instructions, directory picker, search button.
        self.m_staticTextInstructions = wx.StaticText( self.m_panelMain, wx.ID_ANY, u"Select your Plex music library folder...", wx.DefaultPosition, wx.DefaultSize, 0 )
        self.m_staticTextInstructions.Wrap( -1 )
        bSizerFolderSelect.Add( self.m_staticTextInstructions, 0, wx.ALL|wx.ALIGN_CENTER_HORIZONTAL, 5 )
        self.m_dirPicker1 = wx.DirPickerCtrl( self.m_panelMain, wx.ID_ANY, wx.EmptyString, u"Select a folder", wx.DefaultPosition, wx.DefaultSize, wx.DIRP_DEFAULT_STYLE )
        bSizerFolderSelect.Add( self.m_dirPicker1, 0, wx.ALL|wx.ALIGN_CENTER_HORIZONTAL, 5 )
        self.m_buttonSearch = wx.Button( self.m_panelMain, wx.ID_ANY, u"Begin Lyric Search", wx.DefaultPosition, wx.DefaultSize, 0 )
        bSizerFolderSelect.Add( self.m_buttonSearch, 0, wx.ALL|wx.ALIGN_CENTER_HORIZONTAL, 5 )
        # Italicized warning about long-running searches on large libraries.
        self.m_staticTextTimeWarning = wx.StaticText( self.m_panelMain, wx.ID_ANY, u"Note that selecting a large folder of music can cause the search to take a very long time...", wx.DefaultPosition, wx.DefaultSize, 0 )
        self.m_staticTextTimeWarning.Wrap( -1 )
        self.m_staticTextTimeWarning.SetFont( wx.Font( wx.NORMAL_FONT.GetPointSize(), wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_ITALIC, wx.FONTWEIGHT_NORMAL, False, wx.EmptyString ) )
        bSizerFolderSelect.Add( self.m_staticTextTimeWarning, 0, wx.ALL|wx.ALIGN_CENTER_HORIZONTAL, 5 )
        # 30px vertical spacer separating the controls from the log area.
        bSizerFolderSelect.Add( ( 0, 30), 0, 0, 5 )
        # Log area: bold "Log" header plus a large static text for messages.
        self.m_staticText6 = wx.StaticText( self.m_panelMain, wx.ID_ANY, u"Log", wx.DefaultPosition, wx.DefaultSize, 0 )
        self.m_staticText6.Wrap( -1 )
        self.m_staticText6.SetFont( wx.Font( wx.NORMAL_FONT.GetPointSize(), wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_BOLD, False, wx.EmptyString ) )
        bSizerFolderSelect.Add( self.m_staticText6, 0, wx.ALL|wx.ALIGN_CENTER_HORIZONTAL, 5 )
        self.m_staticTextLog = wx.StaticText( self.m_panelMain, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 750,365 ), 0 )
        self.m_staticTextLog.Wrap( -1 )
        bSizerFolderSelect.Add( self.m_staticTextLog, 0, wx.ALL|wx.ALIGN_CENTER_HORIZONTAL, 5 )
        # Assemble the sizer hierarchy and lay everything out.
        bSizerPanelMain.Add( bSizerFolderSelect, 1, wx.EXPAND, 5 )
        bSizerMainPanel.Add( bSizerPanelMain, 1, wx.ALL|wx.EXPAND, 0 )
        self.m_panelMain.SetSizer( bSizerMainPanel )
        self.m_panelMain.Layout()
        bSizerMainPanel.Fit( self.m_panelMain )
        bSizerMainFrame.Add( self.m_panelMain, 1, wx.EXPAND|wx.ALL, 0 )
        bSizerFrameMain.Add( bSizerMainFrame, 1, wx.ALL|wx.EXPAND, 0 )
        self.SetSizer( bSizerFrameMain )
        self.Layout()
        self.Centre( wx.BOTH )

    def __del__( self ):
        # Generated stub; no explicit cleanup required.
        pass
| 39.537634 | 213 | 0.689421 | 3,162 | 0.85994 | 0 | 0 | 0 | 0 | 0 | 0 | 678 | 0.184389 |
ddefc795048cf6d437ec89f16bc748675c74df14
| 1,371 |
py
|
Python
|
feature_extraction/cli.py
|
widoptimization-willett/feature-extraction
|
25e963e3383673aad6aedfd504e69a1df7f47f9a
|
[
"Apache-2.0"
] | null | null | null |
feature_extraction/cli.py
|
widoptimization-willett/feature-extraction
|
25e963e3383673aad6aedfd504e69a1df7f47f9a
|
[
"Apache-2.0"
] | null | null | null |
feature_extraction/cli.py
|
widoptimization-willett/feature-extraction
|
25e963e3383673aad6aedfd504e69a1df7f47f9a
|
[
"Apache-2.0"
] | null | null | null |
import json
import click
from tqdm import tqdm
import numpy as np
from PIL import Image
from skimage.exposure import rescale_intensity
from . import extraction, pipeline
np.seterr(all='raise')
@click.command()
@click.argument('pipeline_manifest', required=True)
@click.argument('files', nargs=-1, required=True)  # unlimited number of args can be passed (eg. globbing)
@click.option('-o', '--output', default='features.json')
def extract_features(pipeline_manifest, files, output):
    """Extract a feature vector from each image file and write them to JSON.

    Args:
        pipeline_manifest: path to the manifest describing preprocessing,
            the feature-extraction pipeline, and postprocessing.
        files: image file paths to process (any number, e.g. from globbing).
        output: destination JSON file mapping each input path to its
            postprocessed feature vector.
    """
    # Close the manifest promptly instead of leaking the file handle.
    with open(pipeline_manifest) as manifest:
        preprocess_options, pipe, postprocess_options = pipeline.construct_from_manifest(manifest)
    # TODO(liam): replace this with sending to celery and joining
    X_raw = []  # raw feature matrix, one row per input file
    for filename in tqdm(files):
        # Load with Pillow, convert to a numpy array, and rescale intensities
        # (in_range='dtype', out_range='uint16', i.e. to 16 bits of depth).
        im = rescale_intensity(np.array(Image.open(filename)), 'dtype', 'uint16')
        assert im.ndim == 2  # rank should be 2 if we're only considering grayscale images
        # -- preprocess
        im = extraction.image_preprocessing(im, preprocess_options)
        # -- extract features
        x = extraction.extract_features(im, pipe)
        # -- add to the feature vector
        X_raw.append(x)
    X = extraction.feature_postprocessing(np.array(X_raw), postprocess_options)
    feature_map = dict(zip(files,
                           map(lambda x: x.tolist(), X)))
    # Context manager guarantees the output file is flushed and closed.
    with open(output, 'w') as out_file:
        json.dump(feature_map, out_file, indent=4)
| 32.642857 | 106 | 0.745441 | 0 | 0 | 0 | 0 | 1,173 | 0.85558 | 0 | 0 | 430 | 0.31364 |
ddeff9d5eb6e649b509f4b345051a62872b3798f
| 186 |
py
|
Python
|
homepage/templatetags/infos_tags.py
|
phodal/phodaldev
|
b5a48339a21b5674a70d284a85ef8c45e010fe43
|
[
"MIT"
] | 94 |
2015-01-28T15:46:02.000Z
|
2020-11-02T12:56:15.000Z
|
homepage/templatetags/infos_tags.py
|
phodal/phodaldev
|
b5a48339a21b5674a70d284a85ef8c45e010fe43
|
[
"MIT"
] | 56 |
2015-04-05T03:18:41.000Z
|
2021-08-29T00:50:57.000Z
|
homepage/templatetags/infos_tags.py
|
phodal/phodaldev
|
b5a48339a21b5674a70d284a85ef8c45e010fe43
|
[
"MIT"
] | 38 |
2015-08-26T08:10:12.000Z
|
2021-06-11T19:36:31.000Z
|
from mezzanine import template
from homepage.models import Info
register = template.Library()
@register.as_tag
def about():
    """Template tag that returns the Info record of type ``ABOUT``."""
    return Info.objects.get(type="ABOUT")
| 18.6 | 42 | 0.747312 | 0 | 0 | 0 | 0 | 89 | 0.478495 | 0 | 0 | 7 | 0.037634 |
ddf10fa882b2377b78f180954bd012323f534965
| 514 |
py
|
Python
|
test/unit/test_main.py
|
CMPUT291PROJECT1F18/Mini-Project-1
|
b58144dd80c40466de755877b7c3996f4aa67af9
|
[
"MIT"
] | 1 |
2018-11-06T01:04:13.000Z
|
2018-11-06T01:04:13.000Z
|
test/unit/test_main.py
|
CMPUT291PROJECT1F18/Mini-Project-1
|
b58144dd80c40466de755877b7c3996f4aa67af9
|
[
"MIT"
] | 39 |
2018-10-23T00:28:13.000Z
|
2018-11-06T16:14:56.000Z
|
test/unit/test_main.py
|
CMPUT291PROJECT1F18/Mini-Project-1
|
b58144dd80c40466de755877b7c3996f4aa67af9
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""pytests for :mod:`.__main__`"""
from mini_project_1.__main__ import get_parser, main
import mock
def test_get_parser():
    """Smoke test: the argument-parser factory returns a truthy parser."""
    assert get_parser()
def test_main(tmpdir):
    """Run main() end-to-end against a temp file with interaction stubbed out."""
    db_path = str(tmpdir.join("thefile_name.json"))
    # Stub both the interactive prompt and the shell's command loop so that
    # main() runs to completion without any real user input.
    with mock.patch('builtins.input', return_value='foo'), \
         mock.patch('mini_project_1.shell.MiniProjectShell.cmdloop', return_value='bar'):
        main(["-i", db_path])
| 23.363636 | 93 | 0.680934 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 170 | 0.330739 |
ddf357f8f64530a6e0a779ab33c258cb0322ac3e
| 189 |
py
|
Python
|
kafka/1/consumer/kaktatest.py
|
adriancarriger/experiments
|
7e4248592dc8fbb08522c9b5f0393c80dc7e2699
|
[
"MIT"
] | 1 |
2021-06-22T13:38:36.000Z
|
2021-06-22T13:38:36.000Z
|
kafka/1/consumer/kaktatest.py
|
adriancarriger/experiments
|
7e4248592dc8fbb08522c9b5f0393c80dc7e2699
|
[
"MIT"
] | 108 |
2019-05-23T16:12:32.000Z
|
2020-09-04T15:47:33.000Z
|
kafka/1/consumer/kaktatest.py
|
adriancarriger/experiments
|
7e4248592dc8fbb08522c9b5f0393c80dc7e2699
|
[
"MIT"
] | null | null | null |
from kafka import KafkaConsumer
# Tail the test topic from the local broker, printing every message.
# (From inside the compose network the broker is reachable at kafka-1:9092.)
consumer = KafkaConsumer('myTestTopic', bootstrap_servers='localhost:9092')
for message in consumer:
    print("The Message is :", message)
| 31.5 | 75 | 0.756614 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 69 | 0.365079 |
ddf3aa22469fb9543ce65c54187ad310deed6e44
| 67,701 |
py
|
Python
|
src/genie/libs/parser/iosxe/tests/ShowIpBgpDetail/cli/equal/golden_output2_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 204 |
2018-06-27T00:55:27.000Z
|
2022-03-06T21:12:18.000Z
|
src/genie/libs/parser/iosxe/tests/ShowIpBgpDetail/cli/equal/golden_output2_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 468 |
2018-06-19T00:33:18.000Z
|
2022-03-31T23:23:35.000Z
|
src/genie/libs/parser/iosxe/tests/ShowIpBgpDetail/cli/equal/golden_output2_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 309 |
2019-01-16T20:21:07.000Z
|
2022-03-30T12:56:41.000Z
|
# "show ip bgp detail" parsed golden output for vrf L3VPN-0050.
#
# All 39 VPNv4 prefixes share identical path attributes (gateway/next hop
# 192.168.10.253, localpref 100, metric 0, RT:5918:50, route info 62000)
# and differ only in the inbound MPLS label, the BGP table version, and --
# for 172.16.100.2/32 -- an extra standard community and no update-group.
# The heavily repeated structure is therefore generated from a compact table
# instead of ~1100 copy-pasted lines; the resulting dict is value-identical.

def _vpnv4_entry(in_label, table_version, community=None, update_group=724):
    """Build the parsed-output dict for a single VPNv4 prefix.

    Args:
        in_label: inbound MPLS label as a string (e.g. "2467").
        table_version: BGP table version string for this prefix.
        community: optional standard community string; key omitted if None.
        update_group: update-group number, or None to omit the key.
    """
    path = {
        "ext_community": "RT:5918:50",
        "gateway": "192.168.10.253",
        "localpref": 100,
        "metric": 0,
        "mpls_labels": {
            "in": in_label,
            "out": "nolabel",
        },
        "next_hop": "192.168.10.253",
        "next_hop_via": "vrf L3VPN-0050",
        "origin_codes": "?",
        "originator": "192.168.10.253",
        "recipient_pathid": "0",
        "refresh_epoch": 1,
        "route_info": "62000",
        "status_codes": "*>",
        "transfer_pathid": "0x0",
    }
    if community is not None:
        path["community"] = community
    if update_group is not None:
        path["update_group"] = update_group
    return {
        "available_path": "1",
        "best_path": "1",
        "index": {1: path},
        "paths": "1 available, best #1, table L3VPN-0050",
        "table_version": table_version,
    }

# (prefix, inbound MPLS label, table version) for the uniform entries.
_PREFIX_TABLE = [
    ("10.4.1.0/24", "2467", "16933597"),
    ("10.44.105.0/24", "6620", "16933498"),
    ("172.16.100.10/32", "2904", "16933541"),
    ("172.16.100.11/32", "2903", "16933540"),
    ("172.16.100.12/32", "2901", "16933539"),
    ("172.16.100.13/32", "5466", "16933538"),
    ("172.16.100.14/32", "5465", "16933537"),
    ("172.16.100.15/32", "5464", "16933536"),
    ("172.16.100.16/32", "2914", "16933551"),
    ("172.16.100.17/32", "2913", "16933550"),
    ("172.16.100.18/32", "2912", "16933549"),
    ("172.16.100.19/32", "2911", "16933548"),
    ("172.16.100.20/32", "2910", "16933547"),
    ("172.16.100.21/32", "2909", "16933546"),
    ("172.16.100.22/32", "2908", "16933545"),
    ("172.16.100.23/32", "2907", "16933544"),
    ("172.16.100.24/32", "2923", "16933559"),
    ("172.16.100.25/32", "2922", "16933558"),
    ("172.16.100.26/32", "2920", "16933557"),
    ("172.16.100.27/32", "2919", "16933556"),
    ("172.16.100.28/32", "2918", "16933555"),
    ("172.16.100.29/32", "2917", "16933554"),
    ("172.16.100.3/32", "5463", "16933535"),
    ("172.16.100.30/32", "2916", "16933553"),
    ("172.16.100.31/32", "2915", "16933552"),
    ("172.16.100.32/32", "6630", "16933506"),
    ("172.16.100.33/32", "6629", "16933505"),
    ("172.16.100.34/32", "6627", "16933504"),
    ("172.16.100.35/32", "6626", "16933503"),
    ("172.16.100.36/32", "6625", "16933502"),
    ("172.16.100.37/32", "6624", "16933501"),
    ("172.16.100.38/32", "6623", "16933500"),
    ("172.16.100.4/32", "5462", "16933534"),
    ("172.16.100.5/32", "5461", "16933533"),
    ("172.16.100.6/32", "5460", "16933532"),
    ("172.16.100.7/32", "5459", "16933531"),
    ("172.16.100.8/32", "2906", "16933543"),
    ("172.16.100.9/32", "2905", "16933542"),
]

_prefixes = {p: _vpnv4_entry(label, version) for p, label, version in _PREFIX_TABLE}
# 172.16.100.2/32 is the one irregular entry: it carries a standard
# community and the original device output had no update-group for it.
_prefixes["172.16.100.2/32"] = _vpnv4_entry(
    "2468", "16933598", community="62000:2", update_group=None
)

expected_output = {
    "instance": {
        "default": {
            "vrf": {
                "L3VPN-0050": {
                    "address_family": {
                        "vpnv4": {
                            "default_vrf": "L3VPN-0050",
                            "prefixes": _prefixes,
                            "route_distinguisher": "5918:50",
                        }
                    }
                }
            }
        }
    }
}
| 60.936994 | 86 | 0.203084 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 17,587 | 0.259775 |
ddf4a85d16651173e5cecae5af1d1cdea3c48b47
| 154 |
py
|
Python
|
pages/apps.py
|
slideclimb/django-template
|
c489c118c47fe3510f7d5a36781d2d8835ef5ba6
|
[
"MIT"
] | 2 |
2019-07-11T08:24:20.000Z
|
2021-09-16T22:08:42.000Z
|
pages/apps.py
|
slideclimb/django-template
|
c489c118c47fe3510f7d5a36781d2d8835ef5ba6
|
[
"MIT"
] | 3 |
2021-06-09T17:17:16.000Z
|
2021-06-10T17:25:04.000Z
|
pages/apps.py
|
slideclimb/django-template
|
c489c118c47fe3510f7d5a36781d2d8835ef5ba6
|
[
"MIT"
] | 1 |
2018-10-07T15:32:27.000Z
|
2018-10-07T15:32:27.000Z
|
""" This file is here to register apps. """
from django.apps import AppConfig
class HomeConfig(AppConfig):
""" Register app. """
name = 'pages'
| 19.25 | 43 | 0.655844 | 73 | 0.474026 | 0 | 0 | 0 | 0 | 0 | 0 | 71 | 0.461039 |
ddf5245a022beb682381f18774e07b7d784a9065
| 1,150 |
py
|
Python
|
Tests/test_protected_views.py
|
ShavImigne/PlayChess
|
1eb0dcaf1d58b02bdc880f8e51857a87696ee461
|
[
"MIT"
] | 12 |
2018-05-02T13:13:41.000Z
|
2022-03-13T15:37:29.000Z
|
Tests/test_protected_views.py
|
ShavImigne/PlayChess
|
1eb0dcaf1d58b02bdc880f8e51857a87696ee461
|
[
"MIT"
] | 24 |
2018-05-03T13:06:56.000Z
|
2019-08-13T14:49:03.000Z
|
Tests/test_protected_views.py
|
ShavImigne/PlayChess
|
1eb0dcaf1d58b02bdc880f8e51857a87696ee461
|
[
"MIT"
] | 17 |
2018-05-03T13:46:40.000Z
|
2021-06-26T13:57:05.000Z
|
from .client import client
from .config import db_pass, db_user
import string
import random
def test_login_logout(client):
return_login_status = client.post('/login', data={"username": db_user, "password": db_pass})
# Redirected to home
assert return_login_status.status_code==302
# Now that user is logged in, he can access home screen
home_view = client.get('/')
assert home_view.status_code==200
# Making sure logged in user cannot access logged out views by making sure
# they have a status of 302
random_username = "".join(random.choices(string.ascii_lowercase, k=20))
login_view = client.get('/login')
verify_view = client.get('/verify/'+random_username)
retry_verify_view = client.get('/verify/retry')
assert (login_view.status_code==302 and verify_view.status_code==302 and retry_verify_view.status_code==302)
# logout (only possible for logged in users)
return_logout_status = client.get('/logout')
assert return_logout_status.status_code==302
# Now home screen should be inaccessbile
return_home_view = client.get('/')
assert return_home_view.status_code!=200
| 41.071429 | 112 | 0.736522 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 339 | 0.294783 |
ddf773cd5e3b923f5c8398a0b49c16c0818baa89
| 1,575 |
py
|
Python
|
Tiny-Blockchain/Blockchain.py
|
gusjasponde/Tiny-Blockchain
|
e1abe11f70b845cc461afd329c54d5d7d79fd9c8
|
[
"MIT"
] | 2 |
2021-04-17T17:36:45.000Z
|
2021-06-22T21:48:54.000Z
|
Tiny-Blockchain/Blockchain.py
|
gusjasponde/Tiny-Blockchain
|
e1abe11f70b845cc461afd329c54d5d7d79fd9c8
|
[
"MIT"
] | null | null | null |
Tiny-Blockchain/Blockchain.py
|
gusjasponde/Tiny-Blockchain
|
e1abe11f70b845cc461afd329c54d5d7d79fd9c8
|
[
"MIT"
] | 1 |
2017-10-24T00:53:49.000Z
|
2017-10-24T00:53:49.000Z
|
import hashlib
import json
import datetime
import Util
#Defining the block into our blockchain
class Block:
def __init__(self, index, timestamp, data, previous_hash):
self.index = index
self.timestamp = timestamp.isoformat()
self.data = data
self.previous_hash = previous_hash
self.hash = self.hash_block().encode('utf8')
def hash_block(self):
sha = hashlib.sha256()
blkstr = (str(self.index) +
str(self.timestamp) +
str(self.data) +
str(self.previous_hash))
sha.update(blkstr.encode('utf-8'))
return sha.hexdigest()
def reprJSON(self):
return dict(index=self.index, timestamp=self.timestamp, data=self.data, previous_hash=self.previous_hash, hash=self.hash)
#Genesis block creator
def create_genesis_block():
return Block(0, datetime.datetime.now(), "Genesis block", "0")
def next_block(last_block):
this_data = json.dumps({
"proof-of-work": last_block.index + 1,
"transactions": "Initial block"
})
return Block(last_block.index + 1, datetime.datetime.now(), this_data, last_block.hash)
#Code for running the blockchain
blockchain = [create_genesis_block()]
previous_block = blockchain[0]
genesis_blocks_qty = 10
#adding blocks in the whole chain
for i in range(0, genesis_blocks_qty):
block_to_add = next_block(previous_block)
blockchain.append(block_to_add)
previous_block = block_to_add
print "Block #",block_to_add.index," added"
print "Hash: ",block_to_add.hash
| 30.288462 | 129 | 0.675556 | 711 | 0.451429 | 0 | 0 | 0 | 0 | 0 | 0 | 226 | 0.143492 |
ddf791c1dd4726087d87f8647d381ae32e01c53c
| 437 |
py
|
Python
|
6. Heap exploitation/exploit_3.py
|
MBWlodarczyk/bso_project
|
a4620fb18d7f789d917627232dc85ef9bcad7e3d
|
[
"MIT"
] | null | null | null |
6. Heap exploitation/exploit_3.py
|
MBWlodarczyk/bso_project
|
a4620fb18d7f789d917627232dc85ef9bcad7e3d
|
[
"MIT"
] | null | null | null |
6. Heap exploitation/exploit_3.py
|
MBWlodarczyk/bso_project
|
a4620fb18d7f789d917627232dc85ef9bcad7e3d
|
[
"MIT"
] | 1 |
2021-05-27T22:04:35.000Z
|
2021-05-27T22:04:35.000Z
|
from pwn import *
def malloc(name):
p.sendlineafter('>> ','1 '+name)
def free(id):
p.sendlineafter('>> ', '2 '+str(id))
p = process("./vuln_3.o")
gdb.attach(p)
for i in range(8):
malloc('a')
malloc('a')
malloc('a')
for i in range(9):
free(i-1)
free(8)
free(9)
free(8)
for i in range(8):
malloc('a')
malloc('\x48\xc0\x04\x08')
malloc('1')
malloc('1')
p.sendlineafter('>> ','1 ' +'admin')
p.interactive()
| 11.5 | 40 | 0.567506 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 82 | 0.187643 |
ddf7f46a03e2f875d36d3fadd6c70b90528b78f0
| 1,202 |
py
|
Python
|
setup.py
|
originell/sentry-twilio
|
716c444649b38b68f9d6a02986de090bb7e580b9
|
[
"MIT"
] | null | null | null |
setup.py
|
originell/sentry-twilio
|
716c444649b38b68f9d6a02986de090bb7e580b9
|
[
"MIT"
] | 1 |
2017-02-01T16:49:25.000Z
|
2017-02-01T16:49:25.000Z
|
setup.py
|
originell/sentry-twilio
|
716c444649b38b68f9d6a02986de090bb7e580b9
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
"""
sentry-twilio
=============
Sentry Notification plugin for Twilio Programmable SMS.
:copyright: 2017 Luis Nell.
:license: MIT, see LICENSE for more details.
"""
from __future__ import absolute_import
from setuptools import setup, find_packages
VERSION = '1.0'
install_requires = [
'twilio==6.0.0rc10',
]
setup(
name='sentry-twilio',
version=VERSION,
author='Luis Nell',
author_email='[email protected]',
url='https://github.com/originell/sentry-twilio',
description='Sentry Notification plugin for Twilio Programmable SMS.',
long_description=__doc__,
license='MIT',
package_dir={'': 'src'},
packages=find_packages('src'),
zip_safe=False,
install_requires=install_requires,
include_package_data=True,
entry_points={
'sentry.apps': [
'twilio = sentry_twilio',
],
'sentry.plugins': [
'twilio = sentry_twilio.plugin:TwilioPlugin',
],
},
classifiers=[
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'Topic :: Software Development'
],
)
| 24.04 | 74 | 0.645591 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 620 | 0.515807 |
ddf8e32544c5c3cd5719527188568b3163339c08
| 3,146 |
py
|
Python
|
tests/test_refresh_subscription.py
|
Fak3/websubsub
|
f7ca8953197104483b152367c716028d841facbb
|
[
"MIT"
] | 4 |
2018-03-18T03:44:24.000Z
|
2019-08-10T00:40:54.000Z
|
tests/test_refresh_subscription.py
|
Fak3/websubsub
|
f7ca8953197104483b152367c716028d841facbb
|
[
"MIT"
] | 9 |
2020-02-12T13:38:38.000Z
|
2021-06-02T01:03:43.000Z
|
tests/test_refresh_subscription.py
|
Fak3/websubsub
|
f7ca8953197104483b152367c716028d841facbb
|
[
"MIT"
] | 2 |
2019-08-10T00:41:00.000Z
|
2020-02-09T10:09:59.000Z
|
import re
from datetime import timedelta
import responses
from django.test import override_settings
from django.utils.timezone import now
from model_mommy.mommy import make
from websubsub.models import Subscription
from websubsub.tasks import refresh_subscriptions, retry_failed
from .base import BaseTestCase, method_url_body
class RefreshSubscriptionsTest(BaseTestCase):
"""
When refresh_subscriptions() task is called, then only verified Subscription with
lease_expiration_time ending soon should be subscribed again.
"""
def test_refresh(self):
# GIVEN hub which returns HTTP_202_ACCEPTED
responses.add('POST', 'http://hub.io', status=202)
# AND verified Subscription with expiration time in 3 hours
torefresh = make(Subscription,
hub_url='http://hub.io',
topic='news-topic1',
callback_urlname='wscallback',
lease_expiration_time=now() + timedelta(hours=3),
subscribe_status='verified'
)
# AND explicitly unsubscribed verified Subscription with
# expiration time in 3 hours
unsubscribed = make(Subscription,
hub_url='http://hub.io',
topic='news-topic2',
callback_urlname='wscallback',
lease_expiration_time=now() + timedelta(hours=3),
subscribe_status='verified',
unsubscribe_status='verified'
)
# AND verified Subscription with expiration time in 3 days
fresh = make(Subscription,
hub_url='http://hub.io',
topic='news-topic3',
callback_urlname='wscallback',
lease_expiration_time=now() + timedelta(days=3),
subscribe_status='verified'
)
# AND non-verified Subscription with expiration time in 3 hours
unverified = make(Subscription,
hub_url='http://hub.io',
topic='news-topic4',
callback_urlname='wscallback',
lease_expiration_time=now() + timedelta(hours=3),
subscribe_status='requesting'
)
# WHEN refresh_subscriptions task is called
refresh_subscriptions.delay()
#retry_failed.delay()
# THEN no new Subscription should get created
assert len(Subscription.objects.all()) == 4
torefresh = Subscription.objects.get(id=torefresh.id)
# AND one POST request to hub should be sent
self.assertEqual([method_url_body(x) for x in responses.calls],
[
('POST', 'http://hub.io/', {
'hub.mode': ['subscribe'],
'hub.topic': [torefresh.topic],
'hub.callback': [torefresh.callback_url]
}),
]
)
# AND only this subscription_status should be changed from `verified` to `verifying`
assert dict(Subscription.objects.values_list('id', 'subscribe_status')) == {
torefresh.id: 'verifying', # changed
unsubscribed.id: 'verified',
fresh.id: 'verified',
unverified.id: 'requesting',
}
| 35.75 | 92 | 0.61602 | 2,814 | 0.894469 | 0 | 0 | 0 | 0 | 0 | 0 | 1,083 | 0.344247 |
ddf9fcd921f244664cbca84e5f6bac067c77d492
| 281 |
py
|
Python
|
ares/physics/__init__.py
|
astrojhgu/ares
|
42008c8e4bf79f0b000cc833e02a86510bce7611
|
[
"MIT"
] | 1 |
2019-01-04T15:13:18.000Z
|
2019-01-04T15:13:18.000Z
|
ares/physics/__init__.py
|
astrojhgu/ares
|
42008c8e4bf79f0b000cc833e02a86510bce7611
|
[
"MIT"
] | null | null | null |
ares/physics/__init__.py
|
astrojhgu/ares
|
42008c8e4bf79f0b000cc833e02a86510bce7611
|
[
"MIT"
] | null | null | null |
import Constants
from .Hydrogen import Hydrogen
from .Cosmology import Cosmology
from .HaloMassFunction import HaloMassFunction
from .RateCoefficients import RateCoefficients
from .SecondaryElectrons import SecondaryElectrons
from .CrossSections import PhotoIonizationCrossSection
| 35.125 | 54 | 0.886121 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
ddfc5813cf287a659f99142896404836acf1a2ad
| 9,452 |
py
|
Python
|
src/agents/base.py
|
anindex/drqn-study
|
ab357178bbe6a1e09eda0f19583e8e8444bf4a54
|
[
"MIT"
] | null | null | null |
src/agents/base.py
|
anindex/drqn-study
|
ab357178bbe6a1e09eda0f19583e8e8444bf4a54
|
[
"MIT"
] | null | null | null |
src/agents/base.py
|
anindex/drqn-study
|
ab357178bbe6a1e09eda0f19583e8e8444bf4a54
|
[
"MIT"
] | null | null | null |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import logging
import numpy as np
import random
from collections import deque
import torch
from os.path import exists
from torch.nn.functional import smooth_l1_loss, mse_loss # noqa
from torch.optim import Adam, Adagrad, RMSprop # noqa
from tensorboardX import SummaryWriter
def adjust_learning_rate(optimizer, lr):
for param_group in optimizer.param_groups:
param_group['lr'] = lr
class Agent(object):
def __init__(self, env_prototype, model_prototype, memory_prototype=None, **kwargs):
# env
self.env_prototype = env_prototype
self.env_params = kwargs.get('env')
self.env = self.env_prototype(**self.env_params)
self.state_shape = self.env.state_shape
self.action_dim = self.env.action_dim
# model
self.model_prototype = model_prototype
self.model_params = kwargs.get('model')
self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
self.model_params['use_cuda'] = torch.cuda.is_available()
self.model_params['stack_len'] = self.env_params['stack_len']
self.model_params['state_shape'] = (self.model_params['stack_len'] * self.state_shape[0], *self.state_shape[1:]) # state_shape in model: (stack_len * C, H, W)
self.model_params['action_dim'] = self.action_dim
self.model = None
# memory
self.memory_prototype = memory_prototype
self.memory_type = kwargs.get('memory_type', 'random')
self.memory_params = kwargs.get('memory')
self.memory = None
random.seed(self.env_params['seed'])
# logging
self.logger = kwargs.get('logger', logging.getLogger(__name__))
self.model_file = self.model_params.get('model_file', None)
self.log_folder = kwargs.get('log_folder', 'logs')
self.use_tensorboard = kwargs.get('use_tensorboard', True)
if self.use_tensorboard:
self.writer = SummaryWriter(self.log_folder)
self.log_step_interval = kwargs.get('log_step_interval', 100)
self.log_episode_interval = kwargs.get('log_episode_interval', 10)
self.train_visualize = kwargs.get('train_visualize', False)
self.save_best = kwargs.get('save_best', True)
if self.save_best:
self.best_step = None # NOTE: achieves best_reward at this step
self.best_reward = None # NOTE: only save a new model if achieves higher reward
self.retrain = kwargs.get('retrain', True)
self.solved_stop = kwargs.get('solved_stop', True)
self.log_window_size = kwargs.get('log_window_size', 100)
self._reset_training_loggings()
# agent_params
# criteria and optimizer
self.value_criteria = eval(kwargs.get('value_criteria', 'mse_loss'))
self.optimizer_class = eval(kwargs.get('optimizer', 'Adam'))
# hyperparameters
self.episodes = kwargs.get('episodes', 100000)
self.steps = kwargs.get('steps', 22000000)
self.random_eps = kwargs.get('random_eps', 50)
self.learn_start = kwargs.get('learn_start', 1000) # num steps to fill the memory
self.gamma = kwargs.get('gamma', 0.99)
self.clip_grad = kwargs.get('clip_grad', float('inf'))
self.lr = kwargs.get('lr', 0.0001)
self.lr_decay = kwargs.get('lr_decay', False)
self.weight_decay = kwargs.get('weight_decay', 0.)
self.eps_start = kwargs.get('eps_start', 1.0)
self.eps_decay = kwargs.get('eps_decay', 50000) # num of decaying steps
self.eps_end = kwargs.get('eps_end', 0.01)
self.prog_freq = kwargs.get('prog_freq', 2500)
self.train_interval = kwargs.get('train_interval', 1)
self.memory_interval = kwargs.get('memory_interval', 1)
self.action_repetition = kwargs.get('action_repetition', 1)
self.test_nepisodes = kwargs.get('test_nepisodes', 1)
self.target_model_update = kwargs.get('target_model_update', 1000) # update every # steps
self.batch_size = kwargs.get('batch_size', 32)
self.bootstrap_type = kwargs.get('bootstrap_type', 'double_q')
# count step & episode
self.step = 0
self.episode = 0
def _load_model(self):
if self.model_file is not None and exists(self.model_file):
self.model.load_state_dict(torch.load(self.model_file))
self.logger.info('Loaded Model: ' + self.model_file)
else:
self.logger.info('No pretrained Model. Will train from scratch.')
def _save_model(self, step, curr_reward):
if self.model is None:
return
if self.save_best:
if self.best_step is None:
self.best_step = step
self.best_reward = curr_reward
if curr_reward >= self.best_reward:
self.best_step = step
self.best_reward = curr_reward
torch.save(self.model.state_dict(), self.model_file)
self.logger.info('Saved model: %s at best steps: %d and best reward: %d '
% (self.model_file, self.best_step, self.best_reward))
else:
torch.save(self.model.state_dict(), self.model_file)
self.logger.info('Saved model: %s after %d steps: ' % (self.model_file, step))
def _visualize(self, visualize=True):
if visualize:
self.env.visual()
self.env.render()
def _reset_training_loggings(self):
self.window_scores = deque(maxlen=self.log_window_size)
self.window_max_abs_q = deque(maxlen=self.log_window_size)
self.max_abs_q_log = [0] # per step
self.loss_log = [0] # per step
self.total_avg_score_log = [0] # per eps
self.run_avg_score_log = [0] # per eps
self.step_log = [0]
self.eps_log = [0]
def _reset_experiences(self):
self.env.reset()
if self.memory is not None:
self.memory.reset()
def _init_model(self, training=False):
self.model = self.model_prototype(name='Current Model', **self.model_params).to(self.device)
if not self.retrain:
self._load_model() # load pretrained model if provided
self.model.train(mode=training)
if training:
# target_model
self.target_model = self.model_prototype(name='Target Model', **self.model_params).to(self.device)
self._update_target_model_hard()
self.target_model.eval()
# memory
if self.memory_prototype is not None:
self.memory = self.memory_prototype(**self.memory_params)
# experience & states
self._reset_experiences()
def _store_episode(self, episode):
if self.memory_type == 'episodic':
self.memory.add_episode(episode)
else:
self.logger.warn('Only episodic memory can add episode!')
def _store_experience(self, experience, error=0.):
# Store most recent experience in memory.
if self.step % self.memory_interval == 0:
if self.memory_type == 'episodic':
self.memory.add(experience)
elif self.memory_type == 'random':
self.memory.add(experience, error)
else:
raise ValueError('Memory type %s is unsupported!' % self.memory_type)
# Hard update every `target_model_update` steps.
def _update_target_model_hard(self):
self.target_model.load_state_dict(self.model.state_dict())
# Soft update with `(1 - target_model_update) * old + target_model_update * new`.
def _update_target_model_soft(self):
for target_param, local_param in zip(self.target_model.parameters(), self.model.parameters()):
target_param.data.copy_(self.target_model_update * local_param.data + (1.0 - self.target_model_update) * target_param.data)
def _epsilon_greedy(self, q_values):
self.eps = self.eps_end + max(0, (self.eps_start - self.eps_end) * (self.eps_decay - self.episode) / self.eps_decay)
# choose action
if np.random.uniform() < self.eps: # then we choose a random action
action = random.randrange(self.action_dim)
else: # then we choose the greedy action
if self.model_params['use_cuda']:
action = np.argmax(q_values.cpu().numpy())
else:
action = np.argmax(q_values.numpy())
return action
def _create_zero_lstm_hidden(self, batch_size=1):
return (torch.zeros(self.model.num_lstm_layer, batch_size, self.model.hidden_dim).type(self.dtype).to(self.device),
torch.zeros(self.model.num_lstm_layer, batch_size, self.model.hidden_dim).type(self.dtype).to(self.device))
def _get_loss(self, experiences):
raise NotImplementedError()
def _forward(self, states):
raise NotImplementedError()
def _backward(self, experience):
raise NotImplementedError()
def fit_model(self): # training
raise NotImplementedError()
def test_model(self): # testing pre-trained models
raise NotImplementedError()
def set_seed(self, seed=0):
random.seed(seed)
self.env.env.seed(seed)
@property
def dtype(self):
return self.model.dtype
| 44.375587 | 167 | 0.643779 | 8,942 | 0.946043 | 0 | 0 | 62 | 0.006559 | 0 | 0 | 1,628 | 0.172239 |
ddfd26bd635a43dc9642fa41b92d7f6a9fd3de78
| 1,449 |
py
|
Python
|
mongo_commander/widgets.py
|
thieman/mongo_commander
|
407cccb1abdf16b3eb07f813f5dd5d9152930f4d
|
[
"MIT"
] | 2 |
2015-02-22T04:52:22.000Z
|
2018-12-14T10:33:25.000Z
|
mongo_commander/widgets.py
|
thieman/mongo_commander
|
407cccb1abdf16b3eb07f813f5dd5d9152930f4d
|
[
"MIT"
] | null | null | null |
mongo_commander/widgets.py
|
thieman/mongo_commander
|
407cccb1abdf16b3eb07f813f5dd5d9152930f4d
|
[
"MIT"
] | null | null | null |
"""Widgets abstract out common View rendering patterns like displaying
a list of logging messages or a bar chart. They typically take the ClusterData
object, a window, and a list of keys they should care about from ClusterData.
They then draw directly onto the window."""
from operator import itemgetter
from .curses_util import movedown, movex
class Widget(object):
def __init__(self, data):
self.data = data
self.source_keys = []
def apply_to_window(self, window):
raise NotImplementedError()
class StreamWidget(Widget):
"""Display line-by-line text data from a stream."""
def _gather_data(self):
return reduce(list.__add__, map(lambda key: self.data.get(key, []), self.source_keys))
def apply_to_window(self, window):
data_for_render = self._gather_data()
if not data_for_render:
return
window.move(0, 0)
first_jump = len(data_for_render[0]['time'].strftime('%c')) + 3
second_jump = first_jump + max(map(len, map(itemgetter('node_name'), data_for_render))) + 3
for datum in sorted(data_for_render, key=itemgetter('time'))[-10:]:
window.addstr('{} - '.format(datum['time'].strftime('%c')))
movex(window, first_jump)
window.addstr('{} - '.format(datum['node_name']))
movex(window, second_jump)
window.addstr(str(datum['data']).strip())
movedown(window, x=0)
| 39.162162 | 99 | 0.655625 | 1,098 | 0.757764 | 0 | 0 | 0 | 0 | 0 | 0 | 390 | 0.269151 |
ddfe5b6a2bd63f44708eacd4d1f196837c88804e
| 958 |
py
|
Python
|
tests/feature_propagation_test.py
|
emalgorithm/feature-propagation
|
de9ec54d5c035abe8d52d6ac4079156cc537e489
|
[
"Apache-2.0"
] | 20 |
2022-03-09T00:06:23.000Z
|
2022-03-18T09:59:36.000Z
|
tests/feature_propagation_test.py
|
emalgorithm/feature-propagation
|
de9ec54d5c035abe8d52d6ac4079156cc537e489
|
[
"Apache-2.0"
] | 2 |
2022-03-14T22:00:58.000Z
|
2022-03-21T02:11:50.000Z
|
tests/feature_propagation_test.py
|
twitter-research/feature-propagation
|
af2733589eab4023fca67f7e71a3b46ddbbea8cd
|
[
"Apache-2.0"
] | 3 |
2022-03-09T05:36:53.000Z
|
2022-03-11T13:53:45.000Z
|
"""
Copyright 2020 Twitter, Inc.
SPDX-License-Identifier: Apache-2.0
"""
import unittest
import math
import torch
from feature_propagation import FeaturePropagation
class TestFeaturePropagation(unittest.TestCase):
def test_feature_propagation(self):
X = torch.Tensor([1 / 2, 0, 1 / 3, 0]).reshape(-1, 1)
node_mask = torch.BoolTensor([True, False, True, False])
edge_index = torch.LongTensor(
[[0, 2], [2, 0], [0, 3], [3, 0], [1, 2], [2, 1], [1, 3], [3, 1], [2, 3], [3, 2]]
).T
expected_X_propagated = torch.Tensor(
[[1 / 2], [(1 / math.sqrt(6)) * (1 / 3)], [1 / 3], [(1 / 3) * (1 / 3) + (1 / math.sqrt(6)) * (1 / 2)]]
)
fp = FeaturePropagation(num_iterations=1)
X_propagated = fp.propagate(x=X, edge_index=edge_index, mask=node_mask)
self.assertTrue(torch.allclose(expected_X_propagated, X_propagated))
if __name__ == "__main__":
unittest.main()
| 29.9375 | 114 | 0.59499 | 739 | 0.771399 | 0 | 0 | 0 | 0 | 0 | 0 | 82 | 0.085595 |
ddfeb380f562b06f02d54e1879c575812aad04dd
| 1,282 |
py
|
Python
|
publication_backbone/views/promotion.py
|
Excentrics/publication-backbone
|
65c9820308b09a6ae1086c265f8d49e36f3724b9
|
[
"BSD-3-Clause"
] | 6 |
2016-05-19T14:59:51.000Z
|
2020-03-19T10:08:29.000Z
|
publication_backbone/views/promotion.py
|
Excentrics/publication-backbone
|
65c9820308b09a6ae1086c265f8d49e36f3724b9
|
[
"BSD-3-Clause"
] | null | null | null |
publication_backbone/views/promotion.py
|
Excentrics/publication-backbone
|
65c9820308b09a6ae1086c265f8d49e36f3724b9
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from publication_backbone.views.publication import PublicationListHybridView
from publication_backbone import conf as config
#==============================================================================
# PromotionListHybridView
#==============================================================================
class PromotionListHybridView(PublicationListHybridView):
js_template_name_suffix = '_promotion_list'
template_name_suffix = '_promotion_list'
def get_context_data(self, **kwargs):
context = super(PromotionListHybridView, self).get_context_data(**kwargs)
category = context.get('category')
pid = self.kwargs.get('pid') or self.request.REQUEST.get('pid') or category.id if category else 'all'
context.update({
'name': "%s_promotion_%s" % (context.get('name', ''), pid )
})
return context
def get_paginate_by(self, queryset):
"""
Get the number of items to paginate by, or ``None`` for no pagination.
"""
return config.PUBLICATION_BACKBONE_PROMOTION_PER_PAGE_ITEMS_COUNT
def get_raw_order_by(self):
return self.kwargs.get('order_by') or self.request.REQUEST.get('order_by') or config.PUBLICATION_BACKBONE_PROMOTION_ORDER_BY
| 42.733333 | 132 | 0.620125 | 946 | 0.73791 | 0 | 0 | 0 | 0 | 0 | 0 | 410 | 0.319813 |
ddff02eefab9048a47d0d1a8a7f90b6f135ea01d
| 2,427 |
py
|
Python
|
tests/test_file.py
|
gaiadhi/traDSSAT
|
1d5615dbd4965bab5c2740134c706c1748ff5fae
|
[
"MIT"
] | 6 |
2020-10-05T11:50:37.000Z
|
2022-02-24T08:36:22.000Z
|
tests/test_file.py
|
gaiadhi/traDSSAT
|
1d5615dbd4965bab5c2740134c706c1748ff5fae
|
[
"MIT"
] | 23 |
2018-11-08T19:16:36.000Z
|
2021-07-20T23:34:18.000Z
|
tests/test_file.py
|
gaiadhi/traDSSAT
|
1d5615dbd4965bab5c2740134c706c1748ff5fae
|
[
"MIT"
] | 9 |
2018-11-06T21:04:07.000Z
|
2021-06-19T05:43:24.000Z
|
import os
import unittest
import numpy.testing as npt
from tradssat import SoilFile, WTHFile, MTHFile, ExpFile, CULFile, ECOFile, DSSATResults
from tradssat.out import SoilTempOut, SoilNiOut, SummaryOut, PlantGroOut, ETOut, SoilWatOut, MulchOut
from tests.utils import _test_read, _test_write, rsrcs, read_json, get_ref_var
input_classes = [SoilFile, WTHFile, MTHFile, ExpFile, CULFile, ECOFile]
rsrcs_out = os.path.join(rsrcs, 'Out')
output_classes = [PlantGroOut, SoilNiOut, SoilTempOut, SoilWatOut, MulchOut, ETOut]
final_out_classes = [SummaryOut]
# Inputs must be read and written
class TestInputs(unittest.TestCase):
def test_read(self):
for inp_class in input_classes:
with self.subTest(inp_class.__name__):
_test_read(inp_class, folder=rsrcs, testcase=self)
def test_write(self):
for inp_class in input_classes:
with self.subTest(inp_class.__name__):
_test_write(inp_class, rsrcs, testcase=self)
# Outputs are only read, not written
class TestOutputs(unittest.TestCase):
def test_read(self):
for out_class in output_classes:
with self.subTest(out_class.__name__):
_test_read(out_class, folder=rsrcs_out, testcase=self)
class TestFinalOutputs(unittest.TestCase):
def test_read(self):
for final_out_class in final_out_classes:
with self.subTest(final_out_class.__name__):
_test_read(final_out_class, folder=rsrcs_out, testcase=self)
class TestOutHeader(unittest.TestCase):
def setUp(self):
self.path = f'{rsrcs_out}/Cassava/headerTest'
self.ref = read_json(f'{self.path}/_ref_PlantGro.OUT.json')
self.instance = DSSATResults(self.path)
def test_entire_array(self):
actual = self.instance.get_value("HWAD", trt=1, run=2)
expected = get_ref_var(self.ref, "HWAD", trt=1, run=2)
npt.assert_equal(actual, expected)
def test_time_specific(self):
actual = self.instance.get_value("TWAD", trt=2, run=4, t=194, at='DAP')
expected = 9394
npt.assert_equal(actual, expected)
def test_wrong_values(self):
"""
Header var 'run' is unique for each simulation.
For the current ref file the correct input is: 'trt = 2' and 'run = 4'
"""
with npt.assert_raises(StopIteration):
self.instance.get_value("LAID", trt=2, run=2)
| 35.173913 | 101 | 0.689328 | 1,788 | 0.736712 | 0 | 0 | 0 | 0 | 0 | 0 | 323 | 0.133086 |
fb00206f76f0396ffc60257de95610a6a4ddebea
| 2,840 |
py
|
Python
|
airflow_spark_k8s/hooks/kubernetes.py
|
roitvt/airflow-spark-k8s
|
cd2a0ec63e1fb9ad43beb725a65e4d65a4d85206
|
[
"Apache-2.0"
] | 2 |
2020-04-26T11:12:11.000Z
|
2020-09-14T16:36:42.000Z
|
airflow_spark_k8s/hooks/kubernetes.py
|
roitvt/airflow-spark-k8s
|
cd2a0ec63e1fb9ad43beb725a65e4d65a4d85206
|
[
"Apache-2.0"
] | 1 |
2020-04-14T18:20:20.000Z
|
2020-04-14T18:26:27.000Z
|
airflow_spark_k8s/hooks/kubernetes.py
|
roitvt/airflow-spark-k8s
|
cd2a0ec63e1fb9ad43beb725a65e4d65a4d85206
|
[
"Apache-2.0"
] | null | null | null |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import tempfile
from kubernetes import client, config
from airflow.hooks.base_hook import BaseHook
class KubernetesHook(BaseHook):
"""
Creates Kubernetes API connection.
:param conn_id: the connection to Kubernetes cluster
"""
def __init__(
self,
conn_id="kubernetes_default"
):
self.connection = self.get_connection(conn_id)
self.extras = self.connection.extra_dejson
def get_conn(self):
"""
Returns kubernetes api session for use with requests
"""
if self._get_field(("in_cluster")):
self.log.debug("loading kube_config from: in_cluster configuration")
config.load_incluster_config()
elif self._get_field("kube_config") is None or self._get_field("kube_config") == '':
self.log.debug("loading kube_config from: default file")
config.load_kube_config()
else:
with tempfile.NamedTemporaryFile() as temp_config:
self.log.debug("loading kube_config from: connection kube_config")
temp_config.write(self._get_field("kube_config").encode())
temp_config.flush()
config.load_kube_config(temp_config.name)
temp_config.close()
return client.ApiClient()
def get_namespace(self):
"""
Returns the namespace that defined in the connection
"""
return self._get_field("namespace", default="default")
def _get_field(self, field_name, default=None):
"""
Fetches a field from extras, and returns it. This is some Airflow
magic. The kubernetes hook type adds custom UI elements
to the hook page, which allow admins to specify in_cluster configutation, kube_config, namespace etc.
They get formatted as shown below.
"""
full_field_name = 'extra__kubernetes__{}'.format(field_name)
if full_field_name in self.extras:
return self.extras[full_field_name]
else:
return default
| 37.866667 | 109 | 0.679225 | 1,951 | 0.686972 | 0 | 0 | 0 | 0 | 0 | 0 | 1,593 | 0.560915 |
fb00a41d3f7f3756cedf6911bccc4f0b80b7ea08
| 325 |
py
|
Python
|
tests/test_main.py
|
david-kirby/gh-action-docs
|
b231d14b0b629b4f308eba6cff50a35a98c9f521
|
[
"MIT"
] | 1 |
2021-03-21T14:31:46.000Z
|
2021-03-21T14:31:46.000Z
|
tests/test_main.py
|
david-kirby/gh-action-docs
|
b231d14b0b629b4f308eba6cff50a35a98c9f521
|
[
"MIT"
] | null | null | null |
tests/test_main.py
|
david-kirby/gh-action-docs
|
b231d14b0b629b4f308eba6cff50a35a98c9f521
|
[
"MIT"
] | null | null | null |
import logging
import unittest
from src.gh_action_docs import app
logging.disable(logging.CRITICAL)
class TestActionFileCheck(unittest.TestCase):
def test_no_files_found(self):
results = app.check_for_file("not-existent-file")
self.assertFalse(results)
if __name__ == "__main__":
unittest.main()
| 20.3125 | 57 | 0.747692 | 172 | 0.529231 | 0 | 0 | 0 | 0 | 0 | 0 | 29 | 0.089231 |
fb00e00f9183ef8f33d3c9279268db8384609198
| 2,785 |
py
|
Python
|
astwro/tools/pickstat.py
|
majkelx/astwro
|
4a9bbe3e4757c4076ad7c0d90cf08e38dab4e794
|
[
"MIT"
] | 6 |
2017-06-15T20:34:51.000Z
|
2020-04-15T14:21:43.000Z
|
astwro/tools/pickstat.py
|
majkelx/astwro
|
4a9bbe3e4757c4076ad7c0d90cf08e38dab4e794
|
[
"MIT"
] | 18 |
2017-08-15T20:53:55.000Z
|
2020-10-05T23:40:34.000Z
|
astwro/tools/pickstat.py
|
majkelx/astwro
|
4a9bbe3e4757c4076ad7c0d90cf08e38dab4e794
|
[
"MIT"
] | 2 |
2017-11-06T15:33:53.000Z
|
2020-10-02T21:06:05.000Z
|
#! /usr/bin/env python
# coding=utf-8
from __future__ import print_function, division
from scipy.stats import sigmaclip
from astwro.pydaophot import daophot
from astwro.pydaophot import fname
from astwro.pydaophot import allstar
from astwro.starlist import read_dao_file
from astwro.starlist import write_ds9_regions
import __commons as commons
# TODO: expand this script to (optionally) leave result files - make it allstar runner
# TODO: implement creating ds9 region (why? or remove that option)
def __do(args):
# 1 do daophot aperture and psf photometry and run allstar
dp = daophot(image_file=args.image)
dp.copy_to_runner_dir(args.coo, fname.COO_FILE)
dp.PHotometry()
dp.copy_to_runner_dir(args.lst, fname.LST_FILE)
dp.PSf()
dp.run(wait=True)
al = allstar(dp.dir)
al.run()
all_s = read_dao_file(al.file_from_runner_dir(fname.ALS_FILE))
# all_s.hist('chi')
return sigmaclip(all_s.chi)[0].mean()
# 2 write regions
# if args.regions:
#
# write_ds9_regions(coo_file+'.reg', )
def __arg_parser():
    """Build the command-line parser: positional image/coo/lst, optional --reg."""
    import argparse
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog=commons.version_string(),
        description='Runs daophot photometry and allstar. Find mean chi of allstar stars. \n\n'
                    'Takes FITS image and star lists: coo - all stars, lst - PSF stars\n'
                    'then performs daophot\'s PH, PS and allstar on them.\n'
                    '(chi is calculated by allstar for every star as \n'
                    ' \"the observed pixel-to-pixel scatter from the model image profile \n'
                    ' DIVIDED BY the expected pixel-to-pixel scatter from the image profile\").\n'
                    'The same mean chi is used as function to be minimized by genetic \n'
                    'algorithm in gapick.py. This script allows quick comparison \n'
                    'between different PSF stars sets.',)
    parser.add_argument('image', type=str,
                        help='FITS image file')
    parser.add_argument('coo', type=str,
                        help='all stars list: coo file')
    parser.add_argument('lst', type=str,
                        help='PSF stars list: lst file')
    parser.add_argument('--reg', action='store_true',
                        help='create ds9 region files <name>.coo.reg and <name>.lst.reg')
    return parser
# Below: standard skeleton for astwro.tools
def main(image, coo, lst, **kwargs):
    """Programmatic entry point: same as the CLI but callable from Python."""
    args = commons.bunch_kwargs(__arg_parser(), positional=[image, coo, lst], **kwargs)
    return __do(args)
def info():
    """Print this tool's usage/help text."""
    commons.info(__arg_parser())
if __name__ == '__main__':
    # Parse CLI arguments and print the sigma-clipped mean chi of the fit.
    __args = __arg_parser().parse_args()
    print(__do(__args))
| 34.8125 | 99 | 0.65386 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,132 | 0.406463 |
fb0391e58115974e49fe694882ae6ffe7af0d172
| 2,330 |
py
|
Python
|
5.4/IPheaderChecksum.py
|
Fecer/Computer-Networks
|
fe4fc5bd1af7f2957aee407675cb018fa83c6735
|
[
"MIT"
] | null | null | null |
5.4/IPheaderChecksum.py
|
Fecer/Computer-Networks
|
fe4fc5bd1af7f2957aee407675cb018fa83c6735
|
[
"MIT"
] | null | null | null |
5.4/IPheaderChecksum.py
|
Fecer/Computer-Networks
|
fe4fc5bd1af7f2957aee407675cb018fa83c6735
|
[
"MIT"
] | null | null | null |
from xml.dom.minidom import parse
def printLine():
    """Print a horizontal rule separating output sections."""
    separator = '------------------------------------------'
    print(separator)
if __name__ == '__main__':
    # Read the IP header (as a hex string) from the config file.
    # NOTE(review): assumes a 20-byte header (40 hex chars) with no IP
    # options -- confirm against the config producer.
    DOMTree = parse("config.xml")
    collection = DOMTree.documentElement
    header = collection.getElementsByTagName('header')[0].childNodes[0].data

    printLine()
    print('Header:\n', header)
    printLine()

    version = header[:1]
    print('Version:\n', version)
    printLine()

    headerLen = header[1:2]
    print('Header Length:\n', headerLen)
    printLine()

    service = header[2:4]
    print('Differentiated Services Field:\n', service)
    printLine()

    totalLen = int(header[4:8], 16)
    print('Total Length:\n', totalLen)
    printLine()

    # Renamed from `id`, which shadowed the builtin.
    identification = int(header[8:12], 16)
    print('Identification:\n', identification)
    printLine()

    flags = int(header[12:16], 16)
    print('Flags and Offset:\n', flags)
    printLine()

    ttl = int(header[16:18], 16)
    print('Time to live:\n', ttl)
    printLine()

    protocol = int(header[18:20], 16)
    print('Protocol:\n', protocol)
    printLine()

    checksum = int(header[20:24], 16)
    print('Checksum in header:\n', checksum, '(' + header[20:24] + ')')
    printLine()

    # Dotted-quad formatting of source / destination addresses.
    source = header[24:32]
    src_ip = '.'.join(str(int(source[k:k + 2], 16)) for k in range(0, 8, 2))
    print('Source IP:\n', src_ip)
    printLine()

    dest = header[32:40]
    dst_ip = '.'.join(str(int(dest[k:k + 2], 16)) for k in range(0, 8, 2))
    print('Destination IP:\n', dst_ip)
    printLine()

    # Sum every 16-bit word of the header except the checksum field itself
    # (hex characters 20..24).
    total = sum(int(header[pos:pos + 4], 16)
                for pos in range(0, 40, 4) if pos != 20)
    # Fold the carry bits back into the low 16 bits (one's-complement sum,
    # RFC 1071).  Bug fix: the previous string-slicing approach
    # (str(hex(total))[2:3] + [3:7]) assumed the sum always had exactly five
    # hex digits and produced a wrong checksum whenever it did not.
    while total >> 16:
        total = (total & 0xffff) + (total >> 16)
    checksum2 = 0xffff - total
    print('Checksum by calculated:')
    print('0x%04x' % checksum2)
| 24.526316 | 76 | 0.546781 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 369 | 0.157827 |
fb04b4d690c1875517b3d05188b4d5a597035143
| 865 |
py
|
Python
|
server/src/dixit/game/test/player.py
|
rcjsuen/dixit-online
|
e7a1c9b5b1a2a92160c6d90397adaa81ddcae91a
|
[
"MIT"
] | 75 |
2016-08-05T10:37:32.000Z
|
2022-01-27T17:05:50.000Z
|
server/src/dixit/game/test/player.py
|
rcjsuen/dixit-online
|
e7a1c9b5b1a2a92160c6d90397adaa81ddcae91a
|
[
"MIT"
] | 15 |
2020-03-19T16:27:59.000Z
|
2022-02-13T08:41:54.000Z
|
server/src/dixit/game/test/player.py
|
rcjsuen/dixit-online
|
e7a1c9b5b1a2a92160c6d90397adaa81ddcae91a
|
[
"MIT"
] | 27 |
2018-02-27T13:32:46.000Z
|
2021-12-26T06:42:08.000Z
|
from django.test import TestCase
from django.contrib.auth.models import User
from dixit import settings
from dixit.game.models.game import Game, GameStatus
from dixit.game.models.player import Player
class PlayerTest(TestCase):
    """Tests for Player ordering within a Game."""

    # Card fixtures loaded before each test; presumably required by Game/Player
    # creation logic -- confirm against dixit.game models.
    fixtures = ['game_testcards.json', ]

    def setUp(self):
        # Two users and one game shared by every test in this class.
        self.user = User.objects.create(username='test', email='test@localhost', password='test')
        self.user2 = User.objects.create(username='test2', email='test2@localhost', password='test2')
        self.game = Game.objects.create(name='test')

    def test_player_order_is_number_of_players(self):
        # Player.number reflects join order: 0 for the first player, 1 for the second.
        player = Player.objects.create(game=self.game, user=self.user, name='player1')
        self.assertEqual(player.number, 0)

        player = Player.objects.create(game=self.game, user=self.user2, name='player2')
        self.assertEqual(player.number, 1)
| 36.041667 | 101 | 0.719075 | 660 | 0.763006 | 0 | 0 | 0 | 0 | 0 | 0 | 104 | 0.120231 |
fb065d69e3fc3e1cb562662179076e51434cc538
| 7,933 |
py
|
Python
|
portality/settings.py
|
genonfire/portality
|
1d94382fa0e6685f106ae18d4a44f4f4b5946771
|
[
"MIT"
] | 2 |
2018-01-16T04:01:57.000Z
|
2020-04-06T14:07:11.000Z
|
portality/settings.py
|
genonfire/portality
|
1d94382fa0e6685f106ae18d4a44f4f4b5946771
|
[
"MIT"
] | null | null | null |
portality/settings.py
|
genonfire/portality
|
1d94382fa0e6685f106ae18d4a44f4f4b5946771
|
[
"MIT"
] | 1 |
2017-12-31T02:45:38.000Z
|
2017-12-31T02:45:38.000Z
|
# -*- coding: utf-8 -*-
"""
Django settings for portality project.
Generated by 'django-admin startproject' using Django 1.8.17.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
from collections import namedtuple
import json
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Theme
THEME = 'haru'
TEMPLATES_DIR = os.path.join(BASE_DIR, 'templates')
THEME_DIR = os.path.join(BASE_DIR, 'templates', THEME)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
if 'DJANGO_DEBUG' in os.environ:
if (os.environ['DJANGO_DEBUG'] == 'Debug'):
DEBUG = True
ALLOWED_HOSTS = ['nolooknews.com', 'gencode.me', 'localhost']
# Application definition
DJANGO_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
)
THIRD_PARTY_APPS = (
'rest_framework',
'graphos',
# 'allauth',
# 'allauth.account',
# 'allauth.socialaccount',
# 'allauth.socialaccount.providers.naver',
)
EDITOR_APPS = (
)
LOCAL_APPS = (
'core',
'giza',
'issue',
'accounts',
)
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + EDITOR_APPS + LOCAL_APPS
# SITE_ID = 1
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'portality.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(THEME_DIR),
os.path.join(TEMPLATES_DIR),
],
'OPTIONS': {
'debug': DEBUG,
'loaders': [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
],
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'core.context_processors.global_settings',
# 'django.template.context_processors.request',
],
},
},
]
WSGI_APPLICATION = 'portality.wsgi.application'
try:
    # Load deployment secrets from secrets.json at the project root
    # (kept out of version control).
    with open(os.path.join(BASE_DIR, "secrets.json")) as f:
        data = json.load(f)
    # Bug fix: namedtuple()'s `verbose` keyword was removed in Python 3.7;
    # passing it raised TypeError here, which (not being IOError) crashed the
    # import instead of loading the secrets.
    SecretsNamedTuple = namedtuple('SecretsNamedTuple', data.keys())
    secrets = SecretsNamedTuple(*[data[x] for x in data.keys()])

    SECRET_KEY = getattr(secrets, "SECRET_KEY")
    DB_NAME = getattr(secrets, "DB_NAME")
    DB_USER = getattr(secrets, "DB_USER")
    DB_PASSWORD = getattr(secrets, "DB_PASSWORD")
    EMAIL_HOST = getattr(secrets, "EMAIL_HOST")
    EMAIL_HOST_USER = getattr(secrets, "EMAIL_HOST_USER")
    EMAIL_HOST_PASSWORD = getattr(secrets, "EMAIL_HOST_PASSWORD")
    # (sic) misspelled name kept -- other modules may reference
    # settings.DEFAUL_FROM_EMAIL.
    DEFAUL_FROM_EMAIL = getattr(secrets, "DEFAUL_FROM_EMAIL")
    SERVER_EMAIL = getattr(secrets, "SERVER_EMAIL")
except IOError:
    # Development fallbacks used when secrets.json is absent.
    SECRET_KEY = 'k8n13h0y@$=v$uxg*^brlv9$#hm8w7nye6km!shc*&bkgkcd*p'
    DB_NAME = ''
    DB_USER = ''
    DB_PASSWORD = ''
    # Bug fix: EMAIL_HOST was not set in this branch, leaving the Django
    # setting undefined whenever the secrets file was missing.
    EMAIL_HOST = ''
    EMAIL_HOST_USER = ''
    EMAIL_HOST_PASSWORD = ''
    DEFAUL_FROM_EMAIL = ''
    SERVER_EMAIL = ''

# SMTP transport settings shared by both branches above.
EMAIL_PORT = 587
EMAIL_USE_TLS = True
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': DB_NAME,
'USER': DB_USER,
'PASSWORD': DB_PASSWORD,
'HOST': 'localhost',
'PORT': '',
}
}
REST_FRAMEWORK = {
# Use Django's standard `django.contrib.auth` permissions,
# or allow read-only access for unauthenticated users.
'DEFAULT_PERMISSION_CLASSES': [
'rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly'
]
}
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
# 'allauth.account.auth_backends.AuthenticationBackend'
)
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'ko-KR'
TIME_ZONE = 'Asia/Seoul'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/assets/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
MEDIA_URL = '/upload/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'upload')
LOGIN_REDIRECT_URL = 'login'
# Setting for ranking
RANKING_START_YEAR = 2017
RANKING_START_MONTH = 5
RANKING_DATE_DELTA = 7 # 랭킹 계산에 포함될 시간(일)
RANKING_LIST_LIMIT = 10 # 보여줄 순위
# Setting for today
HOTISSUE_LIMIT = 20 # 핫이슈 리스트 개수
HOTISSUE_DATE_DELTA = 7 # 최근 뉴스에 포함될 시간(일)
# SEtting for Burst Call
POINT_MAX = 50 # point max
BURST_CALL_MIN_POINT = 10 # 버스터콜에 필요한 최소 포인트
BURST_CALL_ACOM = 1 # acom for burster call
# Setting for best
BEST_LIST_LIMIT = 20 # 리스트 당 개수
BEST_THRESHOLD = 20 # 베스트에 올라갈 기준
# Setting for issue
FILTER_DATE_DELTA = 7 # 핫이슈에 개제될 시간(일)
MEDIA_CHOICE = ( # 매체 종류
('조선일보', '조선일보'),
('중앙일보', '중앙일보'),
('동아일보', '동아일보'),
('한겨레', '한겨레'),
('경향신문', '경향신문'),
('오마이뉴스', '오마이뉴스'),
('미디어오늘', '미디어오늘'),
('KBS', 'KBS'),
('MBC', 'MBC'),
('SBS', 'SBS'),
('TV조선', 'TV조선'),
('채널A', '채널A'),
('JTBC', 'JTBC'),
('MBN', 'MBN'),
('YTN', 'YTN'),
('연합뉴스', '연합뉴스'),
('뉴시스', '뉴시스'),
('뉴스1', '뉴스1'),
('국민일보', '국민일보'),
('국제신문', '국제신문'),
('CBS노컷뉴스', 'CBS노컷뉴스'),
('NewBC', 'NewBC'),
('뉴데일리', '뉴데일리'),
('뉴스타파', '뉴스타파'),
('뉴스토마토', '뉴스토마토'),
('뉴스핌', '뉴스핌'),
('더팩트', '더팩트'),
('데일리안', '데일리안'),
('디지털데일리', '디지털데일리'),
('디지털타임스', '디지털타임스'),
('마이데일리', '마이데일리'),
('매일경제', '매일경제'),
('머니투데이', '머니투데이'),
('문화일보', '문화일보'),
('문화저널21', '문화저널21'),
('미디어스', '미디어스'),
('민중의소리', '민중의소리'),
('서울신문', '서울신문'),
('서울경제', '서울경제'),
('세계일보', '세계일보'),
('시사iN', '시사iN'),
('시사저널', '시사저널'),
('아시아경제', '아시아경제'),
('아시아투데이', '아시아투데이'),
('아이뉴스', '아이뉴스'),
('에너지경제신문', '에너지경제신문'),
('여성신문', '여성신문'),
('위키트리', '위키트리'),
('이데일리', '이데일리'),
('전자신문', '전자신문'),
('조세일보', '조세일보'),
('ZDNet', 'ZDNet'),
('쿠키뉴스', '쿠키뉴스'),
('파이낸셜뉴스', '파이낸셜뉴스'),
('프레시안', '프레시안'),
('한국경제', '한국경제'),
('한국일보', '한국일보'),
('헤럴드경제', '헤럴드경제'),
('기자협회보', '기자협회보'),
('기타', '기타'),
)
GIZA_IMAGE_SIZE_LIMIT = 100 * 1024 # 기자 사진 사이즈 제한
ABOUT_LINK = "/assets/html/howto_newissue.html"
LOGO_NAME = "/assets/images/nolooknews.png"
NEWBC_LINK = "http://newbc.kr/bbs/board.php?bo_table=nolook"
NEWBC_IMG = "/assets/images/newbc.png"
NEWBC_IMG_SMALL = "/assets/images/newbc-small.png"
# Admin information
ADMIN_EMAIL = '[email protected]'
ADMIN_TWITTER = 'https://twitter.com/nolooknews'
FOOTER_TAGS = '<li><a href="%s">노룩뉴스 소개</a></li>\
<li>테마 : %s</li>\
<li>문의, 의견 보내기</li>\
<li><a href="mailto:%s"><img src="/assets/icons/email24.png"></a></li>\
<li><a href="%s"><img src="/assets/icons/twitter24.png" target="_blank"></a></li>'\
% (ABOUT_LINK, THEME, ADMIN_EMAIL, ADMIN_TWITTER)
| 27.167808 | 83 | 0.63343 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5,517 | 0.613614 |
fb07a1b009f2ec4dd007b6d28b15a16a4abfeb17
| 29 |
py
|
Python
|
python/callback/func/func.py
|
qingkaikong/useful_script
|
2547931dd11dbff7438e323ff4cd168427ff92ce
|
[
"BSD-3-Clause"
] | 4 |
2016-03-16T17:06:42.000Z
|
2021-07-26T15:43:42.000Z
|
python/callback/func/func.py
|
qingkaikong/useful_script
|
2547931dd11dbff7438e323ff4cd168427ff92ce
|
[
"BSD-3-Clause"
] | null | null | null |
python/callback/func/func.py
|
qingkaikong/useful_script
|
2547931dd11dbff7438e323ff4cd168427ff92ce
|
[
"BSD-3-Clause"
] | 3 |
2015-12-01T20:38:19.000Z
|
2020-12-15T20:10:34.000Z
|
def test(a, b):
print b()
| 14.5 | 15 | 0.517241 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
fb07a87646537aa7d24a8747c95b7298fca053cd
| 98 |
py
|
Python
|
backup_codes/apps.py
|
biligunb/portal-web-app
|
cd10cda3d8bfc8f055a268e14f9665f142051c90
|
[
"MIT"
] | null | null | null |
backup_codes/apps.py
|
biligunb/portal-web-app
|
cd10cda3d8bfc8f055a268e14f9665f142051c90
|
[
"MIT"
] | null | null | null |
backup_codes/apps.py
|
biligunb/portal-web-app
|
cd10cda3d8bfc8f055a268e14f9665f142051c90
|
[
"MIT"
] | null | null | null |
from django.apps import AppConfig
class BackupCodesConfig(AppConfig):
    """Django AppConfig for the ``backup_codes`` application."""

    name = "backup_codes"
| 16.333333 | 35 | 0.77551 | 61 | 0.622449 | 0 | 0 | 0 | 0 | 0 | 0 | 14 | 0.142857 |
fb07d1f256a2f6d7a6cc9dbdf801ef7f4558d52a
| 323 |
py
|
Python
|
TP_ALGO_3/convert.py
|
PierreLeGuen/ALGO_S5
|
9067e887d14fe997c6944292a0cff23ceda47b6e
|
[
"MIT"
] | null | null | null |
TP_ALGO_3/convert.py
|
PierreLeGuen/ALGO_S5
|
9067e887d14fe997c6944292a0cff23ceda47b6e
|
[
"MIT"
] | null | null | null |
TP_ALGO_3/convert.py
|
PierreLeGuen/ALGO_S5
|
9067e887d14fe997c6944292a0cff23ceda47b6e
|
[
"MIT"
] | null | null | null |
def convert(n, base):
    """Return the digits of ``n`` in ``base``, most-significant digit first.

    Each digit is rendered with ``str()``, so for bases > 10 digits above 9
    appear as multi-character decimal numbers (e.g. 15 -> '15').
    """
    if n < base:
        return str(n)
    return convert(n // base, base) + str(n % base)
# Demo: 10 in binary -> "1010"
print(convert(10,2))
def convert_inv(n, base):
    """Return the digits of ``n`` in ``base`` in reversed order
    (least-significant digit first).

    Bug fix: the recursive step previously called ``convert`` instead of
    ``convert_inv``, so only the first digit ended up reversed --
    e.g. convert_inv(12, 2) returned '0110' instead of '0011'.
    """
    if n < base:
        return str(n)
    return str(n % base) + convert_inv(n // base, base)
# Demo: 10 in binary, digits reversed -> "0101"
print(convert_inv(10,2))
| 16.15 | 49 | 0.544892 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
fb083e9034d5ab0e5ac6315a7e5ffb3f614cc66e
| 1,029 |
py
|
Python
|
tests/motors/spikebase_tank1.py
|
cschlack/pybricks-micropython
|
0abfd2918267a4e6e7a04062976ac1bb3da1f4b1
|
[
"MIT"
] | 1 |
2021-12-27T00:09:37.000Z
|
2021-12-27T00:09:37.000Z
|
tests/motors/spikebase_tank1.py
|
cschlack/pybricks-micropython
|
0abfd2918267a4e6e7a04062976ac1bb3da1f4b1
|
[
"MIT"
] | null | null | null |
tests/motors/spikebase_tank1.py
|
cschlack/pybricks-micropython
|
0abfd2918267a4e6e7a04062976ac1bb3da1f4b1
|
[
"MIT"
] | null | null | null |
from pybricks.pupdevices import Motor
from pybricks.tools import wait
from pybricks.parameters import Port, Direction
from pybricks.robotics import SpikeBase
from pybricks import version
print(version)

# Initialize base.
left_motor = Motor(Port.C)
right_motor = Motor(Port.D)
spike_base = SpikeBase(left_motor, right_motor)

# Allocate logs for motors and controller signals.
# NOTE(review): assumes DURATION is the log buffer size accepted by
# log.start(); confirm units (ms vs samples) against the pybricks log API.
DURATION = 6000
left_motor.log.start(DURATION)
right_motor.log.start(DURATION)
spike_base.distance_control.log.start(DURATION)
spike_base.heading_control.log.start(DURATION)

# Turn in place, almost.
spike_base.tank_move_for_degrees(speed_left=250, speed_right=-247, angle=182)

# Wait so we can also log hold capability, then turn off the motor completely.
wait(100)
spike_base.stop()

# Transfer data logs.
print("Transferring data...")
left_motor.log.save("servo_left.txt")
right_motor.log.save("servo_right.txt")
spike_base.distance_control.log.save("control_distance.txt")
spike_base.heading_control.log.save("control_heading.txt")
print("Done")
| 29.4 | 78 | 0.808552 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 295 | 0.286686 |
fb097754f4efecf6545051b20dad8dee8adaa09e
| 1,136 |
py
|
Python
|
cyto/basic/_dict.py
|
sbtinstruments/cyto
|
f452562e5e9ae9d2516cd92958af6e6a2c985dcc
|
[
"MIT"
] | 5 |
2021-04-03T04:09:38.000Z
|
2021-12-17T15:05:18.000Z
|
cyto/basic/_dict.py
|
sbtinstruments/cyto
|
f452562e5e9ae9d2516cd92958af6e6a2c985dcc
|
[
"MIT"
] | 1 |
2021-04-21T17:00:29.000Z
|
2021-04-21T19:12:30.000Z
|
cyto/basic/_dict.py
|
sbtinstruments/cyto
|
f452562e5e9ae9d2516cd92958af6e6a2c985dcc
|
[
"MIT"
] | null | null | null |
from typing import Any, Dict
def deep_update(dest: Dict[Any, Any], other: Dict[Any, Any]) -> None:
    """Recursively merge the key/value pairs of ``other`` into ``dest``.

    ``dest`` is modified in place and ``None`` is returned.  When both sides
    hold a ``dict`` for the same key, the sub-dictionaries are merged key by
    key; any other value type (including lists) simply replaces the existing
    entry in ``dest``.
    """
    for key, incoming in other.items():
        current = dest.get(key)
        if key in dest and isinstance(current, dict) and isinstance(incoming, dict):
            # Both sides are dicts: merge recursively instead of replacing.
            deep_update(current, incoming)
        else:
            # New key, or a non-dict on either side: override wholesale.
            dest[key] = incoming
| 39.172414 | 84 | 0.634683 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 668 | 0.588028 |
fb0b0d61a04227b98b766bf05a1daba731f1fb99
| 2,312 |
py
|
Python
|
api_logic_server_cli/create_from_model/ui_basic_web_app_run.py
|
valhuber/ApiLogicServer
|
a4acd8d886a18d4d500e0fffffcaa2f1c0765040
|
[
"BSD-3-Clause"
] | 71 |
2021-01-23T17:34:33.000Z
|
2022-03-29T13:11:29.000Z
|
api_logic_server_cli/create_from_model/ui_basic_web_app_run.py
|
valhuber/ApiLogicServer
|
a4acd8d886a18d4d500e0fffffcaa2f1c0765040
|
[
"BSD-3-Clause"
] | 38 |
2021-01-24T21:56:30.000Z
|
2022-03-08T18:49:00.000Z
|
api_logic_server_cli/create_from_model/ui_basic_web_app_run.py
|
valhuber/ApiLogicServer
|
a4acd8d886a18d4d500e0fffffcaa2f1c0765040
|
[
"BSD-3-Clause"
] | 14 |
2021-01-23T16:20:44.000Z
|
2022-03-24T10:48:28.000Z
|
# runs ApiLogicServer basic web app:
#    python ui/basic_web_app/run.py
# Export PYTHONPATH, to enable python ui/basic_web_app/run.py

import os, sys, logging
from pathlib import Path

logger = logging.getLogger()

# Resolve the project root (two levels above this file) and put it on
# sys.path so `import ui.basic_web_app.config` works when run as a script.
current_path = Path(os.path.abspath(os.path.dirname(__file__)))
current_path = current_path.parent.absolute()  # ui
current_path = current_path.parent.absolute()  # project
project_dir = str(current_path)
sys.path.append(project_dir)

import ui.basic_web_app.config as config

# Console logging: narrow the message width for the bundled sqlite demo DB
# so logic-engine logs stay readable.
handler = logging.StreamHandler(sys.stderr)
handler.setLevel(logging.INFO)  # DEBUG, INFO, <default> WARNING, ERROR, CRITICAL
auto_log_narrow = True
if auto_log_narrow and config.SQLALCHEMY_DATABASE_URI.endswith("db.sqlite"):
    formatter = logging.Formatter('%(message).120s')  # lead tag - '%(name)s: %(message)s')
else:
    formatter = logging.Formatter('%(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.propagate = True

fab_logger = logging.getLogger("flask_appbuilder")
fab_logger.setLevel(logging.WARNING)

logic_logger = logging.getLogger("logic_logger")
logic_logger.setLevel(logging.INFO)

logger.setLevel(logging.WARNING)  # WARNING to reduce output, INFO for more
logger.info(f'ui/basic_web_app/run.py - project_dir: {project_dir}')
if auto_log_narrow and config.SQLALCHEMY_DATABASE_URI.endswith("db.sqlite"):
    logger.warning("\nLog width reduced for readability - "
                   "see https://github.com/valhuber/ApiLogicServer/wiki/Tutorial#word-wrap-on-the-log\n")

# args for help
# NOTE(review): `sys` is already imported above; this re-import is harmless
# but redundant.
import sys
if len(sys.argv) > 1 and sys.argv[1].__contains__("help"):
    print("")
    print("basic_web_app - run instructions (defaults are host 0.0.0.0, port 5002):")
    print("  python run.py [host [port]]")
    print("")
    sys.exit()

try:
    logger.debug("\nui/basic_web_app/run.py - PYTHONPATH" + str(sys.path) + "\n")
    # e.g., /Users/val/dev/servers/api_logic_server/ui/basic_web_app
    from app import app  # ui/basic_web_app/app/__init__.py activates logic
except Exception as e:
    logger.error("ui/basic_web_app/run.py - Exception importing app: " + str(e))

# args to avoid port conflicts, e.g., localhost 8080
host = sys.argv[1] if sys.argv[1:] \
    else "0.0.0.0"
port = sys.argv[2] if sys.argv[2:] \
    else "5002"

app.run(host=host, port=port, debug=True)
| 36.125 | 105 | 0.733131 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 941 | 0.407007 |
fb0b24730ac65daad4c5e515482703fc512b4066
| 300 |
py
|
Python
|
output/models/nist_data/list_pkg/non_positive_integer/schema_instance/nistschema_sv_iv_list_non_positive_integer_length_1_xsd/__init__.py
|
tefra/xsdata-w3c-tests
|
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
|
[
"MIT"
] | 1 |
2021-08-14T17:59:21.000Z
|
2021-08-14T17:59:21.000Z
|
output/models/nist_data/list_pkg/non_positive_integer/schema_instance/nistschema_sv_iv_list_non_positive_integer_length_1_xsd/__init__.py
|
tefra/xsdata-w3c-tests
|
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
|
[
"MIT"
] | 4 |
2020-02-12T21:30:44.000Z
|
2020-04-15T20:06:46.000Z
|
output/models/nist_data/list_pkg/non_positive_integer/schema_instance/nistschema_sv_iv_list_non_positive_integer_length_1_xsd/__init__.py
|
tefra/xsdata-w3c-tests
|
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
|
[
"MIT"
] | null | null | null |
from output.models.nist_data.list_pkg.non_positive_integer.schema_instance.nistschema_sv_iv_list_non_positive_integer_length_1_xsd.nistschema_sv_iv_list_non_positive_integer_length_1 import NistschemaSvIvListNonPositiveIntegerLength1

# Re-export the generated model class as this package's public API.
__all__ = [
    "NistschemaSvIvListNonPositiveIntegerLength1",
]
| 50 | 233 | 0.91 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 45 | 0.15 |
fb0c48302e90f96e57473fb786d4ea50f4be0f46
| 190 |
py
|
Python
|
cbot/__init__.py
|
wangyitao/cbot
|
6b2500f5118ddd5ef581f31104e70e5a57b72f7d
|
[
"MIT"
] | 8 |
2018-10-18T09:15:36.000Z
|
2019-09-01T04:42:59.000Z
|
cbot/__init__.py
|
wangyitao/cbot
|
6b2500f5118ddd5ef581f31104e70e5a57b72f7d
|
[
"MIT"
] | 1 |
2018-10-19T06:35:38.000Z
|
2018-10-19T06:35:38.000Z
|
cbot/__init__.py
|
wangyitao/cbot
|
6b2500f5118ddd5ef581f31104e70e5a57b72f7d
|
[
"MIT"
] | 5 |
2018-10-19T05:56:26.000Z
|
2019-09-01T04:43:11.000Z
|
from .main import CBot

# Package metadata.
__version__ = '0.1.0'
__author__ = 'Felix Wang'
__email__ = '[email protected]'
__url__ = 'https://github.com/wangyitao/cbot'

# Public API of the package.
__all__ = (
    'CBot',
)
| 15.833333 | 46 | 0.636842 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 80 | 0.421053 |
fb0e6ce0d08d8c7d2254af54405bab1d2071c99d
| 4,219 |
py
|
Python
|
elastic_inference/apps/infer_service.py
|
qzheng527/cloud-native-demos
|
e2dbcfc0d90c1972bc34a35f5d85f83f2b2b6cf6
|
[
"Apache-2.0"
] | 1 |
2020-04-06T10:11:27.000Z
|
2020-04-06T10:11:27.000Z
|
elastic_inference/apps/infer_service.py
|
qzheng527/cloud-native-demos
|
e2dbcfc0d90c1972bc34a35f5d85f83f2b2b6cf6
|
[
"Apache-2.0"
] | null | null | null |
elastic_inference/apps/infer_service.py
|
qzheng527/cloud-native-demos
|
e2dbcfc0d90c1972bc34a35f5d85f83f2b2b6cf6
|
[
"Apache-2.0"
] | 2 |
2021-01-19T21:42:08.000Z
|
2021-08-13T19:59:06.000Z
|
#!/usr/bin/python3
"""
Infer service.
It pick up single frame from frame queue and do inference. The result will be
published to stream broker like below.
+---------------------+ +---------------+ +-----------------------+
| Frame Queue (redis) | => | Infer Service | => | Stream broker (redis) |
+---------------------+ +---------------+ +-----------------------+
||
##
+--------------------------------+
| Infer Frame Speed (prometheus) |
+--------------------------------+
The infer service can be scaled by kubernete HPA(Horizontal Pod Autoscale)
dynamically according to the metrics like "drop frame speed", "infer frame speed"
"CPU usage" etc.
"""
import os
import sys
import logging
import signal
import socket
import redis
import prometheus_client as prom
# add current path into PYTHONPATH
APP_PATH = os.path.dirname(__file__)
sys.path.append(APP_PATH)
from clcn.appbase import CLCNAppBase # pylint: disable=wrong-import-position
from clcn.frame import RedisFrameQueue # pylint: disable=wrong-import-position
from clcn.stream import RedisStreamBroker # pylint: disable=wrong-import-position
from clcn.nn.inferengine import OpenVinoInferEngineTask # pylint: disable=wrong-import-position
LOG = logging.getLogger(__name__)
class InferServiceApp(CLCNAppBase):
    """
    Inference service.

    Reads connection and model settings from environment variables, consumes
    frames from a Redis queue, runs OpenVINO inference, publishes results to
    a Redis stream broker, and exposes prometheus metrics on port 8000.
    """

    def init(self):
        # One-time setup; presumably invoked by CLCNAppBase before run() --
        # confirm against clcn.appbase.
        LOG.info("Host name: %s", socket.gethostname())
        LOG.info("Host ip: %s", socket.gethostbyname(socket.gethostname()))

        self.in_queue_host = self.get_env("INPUT_QUEUE_HOST", "127.0.0.1")
        self.out_broker_host = self.get_env("OUTPUT_BROKER_HOST", "127.0.0.1")
        LOG.info("Input queue host: %s", self.in_queue_host)
        LOG.info("Output broker host: %s", self.out_broker_host)

        self.infer_type = self.get_env("INFER_TYPE", "face")
        self.model_name = self.get_env("INFER_MODEL_NAME")

        # MODEL_PATH env got higher priority
        path = self.get_env("INFER_MODEL_PATH")
        if path is not None and len(path) != 0:
            self.model_dir = self.get_env("INFER_MODEL_PATH")
        else:
            self.model_dir = self.get_env("MODEL_DIR")
        LOG.info("model dir: %s", self.model_dir)
        LOG.info("model name: %s", self.model_name)

        # Prometheus gauges, served by the HTTP endpoint started in run().
        self._guage_infer_fps = prom.Gauge(
            'ei_infer_fps', 'Total infererence FPS')
        self._guage_drop_fps = prom.Gauge(
            'ei_drop_fps', 'Drop frames for infer')
        self._guage_scale_ratio = prom.Gauge(
            'ei_scale_ratio', 'Scale ratio for inference, (ei_infer_fps+ei_drop_fps)/ei_infer_fps')

    def run(self):
        # Wire: Redis frame queue -> OpenVINO inference task -> stream broker.
        in_redis_conn = redis.StrictRedis(self.in_queue_host)
        out_redis_conn = in_redis_conn
        # Reuse one connection when input and output live on the same host.
        if self.in_queue_host != self.out_broker_host:
            out_redis_conn = redis.StrictRedis(self.out_broker_host)
        input_queue = RedisFrameQueue(in_redis_conn, self.infer_type)
        out_broker = RedisStreamBroker(out_redis_conn)
        out_broker.start_streams_monitor_task()
        infer_task = OpenVinoInferEngineTask(input_queue, out_broker,
                                             self._report_metric,
                                             model_dir=self.model_dir,
                                             model_name=self.model_name)
        infer_task.start()
        # Expose the gauges registered in init() for prometheus scraping.
        prom.start_http_server(8000)

    def _report_metric(self, infer_fps, drop_fps, scale_ratio):
        # Callback handed to the infer task: pushes its counters into the gauges.
        self._guage_infer_fps.set(infer_fps)
        self._guage_drop_fps.set(drop_fps)
        self._guage_scale_ratio.set(scale_ratio)
def start_app():
    """
    App entry.

    Creates the inference service, installs signal handlers that stop it and
    exit with status 1, then blocks until the app's task completes.
    """
    app = InferServiceApp()

    def signal_handler(num, _):
        # Stop the app and exit non-zero on any handled signal.
        logging.getLogger().error("signal %d", num)
        app.stop()
        sys.exit(1)

    # setup the signal handler
    signames = ['SIGINT', 'SIGHUP', 'SIGQUIT', 'SIGUSR1']
    for name in signames:
        signal.signal(getattr(signal, name), signal_handler)

    app.run_and_wait_task()
if __name__ == "__main__":
start_app()
| 35.158333 | 99 | 0.599194 | 2,314 | 0.548471 | 0 | 0 | 0 | 0 | 0 | 0 | 1,577 | 0.373785 |
fb0ed9b104a5cd8f1fa264f1f6318ff1bd1ed415
| 288 |
py
|
Python
|
P25010-Guangzhou-Jiachengwu/week07/ex_filecopy.py
|
xiaohh2016/python-25
|
8981ba89bfb32754c3f9c881ee8fcaf13332ce51
|
[
"Apache-2.0"
] | 1 |
2019-09-11T23:24:58.000Z
|
2019-09-11T23:24:58.000Z
|
P25010-Guangzhou-Jiachengwu/week07/ex_filecopy.py
|
xiaohh2016/python-25
|
8981ba89bfb32754c3f9c881ee8fcaf13332ce51
|
[
"Apache-2.0"
] | null | null | null |
P25010-Guangzhou-Jiachengwu/week07/ex_filecopy.py
|
xiaohh2016/python-25
|
8981ba89bfb32754c3f9c881ee8fcaf13332ce51
|
[
"Apache-2.0"
] | 5 |
2019-09-11T06:33:34.000Z
|
2020-02-17T12:52:31.000Z
|
# 使用Python copy一个文件,从a目录,copy文件到b目录
import os
from pathlib import Path
import shutil
src_path=Path('a/test')
dst_path=Path('b/test')
with open(src_path,'w') as src_file:
src_file.write('abcd\n1234')
shutil.copy(src_path,dst_path)
print(os.stat(src_path))
print(os.stat(dst_path))
| 18 | 36 | 0.756944 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 96 | 0.301887 |
fb0fe20410b5c56d7291f72bd22a841605532524
| 548 |
py
|
Python
|
vanirio/module/interface/textfield.py
|
vaniriovanhalteren/sdk-python
|
947b08fbe046d46275bf39bc95984fbf3edc0e6c
|
[
"MIT"
] | null | null | null |
vanirio/module/interface/textfield.py
|
vaniriovanhalteren/sdk-python
|
947b08fbe046d46275bf39bc95984fbf3edc0e6c
|
[
"MIT"
] | null | null | null |
vanirio/module/interface/textfield.py
|
vaniriovanhalteren/sdk-python
|
947b08fbe046d46275bf39bc95984fbf3edc0e6c
|
[
"MIT"
] | 1 |
2022-02-08T08:15:07.000Z
|
2022-02-08T08:15:07.000Z
|
from vanirio.module.interface.base import Base
class Textfield(Base):
    """String-valued interface parameter for module software."""

    def __init__(self, tag: str, label: str):
        """
        A Textfield interface object is used as a parameter for module
        software. The interface can be used for string values.
        :param label: GUI label name
        :param tag: Module tag name
        """
        super().__init__()
        self.tag = tag
        self.label = label
        self._type = 'TEXTFIELD'

    def encode(self) -> dict:
        # The base encoding already carries everything this type needs.
        return self.encode_base()
| 24.909091 | 70 | 0.600365 | 497 | 0.906934 | 0 | 0 | 0 | 0 | 0 | 0 | 234 | 0.427007 |
fb1042dc49fa8be20e43a1d8892da1c69f7bf202
| 2,348 |
py
|
Python
|
utils.py
|
lanyinemt2/ST-PlusPlus
|
7c31abfcf21e390a06c4d5da1f77a9fe5ff071ed
|
[
"MIT"
] | 73 |
2021-06-10T01:12:04.000Z
|
2022-03-30T08:31:24.000Z
|
utils.py
|
lanyinemt2/ST-PlusPlus
|
7c31abfcf21e390a06c4d5da1f77a9fe5ff071ed
|
[
"MIT"
] | 12 |
2021-07-01T00:27:11.000Z
|
2022-03-17T05:09:49.000Z
|
utils.py
|
lanyinemt2/ST-PlusPlus
|
7c31abfcf21e390a06c4d5da1f77a9fe5ff071ed
|
[
"MIT"
] | 18 |
2021-06-10T11:24:31.000Z
|
2022-03-31T16:48:58.000Z
|
import numpy as np
from PIL import Image
def count_params(model):
    """Return the total number of parameters in ``model``, in millions."""
    total = 0
    for parameter in model.parameters():
        total += parameter.numel()
    return total / 1e6
class meanIOU:
    """Accumulate a confusion matrix over batches and report per-class IoU.

    ``hist[t, p]`` counts pixels whose ground-truth class is ``t`` and
    predicted class is ``p``; ground-truth labels outside
    ``[0, num_classes)`` are ignored.
    """

    def __init__(self, num_classes):
        self.num_classes = num_classes
        self.hist = np.zeros((num_classes, num_classes))

    def _fast_hist(self, label_pred, label_true):
        # Keep only pixels with a valid ground-truth label.
        valid = (label_true >= 0) & (label_true < self.num_classes)
        # Encode (true, pred) pairs as a single index, then count.
        combined = self.num_classes * label_true[valid].astype(int) + label_pred[valid]
        counts = np.bincount(combined, minlength=self.num_classes ** 2)
        return counts.reshape(self.num_classes, self.num_classes)

    def add_batch(self, predictions, gts):
        for pred, gt in zip(predictions, gts):
            self.hist += self._fast_hist(pred.flatten(), gt.flatten())

    def evaluate(self):
        # IoU = TP / (FP + FN + TP) per class; nanmean skips absent classes.
        true_positive = np.diag(self.hist)
        union = self.hist.sum(axis=1) + self.hist.sum(axis=0) - true_positive
        iu = true_positive / union
        return iu, np.nanmean(iu)
def color_map(dataset='pascal'):
    """Return a 256x3 uint8 RGB palette for the dataset's label colours.

    'pascal'/'coco' use the VOC bit-interleaving scheme; 'cityscapes' uses
    its fixed 19-class palette; any other name yields an all-zero map.
    """
    cmap = np.zeros((256, 3), dtype='uint8')

    if dataset in ('pascal', 'coco'):
        # VOC scheme: spread bit 3k+c of the class index into bit (7 - k) of
        # channel c, for c in (R, G, B).
        for idx in range(256):
            rgb = [0, 0, 0]
            value = idx
            for shift in range(8):
                for channel in range(3):
                    rgb[channel] |= ((value >> channel) & 1) << (7 - shift)
                value >>= 3
            cmap[idx] = np.array(rgb)

    elif dataset == 'cityscapes':
        palette = [
            (128, 64, 128), (244, 35, 232), (70, 70, 70), (102, 102, 156),
            (190, 153, 153), (153, 153, 153), (250, 170, 30), (220, 220, 0),
            (107, 142, 35), (152, 251, 152), (70, 130, 180), (220, 20, 60),
            (255, 0, 0), (0, 0, 142), (0, 0, 70), (0, 60, 100),
            (0, 80, 100), (0, 0, 230), (119, 11, 32),
        ]
        for idx, rgb in enumerate(palette):
            cmap[idx] = np.array(rgb)

    return cmap
| 33.070423 | 106 | 0.520017 | 802 | 0.341567 | 0 | 0 | 0 | 0 | 0 | 0 | 41 | 0.017462 |
fb107ee4532ec8cc33a8bdd76e5d3973b9f4d818
| 3,671 |
py
|
Python
|
lib/bus/client.py
|
hoffmannmatheus/eaZy
|
d79ade0e01a23f1c6fa585ee378ed70c95976b05
|
[
"MIT",
"Unlicense"
] | 3 |
2015-01-11T15:29:48.000Z
|
2020-09-08T14:52:14.000Z
|
lib/bus/client.py
|
hoffmannmatheus/eaZy
|
d79ade0e01a23f1c6fa585ee378ed70c95976b05
|
[
"MIT",
"Unlicense"
] | null | null | null |
lib/bus/client.py
|
hoffmannmatheus/eaZy
|
d79ade0e01a23f1c6fa585ee378ed70c95976b05
|
[
"MIT",
"Unlicense"
] | null | null | null |
import zmq
import json
"""
Class used by the Client entity to communicate to the Server.
The communication channel should be configured using the three ports:
- com_port: Used to receive broadcast messages from the Server entity.
- set_port: Used to send messages/request data to the Server entity.
- res_port: Used to receive a responce from a Server.
"""
defaults = {
'host' : '127.0.0.1',
'com_port' : 5556,
'set_port' : 5557,
'res_port' : 5558
}
context = zmq.Context()
class BusClient:
    """Client side of the zmq bus: subscribes to Server broadcasts and
    talks back over dedicated request/response ports."""
    def __init__(self, id, filter, opt=defaults):
        """
        Constructs a new Bus Client instance.
        @param id The identification of this Client.
        @param filter The filter (Server id) of messages.
        @param opt Configuration mapping with 'host', 'com_port', 'set_port'
        and 'res_port' keys; the module-level defaults are used when omitted.
        """
        self.id = id
        self.filter = filter
        # Copy the four connection settings onto the instance.
        for key in ('host', 'com_port', 'set_port', 'res_port'):
            setattr(self, key, opt[key])
    def setup(self):
        """
        Prepares this Bus Client to be used: creates the context and the
        subscriber socket. Must be called before sending/receiving.
        """
        self.context = zmq.Context()
        self.sub_socket = self.context.socket(zmq.SUB)
        if self.filter:
            self.sub_socket.setsockopt(zmq.SUBSCRIBE, self.filter)
        self.sub_socket.connect('tcp://'+self.host+':'+str(self.com_port))
        return self
    def check_income(self, blocking=None):
        """
        Polls the subscriber socket for a broadcast from the Server.
        Returns (message, sender) when a broadcast was waiting, or False
        when nothing has arrived.
        NOTE(review): despite the parameter, the receive is always
        non-blocking; *blocking* is currently ignored.
        """
        try:
            raw_data = self.sub_socket.recv(zmq.NOBLOCK)
        except zmq.error.Again:
            # No pending broadcast.
            return False
        sender, msg = raw_data.split(' ', 1)
        return json.loads(msg), sender
    def send(self, data, type='send'):
        """
        Send a message to the Server over a fresh PAIR socket on set_port.
        @param data An object or string containing the message payload.
        @param type Message type tag ('send' by default).
        """
        msg = {'type':type, 'data':data, 'sender':self.id}
        out_socket = self.context.socket(zmq.PAIR)
        out_socket.connect('tcp://'+self.host+':'+str(self.set_port))
        out_socket.send(json.dumps(msg))
        out_socket.close()
        return self
    def get(self, request):
        """
        Make a request to the Server and block until its reply arrives on
        res_port; the reply's 'data' field is returned.
        @param request A string naming the request (e.g. 'device_list')
        """
        self.send(request, 'get')
        reply_socket = self.context.socket(zmq.PAIR)
        reply_socket.bind('tcp://'+self.host+':'+str(self.res_port))
        reply = reply_socket.recv()
        reply_socket.close()
        return json.loads(reply)['data']
| 35.990196 | 79 | 0.625443 | 3,168 | 0.86298 | 0 | 0 | 0 | 0 | 0 | 0 | 2,158 | 0.587851 |
fb12ef0139d2387d5de8cd18fde96987527d5c7f
| 2,821 |
py
|
Python
|
ops.py
|
fivoskal/MGAN
|
2eb1407c907af5f472a80e8ae363bee57d5cfaa4
|
[
"MIT"
] | 37 |
2018-03-07T15:32:09.000Z
|
2022-03-01T06:54:06.000Z
|
ops.py
|
fivoskal/MGAN
|
2eb1407c907af5f472a80e8ae363bee57d5cfaa4
|
[
"MIT"
] | 2 |
2018-09-19T23:20:07.000Z
|
2019-06-15T13:45:54.000Z
|
ops.py
|
fivoskal/MGAN
|
2eb1407c907af5f472a80e8ae363bee57d5cfaa4
|
[
"MIT"
] | 18 |
2018-05-23T11:09:34.000Z
|
2022-03-22T08:38:13.000Z
|
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
import numpy as np
import tensorflow as tf
def lrelu(x, alpha=0.2):
    """Leaky ReLU: positives pass through, negatives are scaled by *alpha*."""
    scaled = alpha * x
    return tf.maximum(x, scaled)
def linear(input, output_dim, scope='linear', stddev=0.01):
    """Fully-connected layer: returns input @ W + b under variable *scope*."""
    weight_init = tf.random_normal_initializer(stddev=stddev)
    bias_init = tf.constant_initializer(0.0)
    with tf.variable_scope(scope):
        w = tf.get_variable('weights', [input.get_shape()[1], output_dim], initializer=weight_init)
        b = tf.get_variable('biases', [output_dim], initializer=bias_init)
        return tf.matmul(input, w) + b
def conv2d(input_, output_dim,
           k_h=5, k_w=5, d_h=2, d_w=2, stddev=0.02,
           name="conv2d"):
    """Strided 2-D convolution with SAME padding and a bias term."""
    with tf.variable_scope(name):
        kernel = tf.get_variable('weights', [k_h, k_w, input_.get_shape()[-1], output_dim],
                                 initializer=tf.truncated_normal_initializer(stddev=stddev))
        conv = tf.nn.conv2d(input_, kernel, strides=[1, d_h, d_w, 1], padding='SAME')
        biases = tf.get_variable('biases', [output_dim], initializer=tf.constant_initializer(0.0))
        return tf.nn.bias_add(conv, biases)
def deconv2d(input_, output_shape,
             k_h=5, k_w=5, d_h=2, d_w=2, stddev=0.02,
             name="deconv2d", with_w=False):
    """Transposed (fractionally strided) 2-D convolution.

    Returns the output tensor, or (output, weights, biases) when *with_w*
    is true.
    """
    with tf.variable_scope(name):
        # Filter layout: [height, width, output_channels, in_channels].
        w = tf.get_variable('weights', [k_h, k_w, output_shape[-1], input_.get_shape()[-1]],
                            initializer=tf.random_normal_initializer(stddev=stddev))
        try:
            deconv = tf.nn.conv2d_transpose(input_, w, output_shape=output_shape,
                                            strides=[1, d_h, d_w, 1])
        except AttributeError:
            # TensorFlow before 0.7.0 exposed the older deconv2d entry point.
            deconv = tf.nn.deconv2d(input_, w, output_shape=output_shape,
                                    strides=[1, d_h, d_w, 1])
        biases = tf.get_variable('biases', [output_shape[-1]],
                                 initializer=tf.constant_initializer(0.0))
        deconv = tf.reshape(tf.nn.bias_add(deconv, biases), deconv.get_shape())
        return (deconv, w, biases) if with_w else deconv
def gmm_sample(num_samples, mix_coeffs, mean, cov):
    """Draw *num_samples* points from a diagonal-covariance Gaussian mixture.

    The mixture weights decide how many samples each component contributes;
    *mean* and *cov* hold one row of per-dimension values per component.
    """
    per_component = np.random.multinomial(num_samples, mix_coeffs)
    samples = np.zeros(shape=[num_samples, len(mean[0])])
    start = 0
    for k, count in enumerate(per_component):
        end = start + count
        samples[start:end, :] = np.random.multivariate_normal(
            mean=np.array(mean)[k, :],
            cov=np.diag(np.array(cov)[k, :]),
            size=count)
        start = end
    return samples
| 39.732394 | 98 | 0.611485 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 255 | 0.090393 |
fb176c1fa7c1a70a322d3da71abe867bb4b0b96a
| 2,431 |
py
|
Python
|
spacy/tests/regression/test_issue999.py
|
cmgreivel/spaCy
|
a31506e06060c559abfeda043503935691af2e98
|
[
"MIT"
] | 88 |
2018-05-06T17:28:23.000Z
|
2022-03-06T20:19:16.000Z
|
spacy/tests/regression/test_issue999.py
|
cmgreivel/spaCy
|
a31506e06060c559abfeda043503935691af2e98
|
[
"MIT"
] | 12 |
2018-07-19T15:11:57.000Z
|
2021-08-05T11:58:29.000Z
|
spacy/tests/regression/test_issue999.py
|
cmgreivel/spaCy
|
a31506e06060c559abfeda043503935691af2e98
|
[
"MIT"
] | 10 |
2018-07-28T22:43:04.000Z
|
2020-11-22T22:58:21.000Z
|
from __future__ import unicode_literals
import os
import random
import contextlib
import shutil
import pytest
import tempfile
from pathlib import Path
import pathlib
from ...gold import GoldParse
from ...pipeline import EntityRecognizer
from ...language import Language
# Python 2/3 compatibility: alias ``unicode`` to ``str`` on Python 3,
# where the builtin no longer exists.
try:
    unicode
except NameError:
    unicode = str
@pytest.fixture
def train_data():
    """Toy NER corpus of (text, [[start, end, label], ...]) pairs."""
    corpus = [
        ["hey", []],
        ["howdy", []],
        ["hey there", []],
        ["hello", []],
        ["hi", []],
        ["i'm looking for a place to eat", []],
        ["i'm looking for a place in the north of town", [[31, 36, "location"]]],
        ["show me chinese restaurants", [[8, 15, "cuisine"]]],
        ["show me chines restaurants", [[8, 14, "cuisine"]]],
    ]
    return corpus
@contextlib.contextmanager
def temp_save_model(model):
    """Save *model* into a throwaway directory and yield its Path.

    The directory is removed on exit. Fix: the cleanup now runs in a
    ``finally`` clause, so the temporary directory is no longer leaked
    when the ``with`` body raises.
    """
    model_dir = Path(tempfile.mkdtemp())
    model.save_to_directory(model_dir)
    try:
        yield model_dir
    finally:
        shutil.rmtree(model_dir.as_posix())
# TODO: Fix when saving/loading is fixed.
@pytest.mark.xfail
def test_issue999(train_data):
    '''Test that adding entities and resuming training works passably OK.
    There are two issues here:
    1) We have to readd labels. This isn't very nice.
    2) There's no way to set the learning rate for the weight update, so we
    end up out-of-scale, causing it to learn too fast.
    '''
    nlp = Language(pipeline=[])
    nlp.entity = EntityRecognizer(nlp.vocab, features=Language.Defaults.entity_features)
    nlp.pipeline.append(nlp.entity)
    # Re-register every entity label present in the corpus (issue 1 above).
    for _, offsets in train_data:
        for start, end, ent_type in offsets:
            nlp.entity.add_label(ent_type)
    # Keep updates small so the resumed model does not overshoot (issue 2).
    nlp.entity.model.learn_rate = 0.001
    for itn in range(100):
        random.shuffle(train_data)
        for raw_text, entity_offsets in train_data:
            doc = nlp.make_doc(raw_text)
            gold = GoldParse(doc, entities=entity_offsets)
            loss = nlp.entity.update(doc, gold)
    # Round-trip the trained model through disk, then verify the reloaded
    # model still recovers every annotated span with the right label.
    with temp_save_model(nlp) as model_dir:
        nlp2 = Language(path=model_dir)
    for raw_text, entity_offsets in train_data:
        doc = nlp2(raw_text)
        ents = {(ent.start_char, ent.end_char): ent.label_ for ent in doc.ents}
        for start, end, label in entity_offsets:
            if (start, end) in ents:
                assert ents[(start, end)] == label
                break
        else:
            # No predicted span matched an expected one: fail loudly with
            # the predictions for debugging.
            if entity_offsets:
                raise Exception(ents)
| 29.646341 | 88 | 0.633073 | 0 | 0 | 167 | 0.068696 | 2,054 | 0.84492 | 0 | 0 | 536 | 0.220485 |
fb19896662026b64b3faf3ab0b1d3c77cfab5f56
| 323 |
py
|
Python
|
mavisetc/__init__.py
|
jtmendel/mavisetc
|
4cd6800a7c4462f9a8063060c41e19719d35c5ee
|
[
"MIT"
] | null | null | null |
mavisetc/__init__.py
|
jtmendel/mavisetc
|
4cd6800a7c4462f9a8063060c41e19719d35c5ee
|
[
"MIT"
] | null | null | null |
mavisetc/__init__.py
|
jtmendel/mavisetc
|
4cd6800a7c4462f9a8063060c41e19719d35c5ee
|
[
"MIT"
] | null | null | null |
# The version string is generated at build time; fall back silently when
# running from a source checkout where _version has not been generated.
try:
    from ._version import __version__
except(ImportError):
    pass
from . import instruments
from . import sources
from . import sky
from . import utils
from . import telescopes
from . import detectors
from . import filters
# Public submodules re-exported by ``from mavisetc import *``.
__all__ = ['instruments', 'sources', 'sky', 'utils', 'telescopes', 'detectors', 'filters']
fb1a14af54cb6584a01ac6a47d46cd0d5260f471
| 293 |
py
|
Python
|
tests/test_lqr_speed_steer_control.py
|
pruidzeko/PythonRobotics
|
5ff9b70d737121c2947d844ecfb1fa07abdd210c
|
[
"MIT"
] | 38 |
2019-12-08T12:26:04.000Z
|
2022-03-06T11:29:08.000Z
|
tests/test_lqr_speed_steer_control.py
|
pruidzeko/PythonRobotics
|
5ff9b70d737121c2947d844ecfb1fa07abdd210c
|
[
"MIT"
] | 61 |
2020-08-17T20:02:09.000Z
|
2022-03-14T20:01:01.000Z
|
tests/test_lqr_speed_steer_control.py
|
pruidzeko/PythonRobotics
|
5ff9b70d737121c2947d844ecfb1fa07abdd210c
|
[
"MIT"
] | 15 |
2020-02-12T15:57:28.000Z
|
2021-08-28T07:39:18.000Z
|
from unittest import TestCase
import sys
sys.path.append("./PathTracking/lqr_speed_steer_control/")
from PathTracking.lqr_speed_steer_control import lqr_speed_steer_control as m
print(__file__)
class Test(TestCase):
    # Smoke test: run the LQR speed/steer demo end to end with the
    # animation disabled so it can execute headless.
    def test1(self):
        m.show_animation = False
        m.main()
| 18.3125 | 77 | 0.754266 | 93 | 0.317406 | 0 | 0 | 0 | 0 | 0 | 0 | 41 | 0.139932 |
fb1aa25d697063ac5234b37a20af2edde89cf7c2
| 825 |
py
|
Python
|
2 - python intermediario/63 - iteraveis/64 - comportamento iteradores e geradores.py
|
AdrianaViabL/Curso-Python-udemy
|
a4f230354985d0f6026a1e7b4913a8f64e205654
|
[
"Apache-2.0"
] | null | null | null |
2 - python intermediario/63 - iteraveis/64 - comportamento iteradores e geradores.py
|
AdrianaViabL/Curso-Python-udemy
|
a4f230354985d0f6026a1e7b4913a8f64e205654
|
[
"Apache-2.0"
] | null | null | null |
2 - python intermediario/63 - iteraveis/64 - comportamento iteradores e geradores.py
|
AdrianaViabL/Curso-Python-udemy
|
a4f230354985d0f6026a1e7b4913a8f64e205654
|
[
"Apache-2.0"
] | null | null | null |
# Lists, tuples and strings are sequences -- all of them iterable.
nome = 'nome qualquer'
print('comportamento esperado de um valor iteravel')
print('o valor vai sempre estar la para ser exibido novamente')
# A sequence can be traversed as many times as we like.
for caractere in nome:
    print(caractere)
print(nome)
print(10 * '=====')
# An iterator, by contrast, is consumed: each next() uses one value up.
iterador = iter(nome)
try:
    for _ in range(5):
        print(next(iterador))
except:
    pass
print('CADE OS VALORES???')
# Only the characters that were not consumed above remain.
for restante in iterador:
    print(restante)
print('\ntrabalhando com gerador\n')
# Generator expressions behave like iterators: lazy and single-use.
gerador = (letra for letra in nome)
for _ in range(5):
    print(next(gerador))
print(10 * '======')
for restante in gerador:
    print(restante)
| 21.153846 | 86 | 0.676364 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 322 | 0.390303 |
fb1af2b6ba7c64773e3eb0f188fe914ea2ee6f01
| 1,002 |
py
|
Python
|
src/server/api/API_ingest/dropbox_handler.py
|
carlos-dominguez/paws-data-pipeline
|
5c224e1f259c079631df7d3514a873875c633221
|
[
"MIT"
] | 27 |
2019-11-20T20:20:30.000Z
|
2022-01-31T17:24:55.000Z
|
src/server/api/API_ingest/dropbox_handler.py
|
mrcrnkovich/paws-data-pipeline
|
7c0bd4c5f23276f541611cb564f2f5abbb6b9887
|
[
"MIT"
] | 348 |
2019-11-26T20:34:02.000Z
|
2022-02-27T20:28:20.000Z
|
src/server/api/API_ingest/dropbox_handler.py
|
mrcrnkovich/paws-data-pipeline
|
7c0bd4c5f23276f541611cb564f2f5abbb6b9887
|
[
"MIT"
] | 20 |
2019-12-03T23:50:33.000Z
|
2022-02-09T18:38:25.000Z
|
import dropbox
# Resolve the Dropbox app token: prefer the local secrets file, and fall
# back to the environment when running deployed without secrets_dict.
try:
    from secrets_dict import DROPBOX_APP
except ImportError:
    # Not running locally
    print("Couldn't get DROPBOX_APP from file, trying environment **********")
    from os import environ
    try:
        DROPBOX_APP = environ['DROPBOX_APP']
    except KeyError:
        # Not in environment
        # You're SOL for now
        print("Couldn't get DROPBOX_APP from file or environment")
class TransferData:
    """Thin wrapper around the Dropbox SDK for single-file uploads."""
    def __init__(self, access_token):
        # The token is held and used to build a client on each upload.
        self.access_token = access_token
    def upload_file(self, file_from, file_to):
        """Upload local file *file_from* to the Dropbox path *file_to*."""
        client = dropbox.Dropbox(self.access_token)
        with open(file_from, 'rb') as source:
            client.files_upload(source.read(), file_to)
def upload_file_to_dropbox(file_path, upload_path):
    """Upload *file_path* to Dropbox at *upload_path*.

    *upload_path* is the full destination path, including the file name.
    """
    transfer = TransferData(DROPBOX_APP)
    transfer.upload_file(file_path, upload_path)
| 27.833333 | 89 | 0.691617 | 285 | 0.284431 | 0 | 0 | 0 | 0 | 0 | 0 | 258 | 0.257485 |
fb1b088b1122df174835b7dea6617c979527dde6
| 32,165 |
py
|
Python
|
tests/card_tests/druid_tests.py
|
anuragpapineni/Hearthbreaker-evolved-agent
|
d519d42babd93e3567000c33a381e93db065301c
|
[
"MIT"
] | null | null | null |
tests/card_tests/druid_tests.py
|
anuragpapineni/Hearthbreaker-evolved-agent
|
d519d42babd93e3567000c33a381e93db065301c
|
[
"MIT"
] | null | null | null |
tests/card_tests/druid_tests.py
|
anuragpapineni/Hearthbreaker-evolved-agent
|
d519d42babd93e3567000c33a381e93db065301c
|
[
"MIT"
] | null | null | null |
import random
import unittest
from hearthbreaker.agents.basic_agents import DoNothingBot
from tests.agents.testing_agents import SelfSpellTestingAgent, EnemySpellTestingAgent, MinionPlayingAgent, \
EnemyMinionSpellTestingAgent, SpellTestingAgent
from hearthbreaker.constants import CHARACTER_CLASS
from hearthbreaker.game_objects import Game
from hearthbreaker.replay import SavedGame
from tests.testing_utils import generate_game_for, StackedDeck, mock
from hearthbreaker.cards import *
class TestDruid(unittest.TestCase):
    def setUp(self):
        # Seed the RNG so card draws and shuffles are reproducible per test.
        random.seed(1857)
def test_Innervate(self):
game = generate_game_for(Innervate, StonetuskBoar, SelfSpellTestingAgent, DoNothingBot)
# triggers all four innervate cards the player is holding.
game.play_single_turn()
self.assertEqual(9, game.current_player.mana)
for turn in range(0, 16):
game.play_single_turn()
# The mana should not go over 10 on turn 9 (or any other turn)
self.assertEqual(10, game.current_player.mana)
def test_Moonfire(self):
game = generate_game_for(Moonfire, StonetuskBoar, EnemySpellTestingAgent, MinionPlayingAgent)
game.play_single_turn()
self.assertEqual(26, game.other_player.hero.health)
def test_Claw(self):
testing_env = self
class ClawAgent(EnemySpellTestingAgent):
def do_turn(self, player):
super().do_turn(player)
testing_env.assertEqual(2, game.current_player.hero.temp_attack)
testing_env.assertEqual(2, game.current_player.hero.armor)
game = generate_game_for(Claw, StonetuskBoar, ClawAgent, MinionPlayingAgent)
game.pre_game()
game.play_single_turn()
def test_Naturalize(self):
game = generate_game_for(StonetuskBoar, Naturalize, MinionPlayingAgent, EnemyMinionSpellTestingAgent)
game.play_single_turn()
game.play_single_turn()
self.assertEqual(0, len(game.other_player.minions))
self.assertEqual(5, len(game.other_player.hand))
def test_Savagery(self):
class SavageryAgent(EnemyMinionSpellTestingAgent):
def do_turn(self, player):
if player.mana > 2:
player.hero.power.use()
super().do_turn(player)
game = generate_game_for(Savagery, BloodfenRaptor, SavageryAgent, MinionPlayingAgent)
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual(1, len(game.other_player.minions))
self.assertEqual(1, game.other_player.minions[0].health)
def test_ClawAndSavagery(self):
game = generate_game_for(BloodfenRaptor, [Claw, Claw, Savagery], MinionPlayingAgent,
EnemyMinionSpellTestingAgent)
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual(1, len(game.current_player.minions))
game.play_single_turn()
self.assertEqual(0, len(game.other_player.minions))
def test_MarkOfTheWild(self):
game = generate_game_for(MarkOfTheWild, StonetuskBoar, EnemyMinionSpellTestingAgent, MinionPlayingAgent)
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual(3, game.other_player.minions[0].calculate_attack())
self.assertEqual(3, game.other_player.minions[0].health)
self.assertEqual(3, game.other_player.minions[0].calculate_max_health())
# Test that this spell is being silenced properly as well
game.other_player.minions[0].silence()
self.assertEqual(1, game.other_player.minions[0].calculate_attack())
self.assertEqual(1, game.other_player.minions[0].health)
self.assertEqual(1, game.other_player.minions[0].calculate_max_health())
def test_PowerOfTheWild(self):
deck1 = StackedDeck([StonetuskBoar(), StonetuskBoar(), PowerOfTheWild()], CHARACTER_CLASS.DRUID)
deck2 = StackedDeck([StonetuskBoar()], CHARACTER_CLASS.MAGE)
# This is a test of the +1/+1 option of the Power Of the Wild Card
game = Game([deck1, deck2], [MinionPlayingAgent(), MinionPlayingAgent()])
game.current_player = game.players[1]
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual(2, game.current_player.minions[0].calculate_attack())
self.assertEqual(2, game.current_player.minions[0].health)
self.assertEqual(2, game.current_player.minions[0].calculate_max_health())
self.assertEqual(2, game.current_player.minions[1].calculate_attack())
self.assertEqual(2, game.current_player.minions[1].calculate_max_health())
# This is a test of the "Summon Panther" option of the Power of the Wild Card
agent = MinionPlayingAgent()
agent.choose_option = mock.Mock(side_effect=lambda *options: options[1])
deck1 = StackedDeck([StonetuskBoar(), StonetuskBoar(), PowerOfTheWild()], CHARACTER_CLASS.DRUID)
deck2 = StackedDeck([StonetuskBoar()], CHARACTER_CLASS.MAGE)
game = Game([deck1, deck2], [agent, MinionPlayingAgent()])
game.current_player = game.players[1]
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual("Panther", game.current_player.minions[2].card.__class__.__name__)
self.assertEqual(3, game.current_player.minions[2].calculate_attack())
self.assertEqual(2, game.current_player.minions[2].calculate_max_health())
def test_WildGrowth(self):
game = generate_game_for(WildGrowth, StonetuskBoar, SelfSpellTestingAgent, DoNothingBot)
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual(3, game.current_player.max_mana)
# Make sure that the case where the player is at 10 mana works as well.
for turn in range(0, 5):
game.play_single_turn()
self.assertEqual(10, game.other_player.max_mana)
card_draw_mock = mock.Mock(side_effect=game.other_player.draw)
game.other_player.draw = card_draw_mock
game.play_single_turn()
# Each time the player draws, they will draw another wild growth, which will turn into excess mana, which will
# draw another card. However, because of the ordering of the cards, the last excess mana will be after
# a wild growth, which prevents SpellTestingAgent from playing the card, so only 5 draws are made instead of the
# possible 6
self.assertEqual(5, card_draw_mock.call_count)
def test_Wrath(self):
game = generate_game_for(Wrath, StonetuskBoar, EnemyMinionSpellTestingAgent, MinionPlayingAgent)
game.play_single_turn()
game.play_single_turn()
self.assertEqual(1, len(game.current_player.minions))
game.play_single_turn()
self.assertEqual(0, len(game.other_player.minions))
self.assertEqual(5, len(game.current_player.hand))
random.seed(1857)
game = generate_game_for(Wrath, MogushanWarden, EnemyMinionSpellTestingAgent, MinionPlayingAgent)
game.players[0].agent.choose_option = lambda one, three: three
for turn in range(0, 8):
game.play_single_turn()
self.assertEqual(1, len(game.current_player.minions))
game.play_single_turn()
self.assertEqual(1, len(game.other_player.minions))
# Two wraths will have been played
self.assertEqual(1, game.other_player.minions[0].health)
def test_HealingTouch(self):
game = generate_game_for(HealingTouch, StonetuskBoar, SelfSpellTestingAgent, DoNothingBot)
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.other_player.hero.health = 20
game.play_single_turn()
self.assertEqual(28, game.current_player.hero.health)
game.play_single_turn()
game.play_single_turn()
self.assertEqual(30, game.current_player.hero.health)
def test_MarkOfNature(self):
deck1 = StackedDeck([StonetuskBoar(), StonetuskBoar(), MarkOfNature()], CHARACTER_CLASS.DRUID)
deck2 = StackedDeck([StonetuskBoar()], CHARACTER_CLASS.MAGE)
game = Game([deck1, deck2], [MinionPlayingAgent(), MinionPlayingAgent()])
game.current_player = 1
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual(5, game.other_player.minions[0].calculate_attack())
deck1 = StackedDeck([StonetuskBoar(), StonetuskBoar(), MarkOfNature()], CHARACTER_CLASS.DRUID)
deck2 = StackedDeck([StonetuskBoar()], CHARACTER_CLASS.MAGE)
agent = MinionPlayingAgent()
agent.choose_option = lambda *options: options[1]
game = Game([deck1, deck2], [agent, MinionPlayingAgent()])
game.current_player = 1
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual(5, game.other_player.minions[0].calculate_max_health())
self.assertEqual(5, game.other_player.minions[0].health)
self.assertTrue(game.other_player.minions[0].taunt)
def test_SavageRoar(self):
deck1 = StackedDeck([StonetuskBoar(), StonetuskBoar(), SavageRoar()], CHARACTER_CLASS.DRUID)
deck2 = StackedDeck([StonetuskBoar()], CHARACTER_CLASS.MAGE)
game = Game([deck1, deck2], [MinionPlayingAgent(), MinionPlayingAgent()])
game.current_player = 1
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
minion_increase_mock = mock.Mock()
game.other_player.minions[0].bind("attack_changed", minion_increase_mock)
game.other_player.minions[1].bind("attack_changed", minion_increase_mock)
player_increase_mock = mock.Mock()
game.other_player.hero.bind("attack_changed", player_increase_mock)
game.play_single_turn()
self.assertEqual(0, game.current_player.mana)
# Make sure the attack got increased
self.assertListEqual([mock.call(2), mock.call(2)], minion_increase_mock.call_args_list)
self.assertListEqual([mock.call(2)], player_increase_mock.call_args_list)
# And make sure that it went down again
self.assertEqual(0, game.current_player.minions[0].temp_attack)
self.assertEqual(0, game.current_player.minions[1].temp_attack)
self.assertEqual(0, game.current_player.hero.calculate_attack())
def test_Bite(self):
testing_env = self
class BiteAgent(EnemySpellTestingAgent):
def do_turn(self, player):
super().do_turn(player)
if player.mana == 0:
testing_env.assertEqual(4, game.current_player.hero.temp_attack)
testing_env.assertEqual(4, game.current_player.hero.armor)
game = generate_game_for(Bite, StonetuskBoar, BiteAgent, DoNothingBot)
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
def test_SoulOfTheForest(self):
game = SavedGame("tests/replays/card_tests/SoulOfTheForest.rep")
game.start()
self.assertEqual(2, len(game.other_player.minions))
self.assertEqual(2, game.other_player.minions[1].calculate_attack())
self.assertEqual(2, game.other_player.minions[1].health)
self.assertEqual("Treant", game.other_player.minions[1].card.name)
def test_Swipe(self):
deck1 = StackedDeck([BloodfenRaptor(), StonetuskBoar(), StonetuskBoar()], CHARACTER_CLASS.DRUID)
deck2 = StackedDeck([Swipe()], CHARACTER_CLASS.DRUID, )
game = Game([deck1, deck2], [MinionPlayingAgent(), EnemyMinionSpellTestingAgent()])
game.pre_game()
game.current_player = game.players[1]
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
spell_damage_mock = mock.Mock()
game.current_player.minions[0].bind('damaged_by_spell', spell_damage_mock)
game.current_player.minions[1].bind('damaged_by_spell', spell_damage_mock)
game.current_player.minions[2].bind('damaged_by_spell', spell_damage_mock)
swipe_card = game.other_player.hand[0]
game.play_single_turn()
self.assertListEqual([mock.call(4, swipe_card), mock.call(1, swipe_card), mock.call(1, swipe_card)],
spell_damage_mock.call_args_list)
# The bloodfen raptor should be left, with one hp
self.assertEqual(1, len(game.other_player.minions))
self.assertEqual(1, game.other_player.minions[0].health)
self.assertEqual(29, game.other_player.hero.health)
def test_KeeperOfTheGrove(self):
# Test Moonfire option
game = generate_game_for(KeeperOfTheGrove, StonetuskBoar, MinionPlayingAgent, MinionPlayingAgent)
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual(3, len(game.current_player.minions))
game.play_single_turn()
self.assertEqual(2, len(game.other_player.minions))
# Test Dispel option
random.seed(1857)
game = generate_game_for(KeeperOfTheGrove, StonetuskBoar, MinionPlayingAgent, MinionPlayingAgent)
game.players[0].agent.choose_option = lambda moonfire, dispel: dispel
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertTrue(game.current_player.minions[0].charge)
game.play_single_turn()
self.assertFalse(game.other_player.minions[0].charge)
# Test when there are no targets for the spell
random.seed(1857)
game = generate_game_for(KeeperOfTheGrove, StonetuskBoar, MinionPlayingAgent, DoNothingBot)
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual(1, len(game.current_player.minions))
self.assertEqual("Keeper of the Grove", game.current_player.minions[0].card.name)
def test_DruidOfTheClaw(self):
game = generate_game_for(DruidOfTheClaw, StonetuskBoar, MinionPlayingAgent, DoNothingBot)
for turn in range(0, 9):
game.play_single_turn()
self.assertEqual(1, len(game.current_player.minions))
self.assertEqual(4, game.current_player.minions[0].calculate_attack())
self.assertEqual(4, game.current_player.minions[0].calculate_max_health())
self.assertTrue(game.current_player.minions[0].charge)
self.assertFalse(game.current_player.minions[0].taunt)
test_bear = game.current_player.minions[0].card.create_minion(None)
test_bear.player = game.current_player
self.assertEqual(4, test_bear.calculate_attack())
self.assertEqual(4, test_bear.calculate_max_health())
self.assertTrue(test_bear.charge)
self.assertFalse(test_bear.taunt)
game.current_player.agent.choose_option = lambda cat, bear: bear
game.play_single_turn()
game.play_single_turn()
self.assertEqual(2, len(game.current_player.minions))
self.assertEqual(4, game.current_player.minions[0].calculate_attack())
self.assertEqual(6, game.current_player.minions[0].calculate_max_health())
self.assertFalse(game.current_player.minions[0].charge)
self.assertTrue(game.current_player.minions[0].taunt)
test_bear = game.current_player.minions[0].card.create_minion(None)
test_bear.player = game.current_player
self.assertEqual(4, test_bear.calculate_attack())
self.assertEqual(6, test_bear.calculate_max_health())
self.assertFalse(test_bear.charge)
self.assertTrue(test_bear.taunt)
def test_Nourish(self):
# Test gaining two mana
game = generate_game_for(Nourish, StonetuskBoar, SpellTestingAgent, DoNothingBot)
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual(7, game.current_player.max_mana)
self.assertEqual(7, len(game.current_player.hand))
# Ensure that the case where we would be over 10 mana is handled correctly
game.play_single_turn()
game.play_single_turn()
# Nourish is played twice. The first brings the player to 10, the second only increases the active mana, not
# max_mana
self.assertEqual(10, game.current_player.max_mana)
self.assertEqual(2, game.current_player.mana)
# Test drawing three cards
random.seed(1857)
game = generate_game_for(Nourish, StonetuskBoar, SpellTestingAgent, DoNothingBot)
game.players[0].agent.choose_option = lambda gain2, draw3: draw3
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual(10, len(game.current_player.hand))
self.assertEqual(5, game.current_player.max_mana)
def test_Starfall(self):
# Test gaining two mana
game = generate_game_for(Starfall, StonetuskBoar, SpellTestingAgent, MinionPlayingAgent)
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual(4, len(game.current_player.minions))
game.play_single_turn()
self.assertEqual(0, len(game.other_player.minions))
self.assertEqual(30, game.other_player.hero.health)
# Test drawing three cards
random.seed(1857)
game = generate_game_for(Starfall, MogushanWarden, SpellTestingAgent, MinionPlayingAgent)
game.players[0].agent.choose_option = lambda damageAll, damageOne: damageOne
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual(1, len(game.other_player.minions))
self.assertEqual(2, game.other_player.minions[0].health)
self.assertEqual(30, game.other_player.hero.health)
def test_ForceOfNature(self):
game = generate_game_for(ForceOfNature, StonetuskBoar, SpellTestingAgent, DoNothingBot)
for turn in range(0, 10):
game.play_single_turn()
def check_minions():
self.assertEqual(3, len(game.current_player.minions))
for minion in game.current_player.minions:
self.assertEqual(2, minion.calculate_attack())
self.assertEqual(2, minion.health)
self.assertEqual(2, minion.calculate_max_health())
self.assertTrue(minion.charge)
self.assertEqual("Treant", minion.card.name)
game.other_player.bind_once("turn_ended", check_minions)
game.play_single_turn()
self.assertEqual(0, len(game.other_player.minions))
def test_Starfire(self):
game = generate_game_for(Starfire, MogushanWarden, EnemyMinionSpellTestingAgent, MinionPlayingAgent)
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual(2, len(game.current_player.minions))
game.play_single_turn()
self.assertEqual(2, len(game.other_player.minions))
self.assertEqual(2, game.other_player.minions[0].health)
self.assertEqual(7, game.other_player.minions[1].health)
self.assertEqual(9, len(game.current_player.hand))
def test_AncientOfLore(self):
game = generate_game_for(AncientOfLore, Starfire, MinionPlayingAgent, EnemySpellTestingAgent)
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual(game.other_player.hero.health, 25)
game.play_single_turn()
self.assertEqual(30, game.current_player.hero.health)
self.assertEqual(1, len(game.current_player.minions))
self.assertEqual(5, game.current_player.minions[0].health)
self.assertEqual(5, game.current_player.minions[0].calculate_attack())
self.assertEqual("Ancient of Lore", game.current_player.minions[0].card.name)
random.seed(1857)
game = generate_game_for(AncientOfLore, StonetuskBoar, MinionPlayingAgent, DoNothingBot)
game.players[0].agent.choose_option = lambda heal, draw: draw
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual(10, len(game.current_player.hand))
self.assertEqual(1, len(game.current_player.minions))
self.assertEqual(5, game.current_player.minions[0].health)
self.assertEqual(5, game.current_player.minions[0].calculate_attack())
self.assertEqual("Ancient of Lore", game.current_player.minions[0].card.name)
def test_AncientOfWar(self):
game = generate_game_for(AncientOfWar, IronbeakOwl, MinionPlayingAgent, MinionPlayingAgent)
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual(1, len(game.current_player.minions))
self.assertEqual(5, game.current_player.minions[0].calculate_attack())
self.assertEqual(10, game.current_player.minions[0].health)
self.assertEqual(10, game.current_player.minions[0].calculate_max_health())
self.assertTrue(game.current_player.minions[0].taunt)
self.assertEqual("Ancient of War", game.current_player.minions[0].card.name)
self.assertEqual(5, len(game.other_player.minions))
game.play_single_turn()
self.assertEqual(6, len(game.current_player.minions))
self.assertEqual(5, game.other_player.minions[0].health)
self.assertEqual(5, game.other_player.minions[0].calculate_max_health())
self.assertFalse(game.other_player.minions[0].taunt)
random.seed(1857)
game = generate_game_for(AncientOfWar, IronbeakOwl, MinionPlayingAgent, MinionPlayingAgent)
game.players[0].agent.choose_option = lambda health, attack: attack
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual(1, len(game.current_player.minions))
self.assertEqual(10, game.current_player.minions[0].calculate_attack())
self.assertEqual(5, game.current_player.minions[0].health)
self.assertEqual(5, game.current_player.minions[0].calculate_max_health())
self.assertFalse(game.current_player.minions[0].taunt)
self.assertEqual("Ancient of War", game.current_player.minions[0].card.name)
self.assertEqual(5, len(game.other_player.minions))
game.play_single_turn()
self.assertEqual(6, len(game.current_player.minions))
self.assertEqual(5, game.other_player.minions[0].health)
self.assertEqual(5, game.other_player.minions[0].calculate_max_health())
self.assertEqual(5, game.other_player.minions[0].calculate_attack())
self.assertFalse(game.other_player.minions[0].taunt)
def test_IronbarkProtector(self):
game = generate_game_for(IronbarkProtector, IronbeakOwl, MinionPlayingAgent, MinionPlayingAgent)
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual(1, len(game.current_player.minions))
self.assertEqual(8, game.current_player.minions[0].calculate_attack())
self.assertEqual(8, game.current_player.minions[0].health)
self.assertEqual(8, game.current_player.minions[0].calculate_max_health())
self.assertTrue(game.current_player.minions[0].taunt)
self.assertEqual("Ironbark Protector", game.current_player.minions[0].card.name)
self.assertEqual(6, len(game.other_player.minions))
game.play_single_turn()
self.assertEqual(7, len(game.current_player.minions))
self.assertFalse(game.other_player.minions[0].taunt)
def test_Cenarius(self):
deck1 = StackedDeck([StonetuskBoar()], CHARACTER_CLASS.DRUID)
deck2 = StackedDeck([WarGolem(), WarGolem(), Cenarius(), Cenarius()], CHARACTER_CLASS.DRUID)
game = Game([deck1, deck2], [DoNothingBot(), MinionPlayingAgent()])
game.pre_game()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual(2, len(game.other_player.minions))
for minion in game.other_player.minions:
self.assertEqual(7, minion.calculate_attack())
self.assertEqual(7, minion.health)
self.assertEqual(7, minion.calculate_max_health())
game.play_single_turn()
self.assertEqual(3, len(game.current_player.minions))
self.assertEqual(5, game.current_player.minions[0].calculate_attack())
self.assertEqual(8, game.current_player.minions[0].health)
self.assertEqual(8, game.current_player.minions[0].calculate_max_health())
self.assertEqual("Cenarius", game.current_player.minions[0].card.name)
for minion_index in range(1, 3):
minion = game.current_player.minions[minion_index]
self.assertEqual(9, minion.calculate_attack())
self.assertEqual(9, minion.health)
self.assertEqual(9, minion.calculate_max_health())
game.players[1].agent.choose_option = lambda stats, summon: summon
game.play_single_turn()
game.play_single_turn()
self.assertEqual(6, len(game.current_player.minions))
self.assertEqual(5, game.current_player.minions[1].calculate_attack())
self.assertEqual(8, game.current_player.minions[1].health)
self.assertEqual(8, game.current_player.minions[1].calculate_max_health())
self.assertEqual("Cenarius", game.current_player.minions[1].card.name)
self.assertEqual(2, game.current_player.minions[0].calculate_attack())
self.assertEqual(2, game.current_player.minions[0].health)
self.assertEqual(2, game.current_player.minions[0].calculate_max_health())
self.assertTrue(game.current_player.minions[0].taunt)
self.assertEqual("Treant", game.current_player.minions[0].card.name)
self.assertEqual(2, game.current_player.minions[2].calculate_attack())
self.assertEqual(2, game.current_player.minions[2].health)
self.assertEqual(2, game.current_player.minions[2].calculate_max_health())
self.assertTrue(game.current_player.minions[2].taunt)
self.assertEqual("Treant", game.current_player.minions[2].card.name)
def test_PoisonSeeds(self):
game = generate_game_for([StonetuskBoar, BloodfenRaptor, IronfurGrizzly, PoisionSeeds],
HauntedCreeper, MinionPlayingAgent, MinionPlayingAgent)
for turn in range(0, 6):
game.play_single_turn()
self.assertEqual(2, len(game.current_player.minions))
self.assertEqual(3, len(game.other_player.minions))
game.play_single_turn()
self.assertEqual(3, len(game.current_player.minions))
self.assertEqual(6, len(game.other_player.minions))
for minion in game.current_player.minions:
self.assertEqual("Treant", minion.card.name)
self.assertEqual(2, minion.calculate_attack())
self.assertEqual(2, minion.calculate_max_health())
for index in range(0, 4):
self.assertEqual("Spectral Spider", game.other_player.minions[index].card.name)
self.assertEqual(1, game.other_player.minions[index].calculate_attack())
self.assertEqual(1, game.other_player.minions[index].calculate_max_health())
self.assertEqual("Treant", game.other_player.minions[4].card.name)
self.assertEqual(2, game.other_player.minions[4].calculate_attack())
self.assertEqual(2, game.other_player.minions[4].calculate_max_health())
self.assertEqual("Treant", game.other_player.minions[5].card.name)
self.assertEqual(2, game.other_player.minions[5].calculate_attack())
self.assertEqual(2, game.other_player.minions[5].calculate_max_health())
| 40.818528 | 120 | 0.684626 | 31,669 | 0.98458 | 0 | 0 | 0 | 0 | 0 | 0 | 1,634 | 0.050801 |
fb1c201f8580630fba487344ba7ea5b8003a9260
| 111 |
py
|
Python
|
projectdjangoportfolio/jobs/admin.py
|
DevLuDaley/Portfolio
|
c7215a3b1e5337f9bbb2863bba598b3064ef69e5
|
[
"MIT"
] | null | null | null |
projectdjangoportfolio/jobs/admin.py
|
DevLuDaley/Portfolio
|
c7215a3b1e5337f9bbb2863bba598b3064ef69e5
|
[
"MIT"
] | null | null | null |
projectdjangoportfolio/jobs/admin.py
|
DevLuDaley/Portfolio
|
c7215a3b1e5337f9bbb2863bba598b3064ef69e5
|
[
"MIT"
] | null | null | null |
# Register the Job model with the Django admin site so it can be managed there.
from django.contrib import admin

from .models import Job

admin.site.register(Job)
| 18.5 | 32 | 0.801802 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 28 | 0.252252 |
fb1c95abaf459d4750608d1b3b3b8a20f69d8f30
| 504 |
py
|
Python
|
KNN/sklearn API.py
|
wu-huipeng/-
|
84f681f7488e45c5f357f558defbc27aaf285a16
|
[
"Apache-2.0"
] | 7 |
2019-09-09T08:55:41.000Z
|
2020-02-08T13:24:59.000Z
|
KNN/sklearn API.py
|
wu-huipeng/machine-learning
|
84f681f7488e45c5f357f558defbc27aaf285a16
|
[
"Apache-2.0"
] | null | null | null |
KNN/sklearn API.py
|
wu-huipeng/machine-learning
|
84f681f7488e45c5f357f558defbc27aaf285a16
|
[
"Apache-2.0"
] | null | null | null |
"""Train and evaluate a default KNN classifier on a synthetic dataset."""
from sklearn.neighbors import KNeighborsClassifier
from sklearn.datasets import make_classification
from sklearn.model_selection import train_test_split

# Build a synthetic classification problem and hold out a test split
features, labels = make_classification(n_samples=1000, n_features=40, random_state=42)
x_train, x_test, y_train, y_test = train_test_split(features, labels, random_state=42)

# Fit the model, predict, and report accuracy on the held-out data
model = KNeighborsClassifier()
model.fit(x_train, y_train)
predictions = model.predict(x_test)
accuracy = model.score(x_test, y_test)
print(accuracy)  # score = 0.76
| 22.909091 | 81 | 0.759921 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 82 | 0.150183 |
fb1d2be116c21e769b849455662ea6590e0d2c00
| 484 |
py
|
Python
|
egg/resources/__init__.py
|
eanorambuena/Driver
|
3cb14f5d741c6bae364326305ae0ded04e10e9d4
|
[
"MIT"
] | null | null | null |
egg/resources/__init__.py
|
eanorambuena/Driver
|
3cb14f5d741c6bae364326305ae0ded04e10e9d4
|
[
"MIT"
] | null | null | null |
egg/resources/__init__.py
|
eanorambuena/Driver
|
3cb14f5d741c6bae364326305ae0ded04e10e9d4
|
[
"MIT"
] | null | null | null |
from egg.resources.console import *
from egg.resources.server import *
from egg.resources.structures import *
from egg.resources.auth import *
from egg.resources.constants import *
from egg.resources.extensions import *
from egg.resources.help import *
from egg.resources.modules import *
from egg.resources.parser import *
from egg.resources.strings import *
from egg.resources.utils import *
from egg.resources.web import *
_author="eanorambuena"
_author_email="[email protected]"
| 32.266667 | 38 | 0.805785 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 34 | 0.070248 |
fb1f5f0f7eda29bb259446b27b1eadc2e86cdafe
| 174 |
py
|
Python
|
eln/commands/news/templates/articles.py
|
lehvitus/eln
|
b78362af20cacffe076bf3dbfd27dcc090e43e39
|
[
"BSD-3-Clause"
] | 2 |
2020-02-05T04:00:32.000Z
|
2020-03-18T02:12:33.000Z
|
eln/commands/news/templates/articles.py
|
oleoneto/eln
|
b78362af20cacffe076bf3dbfd27dcc090e43e39
|
[
"BSD-3-Clause"
] | 1 |
2020-03-18T02:36:04.000Z
|
2020-03-18T02:36:04.000Z
|
eln/commands/news/templates/articles.py
|
oleoneto/eln
|
b78362af20cacffe076bf3dbfd27dcc090e43e39
|
[
"BSD-3-Clause"
] | null | null | null |
from jinja2 import Template

# Jinja2 template rendering one news-article entry as an indented text block.
# Expected render-time variables: index, title, author, publication_date, url.
articles_template = Template("""{{ index }}. {{ title }}
\tAuthor: {{ author }}
\tPublished on: {{ publication_date }}
\tSource: {{ url }}
""")
| 19.333333 | 56 | 0.632184 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 113 | 0.649425 |
fb21c669f057d7c99ae18966e97b4d0e1e081af7
| 255 |
py
|
Python
|
statistical_clear_sky/algorithm/exception.py
|
elsirdavid/StatisticalClearSky
|
bc3aa9de56a9347c10e2afe23af486d32d476273
|
[
"BSD-2-Clause"
] | 16 |
2019-05-09T14:17:22.000Z
|
2022-02-23T18:41:13.000Z
|
statistical_clear_sky/algorithm/exception.py
|
elsirdavid/StatisticalClearSky
|
bc3aa9de56a9347c10e2afe23af486d32d476273
|
[
"BSD-2-Clause"
] | 7 |
2019-07-09T18:32:29.000Z
|
2021-07-01T22:28:32.000Z
|
statistical_clear_sky/algorithm/exception.py
|
tadatoshi/StatisticalClearSky
|
40a7354f8e8b65c8008a56c655dddff700031558
|
[
"BSD-2-Clause"
] | 4 |
2019-12-20T19:15:09.000Z
|
2021-04-29T17:40:40.000Z
|
"""
Defines exceptions used in the context of this module "algorithm"
"""
class ProblemStatusError(Exception):
    """Raised when a solve step of the SCSF algorithm finishes with a
    problem status other than 'optimal'."""
| 28.333333 | 72 | 0.741176 | 179 | 0.701961 | 0 | 0 | 0 | 0 | 0 | 0 | 211 | 0.827451 |
fb223c6e71ca1dc86e9314d86210a866c21d9362
| 220 |
py
|
Python
|
cauldronBase/Base.py
|
Razikus/cauldron
|
cc6e87cb4283efe00ad7e06c98d05e8571883447
|
[
"MIT"
] | null | null | null |
cauldronBase/Base.py
|
Razikus/cauldron
|
cc6e87cb4283efe00ad7e06c98d05e8571883447
|
[
"MIT"
] | null | null | null |
cauldronBase/Base.py
|
Razikus/cauldron
|
cc6e87cb4283efe00ad7e06c98d05e8571883447
|
[
"MIT"
] | null | null | null |
from sqlalchemy.ext.declarative import declarative_base
from flask import Flask
from flask import jsonify
from flask_marshmallow import Marshmallow

# Module-level objects created at import time.
Base = declarative_base()  # SQLAlchemy declarative base for ORM models
app = Flask(__name__)  # the Flask application instance
ma = Marshmallow(app)  # Marshmallow instance bound to the app
| 22 | 55 | 0.827273 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
fb2248e40d3ed11337557c6e7ae9b71e63c167b4
| 3,622 |
py
|
Python
|
BP.py
|
WolfMy/predict_stock
|
7af33404875d19ea93328b8f220d3bd2c0f6d2e5
|
[
"MIT"
] | 1 |
2021-09-28T02:02:05.000Z
|
2021-09-28T02:02:05.000Z
|
BP.py
|
WolfMy/predict_stock
|
7af33404875d19ea93328b8f220d3bd2c0f6d2e5
|
[
"MIT"
] | null | null | null |
BP.py
|
WolfMy/predict_stock
|
7af33404875d19ea93328b8f220d3bd2c0f6d2e5
|
[
"MIT"
] | 1 |
2020-10-13T12:13:43.000Z
|
2020-10-13T12:13:43.000Z
|
import tensorflow as tf
import numpy as np
import pandas as pd
from MACD_RSI import init_train_data
def get_batch(data, label, batch_size, num_epochs):
    """Build a shuffled TF1 input queue over (data, label) and batch it.

    slice_input_producer cycles through the dataset for `num_epochs` epochs
    with shuffling enabled; tf.train.batch then groups samples into
    fixed-size batches (incomplete final batches are dropped because
    allow_smaller_final_batch=False).
    """
    input_queue = tf.train.slice_input_producer([data, label], num_epochs=num_epochs, shuffle=True, capacity=32)
    x_batch, y_batch = tf.train.batch(input_queue, batch_size=batch_size, num_threads=1, capacity=32, allow_smaller_final_batch=False)
    return x_batch, y_batch
def BP(data_train, label_train, input_size, num_classes, learning_rate=0.001, batch_size=64, num_epochs=1000):
    """Train a 3-layer fully-connected network (TF1 graph mode) and return
    the training accuracy on the last batch.

    Architecture: input -> 10 ReLU units -> 7 ReLU units -> softmax output.
    Training uses Adam on softmax cross-entropy, fed by the queue built in
    get_batch; progress is printed every 200 steps.
    """
    X = tf.placeholder(tf.float32, shape=[None, input_size])
    Y = tf.placeholder(tf.float32, shape=[None, num_classes])
    W1 = tf.Variable (tf.random_uniform([input_size,10], 0,1))
    B1 = tf.Variable (tf.zeros([1, 10]))
    hidden_y1 = tf.nn.relu (tf.matmul(X, W1) + B1)
    W2 = tf.Variable (tf.random_uniform([10,7], 0,1))
    B2 = tf.Variable (tf.zeros([1, 7]))
    hidden_y2 = tf.nn.relu (tf.matmul(hidden_y1, W2) + B2)
    W3 = tf.Variable (tf.random_uniform([7, num_classes], 0.1))
    B3 = tf.Variable (tf.zeros([1, num_classes]))
    final_opt = tf.nn.softmax(tf.matmul(hidden_y2, W3) + B3)
    # NOTE(review): final_opt is already softmax-ed, but
    # softmax_cross_entropy_with_logits expects raw logits — confirm intended.
    loss = tf.reduce_mean (tf.nn.softmax_cross_entropy_with_logits (labels = Y, logits = final_opt))
    train_step = tf.train.AdamOptimizer(learning_rate).minimize(loss)
    correct_prediction = tf.equal(tf.argmax(final_opt,1), tf.argmax(Y,1))
    accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))
    x_batch, y_batch = get_batch(data_train, label_train, batch_size, num_epochs)
    with tf.Session() as sess:
        sess.run(tf.local_variables_initializer())
        sess.run(tf.global_variables_initializer())
        # Start the coordinator for the queue-runner threads
        coord = tf.train.Coordinator()
        # Launch the threads that fill the input queue
        threads = tf.train.start_queue_runners(sess, coord)
        epoch = 0
        try:
            while not coord.should_stop():
                # Fetch one batch of batch_size samples and labels
                batch_input, batch_label = sess.run([x_batch, y_batch])
                sess.run (train_step, feed_dict = {X: batch_input, Y: batch_label})
                if epoch % 200 == 0 :
                    train_accuracy = sess.run(accuracy, feed_dict = {X: batch_input, Y: batch_label})
                    #test_accuracy = sess.run(accuracy, feed_dict = {X: data_test, Y: label_test})
                    #print ("step : %d, training accuracy = %g, test_accuracy = %g " % (epoch, train_accuracy, test_accuracy))
                    print ("step : %d, training accuracy = %g " % (epoch, train_accuracy))
                    print("loss:", sess.run(loss, feed_dict={X: batch_input, Y: batch_label}))
                epoch = epoch + 1
        except tf.errors.OutOfRangeError: # raised once num_epochs epochs are exhausted
            print("---Train end---")
        finally:
            # Signal all queue threads to stop...
            coord.request_stop()
            coord.join(threads) # ...and wait for them to finish
            print('---Programm end---')
        # Training done: return the accuracy measured on the last batch
        train_accuracy = sess.run(accuracy, feed_dict = {X: batch_input, Y: batch_label})
        return train_accuracy
# Train one BP network per stock over the date range and rank the stocks
# by the training accuracy returned by BP().
start_date = '2018-11-20'
end_date = '2019-03-01'
stock_list = ['603000', '002230', '300492', '601688']
df = pd.DataFrame(stock_list, columns=['stock'])
train_acc = []
for stock in stock_list:
    data, label = init_train_data(stock, start_date, end_date)
    train_acc.append(BP(data, label, input_size=2, num_classes=3, learning_rate=0.001, batch_size=32))
df['train_acc'] = train_acc
print(df.sort_values(['train_acc'], ascending=False))
| 48.293333 | 134 | 0.655163 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 699 | 0.184921 |
fb2280120f9263888a0811988826a4305d6b98aa
| 3,525 |
py
|
Python
|
bananas/transformers/running_stats.py
|
owahltinez/bananas
|
4d37af1713b7f166ead3459a7004748f954d336e
|
[
"MIT"
] | null | null | null |
bananas/transformers/running_stats.py
|
owahltinez/bananas
|
4d37af1713b7f166ead3459a7004748f954d336e
|
[
"MIT"
] | null | null | null |
bananas/transformers/running_stats.py
|
owahltinez/bananas
|
4d37af1713b7f166ead3459a7004748f954d336e
|
[
"MIT"
] | null | null | null |
""" Threshold-based transformers """
from typing import Dict, Iterable, Union
from ..changemap.changemap import ChangeMap
from ..utils.arrays import flatten, shape_of_array
from .base import ColumnHandlingTransformer
class RunningStats(ColumnHandlingTransformer):
    """
    A helpful transformer that does not perform any transformations is `RunningStats`. It implements a
    number of running statistics on the requested features that can be used by other transformers
    expending that one. Keep reading below for several examples of transformers that extend
    `RunningStats`. Here's an illustration of what `RunningStats` can do:

    ```python
    arr = [random.random() for _ in range(100)]
    transformer = RunningStats()
    transformer.fit(arr)
    transformer.print_stats()

    # Output:
    # col   min_    max_    mean_   count_  stdev_  variance_
    # 0     0.001   0.996   0.586   100.000 0.264   0.070
    ```
    """

    def __init__(self, columns: Union[Dict, Iterable[int]] = None, verbose: bool = False, **kwargs):
        """
        Parameters
        ----------
        columns : Union[Dict, Iterable[int]]
            Columns to track statistics for; presumably interpreted by
            `ColumnHandlingTransformer` — TODO confirm exact semantics.
        verbose : bool
            Verbosity flag forwarded to the base class.
        """
        super().__init__(columns=columns, verbose=verbose, **kwargs)

        # Initialize working variables: per-column running statistics,
        # each a dict keyed by column index.
        self.max_ = {}
        self.min_ = {}
        self.mean_ = {}
        self.count_ = {}
        self.stdev_ = {}
        # Running sum of squared deviations (the "M2" accumulator of the
        # on-line variance algorithm); internal only.
        self.variance_ = {}
        self._delta_squared_ = {}

    def fit(self, X):
        """Update the running statistics with the columns of `X`; return self."""
        X = self.check_X(X)
        for i, col in enumerate(X):
            if i not in self.columns_:
                continue

            # High dimensional data, like images, is treated as a 1D list
            shape = shape_of_array(col)
            if len(shape) > 1:
                col = flatten(col)

            # Computing max / min is trivial
            sample_max = max(col)
            sample_min = min(col)
            self.max_[i] = max(self.max_.get(i, sample_max), sample_max)
            self.min_[i] = min(self.min_.get(i, sample_min), sample_min)

            # Use on-line algorithm to compute variance, which unfortunately requires iterating
            # https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#On-line_algorithm
            for val in col:
                prev_mean = self.mean_.get(i, 0.0)
                self.count_[i] = self.count_.get(i, 0) + 1
                self.mean_[i] = prev_mean + (val - prev_mean) / self.count_[i]
                self._delta_squared_[i] = self._delta_squared_.get(i, 0.0) + (
                    val - self.mean_[i]
                ) * (val - prev_mean)
                # Population variance (divides by N, not N - 1)
                self.variance_[i] = self._delta_squared_[i] / self.count_[i]
                self.stdev_[i] = self.variance_[i] ** 0.5

        return self

    def on_input_shape_changed(self, change_map: ChangeMap):
        """Remap the stored per-column statistics when input columns change."""
        # Parent's callback will take care of adapting feature changes
        super().on_input_shape_changed(change_map)
        # We still need to adapt feature changes to internal data
        self._input_change_column_adapter(
            change_map, ["min_", "max_", "mean_", "count_", "stdev_", "variance_"]
        )

    def print_stats(self):
        """Print a tab-separated table of the tracked statistics per column."""
        stats = ["min_", "max_", "mean_", "count_", "stdev_", "variance_"]
        print()
        print("\t".join(["col"] + stats))
        for col in self.columns_.keys():
            print(
                "%d\t%s" % (col, "\t".join(["%.03f" % getattr(self, stat)[col] for stat in stats]))
            )
        print()
| 36.340206 | 102 | 0.588652 | 3,304 | 0.937305 | 0 | 0 | 0 | 0 | 0 | 0 | 1,369 | 0.388369 |
fb22dc5bf5bd9013f1aaf4889e65a75279b6e791
| 197 |
py
|
Python
|
1-beginner/1013.py
|
alenvieira/uri-online-judge-solutions
|
ca5ae7064d84af4dae12fc37d4d14ee441e49d06
|
[
"MIT"
] | null | null | null |
1-beginner/1013.py
|
alenvieira/uri-online-judge-solutions
|
ca5ae7064d84af4dae12fc37d4d14ee441e49d06
|
[
"MIT"
] | null | null | null |
1-beginner/1013.py
|
alenvieira/uri-online-judge-solutions
|
ca5ae7064d84af4dae12fc37d4d14ee441e49d06
|
[
"MIT"
] | null | null | null |
# URI 1013: read three integers and print the largest, using the
# max(a, b) = (a + b + |a - b|) / 2 identity required by the problem.
a, b, c = (int(token) for token in input().split(' '))
larger_ab = (a + b + abs(a - b)) / 2
largest = int((larger_ab + c + abs(larger_ab - c)) / 2)
print('{} eh o maior'.format(largest))
| 32.833333 | 57 | 0.568528 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 18 | 0.091371 |
fb24589362ad5872b751372ecb28f328c4ec3892
| 1,907 |
py
|
Python
|
utils.py
|
psergal/quiz
|
9420db013a4ca0662471f716ed5fc1f9cfb2502a
|
[
"MIT"
] | null | null | null |
utils.py
|
psergal/quiz
|
9420db013a4ca0662471f716ed5fc1f9cfb2502a
|
[
"MIT"
] | null | null | null |
utils.py
|
psergal/quiz
|
9420db013a4ca0662471f716ed5fc1f9cfb2502a
|
[
"MIT"
] | null | null | null |
import argparse
from pathlib import Path
import random
def get_args():
    """Parse the command-line options for launching the VK chat bot."""
    parser = argparse.ArgumentParser(description='Launching VK chat bot')
    parser.add_argument('-m', '--memcached_server',
                        default='redis-12388.c52.us-east-1-4.ec2.cloud.redislabs.com',
                        help='Set the server to store and retrieve questions and answers. Redislabs set default')
    parser.add_argument('-c', '--memcached_charset', default='utf-8',
                        help='Set the charset for the stored values. utf-8 set default')
    return parser.parse_args()
def get_quiz_questions():
    """Load a random quiz file and return {line_no: [header, {'q': ..., 'a': ...}]}.

    Picks a random file from ./quiz-questions (KOI8-R encoded), finds every
    'Вопрос ...:' header line, then collects the question text up to the
    first blank line and the answer text that follows it.
    """
    local_path = Path('.')
    quiz_path = local_path / 'quiz-questions'
    quiz_files = list(quiz_path.glob('*.*'))
    quiz_file = random.choice(quiz_files)
    quiz_lines = quiz_file.read_text(encoding='KOI8-R').splitlines()
    # Map 1-based line numbers of question-header lines to the header text;
    # index `n` into quiz_lines is then the line right after header `n`.
    question_lines_dict = {line_number: file_quiz_line
                           for line_number, file_quiz_line in enumerate(quiz_lines, start=1)
                           if file_quiz_line.startswith('Вопрос') and file_quiz_line.endswith(':')}
    for quiz_question in question_lines_dict:
        question_text, answer_text = '', ''
        q_flag = True  # True while accumulating the question, False once in the answer section
        for quiz_text_line in quiz_lines[quiz_question::]:
            if quiz_text_line and q_flag:
                question_text = f'{question_text}{quiz_text_line} '
            elif q_flag:
                # First blank line ends the question section
                q_flag = False
                continue
            if not quiz_text_line and not q_flag:
                # Blank line after the answer: this entry is complete
                break
            elif quiz_text_line.startswith('Ответ:'):
                # Skip the answer label line itself
                continue
            elif not q_flag:
                answer_text = f'{answer_text}{quiz_text_line} '
        question_lines_dict[quiz_question] = [question_lines_dict.get(quiz_question),
                                              {'q': question_text, 'a': answer_text}]
    return question_lines_dict
| 44.348837 | 114 | 0.624541 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 414 | 0.21585 |
fb2468353432def77628e6adb7da27b64ec4c1b6
| 2,684 |
py
|
Python
|
text-editor.py
|
Shubham05178/Text-Editor
|
82fff346880bb9e2088a16af20695bb46d68d29a
|
[
"MIT"
] | 1 |
2021-09-24T16:13:14.000Z
|
2021-09-24T16:13:14.000Z
|
text-editor.py
|
Shubham05178/Text-Editor
|
82fff346880bb9e2088a16af20695bb46d68d29a
|
[
"MIT"
] | null | null | null |
text-editor.py
|
Shubham05178/Text-Editor
|
82fff346880bb9e2088a16af20695bb46d68d29a
|
[
"MIT"
] | null | null | null |
#Author Shubham Nagaria. ShubhamLabs.
#Coder.
#[email protected]
import tkinter
from tkinter import *
from tkinter import Tk, scrolledtext, filedialog, messagebox
# Main application window and the scrollable text area that fills it.
root = Tk(className=" TextEditor-ShubhamLabs") #name your texteditor in quotes
textPad = scrolledtext.ScrolledText(root, width=100, height=80)
#Defining Menu bar and sub-commands
def about():
    """Open an "About" window with a read-only description of the app.

    Uses a Toplevel child of the main window instead of a second Tk()
    instance: two Tk roots run separate Tcl interpreters, and the nested
    win.mainloop() the original called would block the outer event loop.
    """
    win = Toplevel(root)
    win.wm_title("About")
    frame1 = Frame(
        master = win,
        bg = '#800000'
        )
    frame1.pack(fill='both', expand='yes')
    editArea = Text(
        master = frame1,
        wrap   = WORD,
        width  = 30,
        height = 15)
    # Don't use widget.place(), use pack or grid instead, since
    # they behave better on scaling the window -- and you don't
    # have to calculate positions manually!
    editArea.pack(padx=10, pady=10, fill=BOTH, expand=True)
    # Adding some text in About.
    editArea.insert(INSERT,
    """
This Software was created by Shubham Nagaria as a final year undergrad at GLA University,Mathura. This software was programmed in Python 3.5. Tkinter was used to create GUI version. [email protected], mail me for doubts, queries or any other stuff.
    """)
    editArea.configure(state='disabled')  # make the About text read-only
def save_file():
    """Prompt for a destination file and write the editor contents to it."""
    file = filedialog.asksaveasfile(mode='w')
    if file is None:
        return
    # END includes an extra trailing newline; '-1c' drops that last character
    contents = textPad.get('1.0', END+'-1c')
    file.write(contents)
    file.close()
def open_file():
    """Prompt for a file and insert its raw contents at the top of the editor."""
    file = filedialog.askopenfile(parent=root, mode='rb', title='Select file')
    if file is None:
        return
    textPad.insert('1.0', file.read())
    file.close()
def exit_file():
    """Ask for confirmation and destroy the main window if the user agrees."""
    confirmed = messagebox.askokcancel("Exit", """Are you sure you want to exit?
    Shubhamlabs thanks you for using our Code.""")
    if confirmed:
        root.destroy()
def begin():
    """Placeholder handler for File > New; prints a message to confirm wiring."""
    print("Shubhamlabs thanks you for using our code.")
#Adding menus to our text editor.
menu = Menu(root)
root.config(menu=menu)
# File menu: New / Open / Save / Exit
file_menu = Menu(menu)
menu.add_cascade(label="File", menu=file_menu)
file_menu.add_command(label="New", command=begin)
file_menu.add_command(label="Open", command=open_file)
file_menu.add_command(label="Save", command=save_file)
file_menu.add_separator()
file_menu.add_command(label="Exit", command=exit_file)
# Help menu: About dialog
help_menu = Menu(menu)
menu.add_cascade(label="Help", menu=help_menu)
help_menu.add_command(label="About", command=about)
textPad.pack()  # place the editing area in the window
root.mainloop()  # start the Tk event loop
| 32.731707 | 256 | 0.676975 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,104 | 0.411326 |
fb27d1e80db32688183c68acefc2b5b91660e81e
| 852 |
py
|
Python
|
src/core/schemas/entries/event.py
|
nefarius/portfolio-backend
|
f595041354eedee71a4aa5b761501be030b81d09
|
[
"Apache-2.0"
] | 6 |
2019-06-19T12:56:42.000Z
|
2021-12-26T07:22:47.000Z
|
src/core/schemas/entries/event.py
|
nefarius/portfolio-backend
|
f595041354eedee71a4aa5b761501be030b81d09
|
[
"Apache-2.0"
] | 13 |
2019-12-20T10:39:44.000Z
|
2022-02-10T09:11:09.000Z
|
src/core/schemas/entries/event.py
|
nefarius/portfolio-backend
|
f595041354eedee71a4aa5b761501be030b81d09
|
[
"Apache-2.0"
] | 1 |
2021-12-01T12:03:29.000Z
|
2021-12-01T12:03:29.000Z
|
from ...schemas import ICON_EVENT
from ...skosmos import get_collection_members
from ..base import BaseSchema
from ..general import get_contributors_field, get_date_range_time_range_location_group_field, get_url_field
from ..utils import years_from_date_range_time_range_location_group_field
# Icon identifier and the taxonomy collection of event types for this schema.
ICON = ICON_EVENT
TYPES = get_collection_members('http://base.uni-ak.ac.at/portfolio/taxonomy/collection_event', use_cache=False)
class EventSchema(BaseSchema):
    """Schema for 'event' portfolio entries."""

    contributors = get_contributors_field({'order': 1})
    date_range_time_range_location = get_date_range_time_range_location_group_field({'order': 2})
    url = get_url_field({'order': 3})

    def year_display(self, data):
        """Derive the display years from the date/time/location group, if any."""
        group = data.get('date_range_time_range_location')
        if not group:
            return None
        return years_from_date_range_time_range_location_group_field(group)
| 42.6 | 112 | 0.805164 | 425 | 0.498826 | 0 | 0 | 0 | 0 | 0 | 0 | 147 | 0.172535 |
fb28158cf8145cabc88ee46d040cb82c54962f04
| 787 |
py
|
Python
|
fn/instaAPI.py
|
elsou/ETSE-Warbot
|
4fd5351688e3cd81d9eeed50586027830dba0c5b
|
[
"MIT"
] | 2 |
2021-11-09T23:14:53.000Z
|
2021-11-11T01:09:28.000Z
|
fn/instaAPI.py
|
elsou/etse-warbot
|
4fd5351688e3cd81d9eeed50586027830dba0c5b
|
[
"MIT"
] | null | null | null |
fn/instaAPI.py
|
elsou/etse-warbot
|
4fd5351688e3cd81d9eeed50586027830dba0c5b
|
[
"MIT"
] | null | null | null |
from instabot import Bot
import os
import shutil
import time
# Dado un tweet (str) e imaxe (str '*.jpeg'), publica o contido en instagram
def upload(tweet, imaxe):
    """Log in to Instagram and publish the image with the tweet text as caption."""
    clean_up()
    insta_bot = Bot()
    insta_bot.login(username="usename", password="password")
    time.sleep(1)  # brief pause after login before uploading
    insta_bot.upload_photo(imaxe, caption=tweet)
# ...
def clean_up():
    """Remove instabot artifacts so the next upload starts from a clean state.

    Deletes the ``../config`` session folder and restores the image that was
    renamed to ``imgs/img.jpg.REMOVE_ME`` after a previous upload.
    """
    config_dir = "../config"
    # os.path.join keeps the paths portable; the original hard-coded
    # backslash strings only worked on Windows and contained the invalid
    # escape sequence "\i".
    renamed_img = os.path.join("imgs", "img.jpg.REMOVE_ME")
    # checking whether config folder exists or not
    if os.path.exists(config_dir):
        try:
            # removing it so we can upload a new image
            shutil.rmtree(config_dir)
        except OSError as e:
            print("Error: %s - %s." % (e.filename, e.strerror))
    if os.path.exists(renamed_img):
        original_img = os.path.realpath(os.path.join("imgs", "img.jpg"))
        os.rename(renamed_img, original_img)
| 23.848485 | 76 | 0.617535 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 252 | 0.320203 |
fb29a12d8ace63ae0afef58e7d2a5734abf0e3c4
| 2,088 |
py
|
Python
|
memories/api/utils.py
|
marchelbling/memories-api
|
e82d6c6ae2b7873fc35ebb301fc073e3ef968a1e
|
[
"MIT"
] | null | null | null |
memories/api/utils.py
|
marchelbling/memories-api
|
e82d6c6ae2b7873fc35ebb301fc073e3ef968a1e
|
[
"MIT"
] | null | null | null |
memories/api/utils.py
|
marchelbling/memories-api
|
e82d6c6ae2b7873fc35ebb301fc073e3ef968a1e
|
[
"MIT"
] | null | null | null |
import time
import logging
from functools import wraps
logging.basicConfig(level=logging.INFO,
format='[%(asctime)s] [%(levelname)s] %(message)s')
def timehttp(endpoint):
@wraps(endpoint)
def timed(self, request, response):
start = time.time()
result = endpoint(self, request, response)
logging.info("{uri} => {status} {response} in {timing:.3f}s"
.format(uri=request.uri,
status=response.status,
response=response.body,
timing=time.time() - start))
return result
return timed
def timeit(func):
    """Decorator that logs each call of *func* with its arguments, result and timing.

    Positional arguments are matched to parameter names, with trailing defaults
    filled in for parameters that were not supplied.  Surplus positional
    arguments and keyword arguments are reported under ``args``/``kwargs``.

    Fixes vs. the previous version: ported from Python 2 (``func.func_code``,
    ``func.func_name``, ``unicode``) to Python 3, and surplus positional
    arguments are no longer silently dropped (they were sliced from an
    already-truncated tuple before).
    """
    @wraps(func)
    def timed(*args, **kwargs):
        code = func.__code__
        argument_names = code.co_varnames[:code.co_argcount]
        arguments = args[:code.co_argcount]
        defaults = func.__defaults__ or ()
        missing = code.co_argcount - len(arguments)
        if missing > 0:
            # fill in trailing defaults for named parameters that were not passed
            arguments += defaults[len(defaults) - missing:]
        params = list(zip(argument_names, arguments))
        extra = args[code.co_argcount:]  # surplus positional args (*args)
        if extra:
            params.append(('args', extra))
        if kwargs:
            params.append(('kwargs', kwargs))
        call = func.__name__ + '(' + ', '.join('%s=%r' % p for p in params) + ')'
        start = time.time()
        result = func(*args, **kwargs)
        logging.info("{call} = {result} [{timing:.3f}s]"
                     .format(call=call,
                             result=result,
                             timing=time.time() - start))
        return result
    return timed
def safe_utf8(string):
    """Return *string* as UTF-8 encoded ``bytes``, or ``None`` if it is ``None``.

    ``bytes`` input is decoded with a fallback chain (utf-8, then latin-1,
    then utf-8 with replacement characters) before re-encoding, so invalid
    byte sequences never raise.

    Fix vs. the previous version: ported from Python 2 (``unicode``) to
    Python 3 ``str``/``bytes`` with the same fallback semantics.
    """
    def safe_decode(raw):
        # try utf-8 first; latin-1 accepts any byte, so the final
        # 'replace' branch is only a defensive last resort
        try:
            return raw.decode('utf8')
        except UnicodeError:
            try:
                return raw.decode('latin1')
            except UnicodeError:
                return raw.decode('utf8', 'replace')
    if string is None:
        return string
    if not isinstance(string, str):
        string = safe_decode(string)
    return string.encode('utf8')
| 33.677419 | 93 | 0.561782 | 0 | 0 | 0 | 0 | 1,377 | 0.659483 | 0 | 0 | 211 | 0.101054 |
fb2abd56368d34ecbc07a95fe8f4a470ff486455
| 24,641 |
py
|
Python
|
cssedit/editor/gui_qt.py
|
albertvisser/cssedit
|
e17ed1b43a0e4d50bfab6a69da47b92cd3213724
|
[
"MIT"
] | null | null | null |
cssedit/editor/gui_qt.py
|
albertvisser/cssedit
|
e17ed1b43a0e4d50bfab6a69da47b92cd3213724
|
[
"MIT"
] | null | null | null |
cssedit/editor/gui_qt.py
|
albertvisser/cssedit
|
e17ed1b43a0e4d50bfab6a69da47b92cd3213724
|
[
"MIT"
] | null | null | null |
"""cssedit: PyQt specific stuff
"""
import sys
import os
import PyQt5.QtWidgets as qtw
import PyQt5.QtGui as gui
import PyQt5.QtCore as core
from .cssedit import parse_log_line, get_definition_from_file
class MainGui(qtw.QMainWindow):
    """Main window of the application.

    Thin Qt view driven by a ``master`` controller object: the controller
    supplies the title, icon name and action dictionary, and this class only
    builds/updates the widgets.
    """
    def __init__(self, master, app, title='', pos=(0, 0), size=(800, 500)):
        self.master = master
        if not app:
            selfcontained = True  # NOTE(review): assigned but never used
            self.app = qtw.QApplication(sys.argv)
        else:
            self.app = app
        print('in csseditor.maingui, app=', self.app)  # NOTE(review): debug leftover
        super().__init__()
        self.set_window_title()
        if self.master.app_iconame:
            self.setWindowIcon(gui.QIcon(self.master.app_iconame))
        # keep the window away from the screen corner; larger offset on non-posix
        offset = 40 if os.name != 'posix' else 10
        self.move(pos[0] + offset, pos[1] + offset)
        self.resize(size[0], size[1])
        self.statusbar = self.statusBar()
        self.tree = TreePanel(self)
        self.setCentralWidget(self.tree)
    def create_menu(self, menudata):
        """Build the menu bar (and most toolbars) from the given menu data.

        *menudata* is a sequence of ``(menutitle, entries)`` pairs where each
        entry is ``(label, handler, shortcut, icon, info)`` or empty for a
        separator.  Created actions are registered in ``master.actiondict``.
        """
        menubar = self.menuBar()
        self.menus = {}  # we may need this if we need to do something with specific menus later
        for item, data in menudata:
            menu = menubar.addMenu(item)
            self.menus[item] = menu
            for menudef in data:
                if not menudef:
                    menu.addSeparator()
                    continue
                label, handler, shortcut, icon, info = menudef
                if isinstance(handler, tuple):  # TODO: find a nicer way
                    submenu = menu.addMenu(label)
                    for item in handler:
                        # define submenu options
                        pass
                    continue
                if icon:
                    # NOTE(review): ``HERE`` is not defined anywhere in this
                    # module's visible code — this branch looks like it would
                    # raise NameError; confirm where HERE should come from.
                    action = qtw.QAction(gui.QIcon(os.path.join(HERE, icon)), label,
                                         self)
                    ## if not toolbar_added:
                    ## toolbar = self.addToolBar(item)
                    ## toolbar.setIconSize(core.QSize(16, 16))
                    ## toolbar_added = True
                    ## toolbar.addAction(action)
                else:
                    action = qtw.QAction(label, self)
                ## if item == menudata[3][0]:
                ## if label == '&Undo':
                ## self.undo_item = action
                ## elif label == '&Redo':
                ## self.redo_item = action
                if shortcut:
                    action.setShortcuts([x for x in shortcut.split(",")])
                ## if info.startswith("Check"):
                ## action.setCheckable(True)
                if info:
                    action.setStatusTip(info)
                action.triggered.connect(handler)
                # action.triggered.connect(handler) does not work here
                if label:
                    menu.addAction(action)
                    self.master.actiondict[label] = action
    def just_show(self):
        """Run standalone: show the window and enter the Qt event loop."""
        self.show()
        sys.exit(self.app.exec_())
    def set_modality_and_show(self, modal):
        """Set blocking behaviour when driven from e.g. Htmledit, then show."""
        print('in csseditorgui.set_modality_and_show, modal is', modal)  # NOTE(review): debug leftover
        modality = core.Qt.ApplicationModal if modal else core.Qt.NonModal
        self.setWindowModality(modality)
        self.show()
    def show_message(self, text, title=""):
        "show a message in a box with a title"
        title = title or self.master.app_title
        qtw.QMessageBox.information(self, title, text)
    def show_statusmessage(self, text):
        "set the message at the bottom of the window"
        self.statusbar.showMessage(text)
    def close(self):
        """reimplemented method from superclass
        """
        self.master.close()
        super().close()
    def set_window_title(self, title=''):
        "set the title for the GUI window"
        title = title or self.master.app_title
        self.setWindowTitle(title)
    def set_waitcursor(self, on):
        "set cursor to clock or back to default"
        if on:
            self.app.setOverrideCursor(gui.QCursor(core.Qt.WaitCursor))
        else:
            self.app.restoreOverrideCursor()
    def show_save_dialog(self, start, filter):
        "get name of file to save"
        return qtw.QFileDialog.getSaveFileName(self, self.master.app_title, start, filter)[0]
    def show_open_dialog(self, start, filter):
        "get name of file to open"
        return qtw.QFileDialog.getOpenFileName(self, self.master.app_title, start, filter)[0]
    def get_input_text(self, prompt):
        "get text from user input"
        return qtw.QInputDialog.getText(self, self.master.app_title, prompt)
    def get_input_choice(self, prompt, choices, editable=False):
        "get user to choice from a list of options"
        return qtw.QInputDialog.getItem(self, self.master.app_title, prompt, choices, editable)
    def show_dialog(self, cls, *args):
        "show and return the results of a dialog"
        # the dialog is expected to store its result in self.dialog_data
        edt = cls(self, *args).exec_()
        if edt == qtw.QDialog.Accepted:
            return True, self.dialog_data
        else:
            return False, None
class TreePanel(qtw.QTreeWidget):
    "Tree structure"
    # NOTE(review): the @classmethod helpers below name their first parameter
    # ``self`` although it receives the class; rename to ``cls`` when this
    # code is next modified.
    def __init__(self, parent):
        self.parent = parent
        super().__init__()
        self.setColumnCount(2)
        self.hideColumn(1)
        self.headerItem().setHidden(True)
        ## self.setAcceptDrops(True)
        ## self.setDragEnabled(True)
        self.setSelectionMode(self.SingleSelection)
        ## self.setDragDropMode(self.InternalMove)
        ## self.setDropIndicatorShown(True)
        self.setUniformRowHeights(True)
    def selectionChanged(self, newsel, oldsel):
        """called when the selection has changed:
        the text of the old selection is updated in the itemdict
        and that of the new selection is looked up and shown"""
        # unfortunately newsel and oldsel are not easy to convert to tree items
        # self.parent.check_active()
        # h = self.currentItem()
        # self.parent.activate_item(h)
    def dropEvent(self, event):
        """called when a dragged item is released over another one (the drop item);
        it always ends up *under* the drop item as its last child.
        This method extends the TreeWidget method with some visual bookkeeping.
        """
        # copied from DocTree but not implemented yet
        # dragitem = self.selectedItems()[0]
        # dragparent = dragitem.parent()
        # dropitem = self.itemAt(event.pos())
        # if not dropitem:
        #     # ## event.ignore()
        #     return
        # qtw.QTreeWidget.dropEvent(self, event)
        # count = self.topLevelItemCount()
        # if count > 1:
        #     for ix in range(count):
        #         if self.topLevelItem(ix) == dragitem:
        #             self.takeTopLevelItem(ix)
        #             self.oldparent.insertChild(self.oldpos, dragitem)
        #             self.setCurrentItem(dragitem)
        #             break
        #     return
        # self.parent.set_project_dirty(True)
        # self.setCurrentItem(dragitem)
        # dropitem.setExpanded(True)
        super().dropEvent(event)
    def mousePressEvent(self, event):
        """remember the current parent in preparation for "canceling" a dragmove
        """
        # copied from DocTree but not implemented yet
        # xc, yc = event.x(), event.y()
        # item = self.itemAt(xc, yc)
        # if item:
        #     self.oldparent, self.oldpos = self._getitemparentpos(item)
        super().mousePressEvent(event)
    def mouseReleaseEvent(self, event):
        "for showing a context menu"
        # copied from DocTree but not implemented yet
        # if event.button() == core.Qt.RightButton:
        #     xc, yc = event.x(), event.y()
        #     item = self.itemAt(xc, yc)
        #     if item:
        #         self.create_popupmenu(item)
        #         return
        super().mouseReleaseEvent(event)
    def keyReleaseEvent(self, event):
        "also for showing a context menu"
        # copied from DocTree but not implemented yet
        # if event.key() == core.Qt.Key_Menu:
        #     item = self.currentItem()
        #     self.create_popupmenu(item)
        #     return
        super().keyReleaseEvent(event)
    def create_popupmenu(self, item):
        """create a menu in the right place"""
        # copied from DocTree but not implemented yet
        # menu = qtw.QMenu()
        # for action in self.parent.notemenu.actions():
        #     act = menu.addAction(action)
        #     if item == self.parent.root and action.text() in ('&Add', '&Delete',
        #             '&Forward', '&Back'):
        #         action.setEnabled(False)
        # menu.addSeparator()
        # for action in self.parent.treemenu.actions():
        #     menu.addAction(action)
        #     if item == self.parent.root:
        #         action.setEnabled(False)
        # menu.exec_(self.mapToGlobal(self.visualItemRect(item).center()))
        # if item == self.parent.root:
        #     for action in self.parent.notemenu.actions():
        #         if item == self.parent.root and action.text() in ('&Add', '&Delete',
        #                 '&Forward', '&Back'):
        #             action.setEnabled(True)
        #     for action in self.parent.treemenu.actions():
        #         action.setEnabled(True)
    def remove_root(self):
        "remove the top-level (root) item from the tree"
        self.takeTopLevelItem(0)
    def init_root(self):
        "create a fresh '(untitled)' root item and add it to the tree"
        self.root = qtw.QTreeWidgetItem()
        self.root.setText(0, "(untitled)")
        self.addTopLevelItem(self.root)
    def set_root_text(self, text):
        "set the caption of the root item"
        self.root.setText(0, text)
    def get_root(self):
        "return the root item"
        return self.root
    def activate_rootitem(self):
        "make the root item the current selection"
        self.setCurrentItem(self.root)
    def set_activeitem(self, item):
        "remember *item* as the active item"
        self.activeitem = item
    def set_focus(self):
        "give keyboard focus to the tree"
        self.setFocus()
    def add_to_parent(self, titel, parent, pos=-1):
        """shortcut for new_treeitem + add_subitem
        """
        titel = titel.rstrip()
        new = self.new_treeitem(titel)
        self.add_subitem(parent, new, pos)
        return new
    def setcurrent(self, item):
        "make *item* the current selection"
        self.setCurrentItem(item)
    def getcurrent(self):
        "return the currently selected item"
        return self.currentItem()
    @classmethod
    def new_treeitem(self, itemtext):
        """build new item for tree
        """
        item = qtw.QTreeWidgetItem()
        item.setText(0, itemtext)
        item.setToolTip(0, itemtext)
        return item
    @classmethod
    def add_subitem(self, parent, child, ix=-1):
        "add a subnode to a node. If ix is provided, it should indicate a position"
        if ix == -1:
            parent.addChild(child)
        else:
            parent.insertChild(ix, child)
    @classmethod
    def remove_subitem(self, parent, ix):
        "remove a subnode from a node. If ix is provided, it should indicate a position"
        parent.takeChild(ix)
    @classmethod
    def get_subitems(self, item):
        "returns a list of a tree item's children"
        return [item.child(i) for i in range(item.childCount())]
    @classmethod
    def set_itemtext(self, item, itemtext):
        "sets the text of a tree item"
        item.setText(0, itemtext)
        item.setToolTip(0, itemtext)
    @classmethod
    def get_itemtext(self, item):
        "returns the text of a tree item"
        return item.text(0)
    @classmethod
    def getitemparentpos(self, item):
        "return parent of current item and sequential position under it"
        root = item.parent()
        if root:
            pos = root.indexOfChild(item)
        else:
            pos = -1
        return root, pos
    @classmethod
    def expand_item(self, item):
        "show the item's subitems"
        item.setExpanded(True)
    @classmethod
    def collapse_item(self, item):
        "hide the item's subitems"
        item.setExpanded(False)
class LogDialog(qtw.QDialog):
    "Simple Log display"
    # prefix shown above the context text in show_context's message box
    text = "css definition that triggers this message:\n\n"
    def __init__(self, parent, log):
        """Build the dialog for *log* (a list of log lines) and run it modally."""
        self.parent = parent
        super().__init__(parent)
        self.setWindowTitle(self.parent.master.app_title + " - show log for current file")
        ## self.setWindowIcon(self.parent.app_icon)
        txt = qtw.QLabel("Dubbelklik op een regel om de context "
                         "(definitie in de css) te bekijken")
        self.lijst = qtw.QListWidget(self)
        ## self.lijst.setSelectionMode(gui.QAbstractItemView.SingleSelection)
        self.lijst.addItems(log)
        b1 = qtw.QPushButton("&Toon Context", self)
        b1.clicked.connect(self.show_context)
        b2 = qtw.QPushButton("&Klaar", self)
        b2.clicked.connect(self.done)
        vbox = qtw.QVBoxLayout()
        hbox = qtw.QHBoxLayout()
        hbox.addWidget(txt)
        vbox.addLayout(hbox)
        hbox = qtw.QHBoxLayout()
        hbox.addWidget(self.lijst)
        vbox.addLayout(hbox)
        hbox = qtw.QHBoxLayout()
        hbox.addWidget(b1)
        hbox.addWidget(b2)
        hbox.insertStretch(0, 1)
        hbox.addStretch(1)
        vbox.addLayout(hbox)
        self.setLayout(vbox)
        self.resize(600, 480)
        # NOTE(review): running exec_() inside __init__ means constructing the
        # dialog blocks until it is closed — confirm callers expect that.
        self.exec_()
    def itemDoubleClicked(self, item):
        """handler for doubleclicking over a line
        """
        # NOTE(review): this is not a Qt virtual method and no signal is
        # connected to it in the visible code — verify it is actually invoked.
        self.show_context(item)
    def show_context(self, item=None):
        """show full logline (in case it's been chopped off)
        and the definition that triggered it
        """
        # determine selected line in the list and get associated data
        # import pdb; pdb.set_trace()
        selected = item or self.lijst.currentItem()
        y = parse_log_line(selected.text())
        context = get_definition_from_file(self.parent.master.project_file, y.line, y.pos)
        # pop up a box to show the data
        title = self.parent.master.app_title + " - show context for log message"
        qtw.QMessageBox.information(self, title, self.text + context)
    def done(self, arg=None):
        """finish dialog
        """
        super().done(0)
class TextDialog(qtw.QDialog):
    """Dialog to display/edit an undefined text (e.g. from a comment)
    in a multiline text box.

    On Save the edited text is handed back via ``parent.dialog_data``.
    """
    def __init__(self, parent, title='', text=''):  # , comment=False):
        self._parent = parent
        super().__init__(parent)
        self.setWindowTitle(title)
        self.resize(440, 280)
        vbox = qtw.QVBoxLayout()
        hbox = qtw.QHBoxLayout()
        self.data_text = qtw.QTextEdit(self)
        ## self.data_text.resize(440, 280)
        hbox.addSpacing(50)
        self.data_text.setText(text)
        hbox.addWidget(self.data_text)
        hbox.addSpacing(50)
        vbox.addLayout(hbox)
        hbox = qtw.QHBoxLayout()
        hbox.addStretch()
        btn = qtw.QPushButton('&Save', self)
        btn.clicked.connect(self.on_ok)
        btn.setDefault(True)
        hbox.addWidget(btn)
        btn = qtw.QPushButton('&Cancel', self)
        btn.clicked.connect(self.on_cancel)
        hbox.addWidget(btn)
        hbox.addStretch()
        vbox.addLayout(hbox)
        self.setLayout(vbox)
        self.data_text.setFocus()
    def on_cancel(self):
        """callback for cancel button (should be replaced by connecting to reject?)
        """
        super().reject()
    def on_ok(self):
        """confirm changed text
        """
        # hand the result to the caller via the parent window
        self._parent.dialog_data = str(self.data_text.toPlainText())
        super().accept()
class GridDialog(qtw.QDialog):
    """Dialog to enter or edit style definitions for a (group of) selector(s).

    Shows a two-column (property, value) table; on Save the rows are handed
    back to the caller as a list of tuples via ``parent.dialog_data``.
    """
    def __init__(self, parent, title='', itemlist=None):  # , comment=False):
        self._parent = parent
        super().__init__(parent)
        self.setWindowTitle(title)
        ## self.setWindowIcon(gui.QIcon(os.path.join(PPATH,"ashe.ico")))
        vbox = qtw.QVBoxLayout()
        sbox = qtw.QFrame()
        sbox.setFrameStyle(qtw.QFrame.Box)
        box = qtw.QVBoxLayout()
        hbox = qtw.QHBoxLayout()
        hbox.addStretch()
        hbox.addWidget(qtw.QLabel("Items in table:", self))
        hbox.addStretch()
        box.addLayout(hbox)
        hbox = qtw.QHBoxLayout()
        self.attr_table = qtw.QTableWidget(self)
        ## self.attr_table.resize(540, 340)
        self.attr_table.setColumnCount(2)
        self.attr_table.setHorizontalHeaderLabels(['property', 'value'])  # only changeable this way
        hdr = self.attr_table.horizontalHeader()
        ## hdr.setMinimumSectionSize(340)
        hdr.resizeSection(0, 102)
        hdr.resizeSection(1, 152)
        hdr.setStretchLastSection(True)
        self.attr_table.verticalHeader().setVisible(False)
        self.attr_table.setTabKeyNavigation(False)
        ## self.attr_table.SetColSize(1, tbl.Size[0] - 162) # 178) # 160)
        if itemlist is not None:
            for attr, value in itemlist:
                idx = self.attr_table.rowCount()
                self.attr_table.insertRow(idx)
                item = qtw.QTableWidgetItem(attr)
                self.attr_table.setItem(idx, 0, item)
                item = qtw.QTableWidgetItem(value)
                self.attr_table.setItem(idx, 1, item)
        else:
            # NOTE(review): self.row is only set on this branch and never read
            # in the visible code — confirm whether it is still needed.
            self.row = -1
        ## hbox.addStretch()
        hbox.addWidget(self.attr_table)
        ## hbox.addStretch()
        box.addLayout(hbox)
        hbox = qtw.QHBoxLayout()
        hbox.addSpacing(50)
        btn = qtw.QPushButton('&Add Item', self)
        btn.clicked.connect(self.on_add)
        hbox.addWidget(btn)
        btn = qtw.QPushButton('&Delete Selected', self)
        btn.clicked.connect(self.on_del)
        hbox.addWidget(btn)
        hbox.addSpacing(50)
        box.addLayout(hbox)
        sbox.setLayout(box)
        vbox.addWidget(sbox)
        hbox = qtw.QHBoxLayout()
        hbox.addStretch()
        btn = qtw.QPushButton('&Save', self)
        btn.clicked.connect(self.on_ok)
        btn.setDefault(True)
        hbox.addWidget(btn)
        btn = qtw.QPushButton('&Cancel', self)
        btn.clicked.connect(self.on_cancel)
        hbox.addWidget(btn)
        vbox.addLayout(hbox)
        hbox.addStretch()
        self.setLayout(vbox)
    ## def on_resize(self, evt=None):
    ## self.attr_table.SetColSize(1, self.attr_table.GetSize()[0] - 162) # 178) # 160)
    ## self.attr_table.ForceRefresh()
    def on_add(self):
        """Add a property: simply append an empty row to the table.
        """
        ## self.attr_table.setFocus()
        num = self.attr_table.rowCount()
        self.attr_table.setRowCount(num + 1)
        ## self.attr_table.insertRow(idx) # why not addRow?
        ## self.attr_table.setCurrentCell(idx, 0)
    def on_del(self):
        """Remove the selected attribute row (after confirmation).
        """
        ok = qtw.QMessageBox.question(self, 'Delete row from table', 'Are you sure?',
                                      qtw.QMessageBox.Ok | qtw.QMessageBox.Cancel,
                                      qtw.QMessageBox.Ok)
        if ok == qtw.QMessageBox.Ok:
            self.attr_table.removeRow(self.attr_table.currentRow())
    def on_cancel(self):
        """callback for cancel button (should be replaced by connecting to reject?)
        """
        ## qtw.QDialog.done(self, qtw.QDialog.Rejected)
        super().reject()
    def on_ok(self):
        """Validate the table on OK: every row must have both cells filled,
        then hand the (property, value) pairs back via parent.dialog_data.
        """
        proplist = []
        for i in range(self.attr_table.rowCount()):
            name_item = self.attr_table.item(i, 0)
            value_item = self.attr_table.item(i, 1)
            if not name_item or not value_item:
                qtw.QMessageBox.information(self, "Can't continue",
                                            'Not all values are entered and confirmed')
                return
            proplist.append((str(name_item.text()), str(value_item.text())))
        self._parent.dialog_data = proplist
        ## qtw.QDialog.done(self, qtw.QDialog.Accepted)
        super().accept()
class ListDialog(qtw.QDialog):
    """Dialog to add or edit a list-type property.

    For a "rules" node the items are chosen from a fixed set of rule types;
    otherwise free text is entered.  On Save the item texts are handed back
    via ``parent.dialog_data``.
    """
    def __init__(self, parent, title='', itemlist=None):  # , comment=False):
        self._parent = parent
        super().__init__(parent)
        self.setWindowTitle(title)
        # the title string determines whether we edit rule types or free text
        self.is_rules_node = "'rules'" in title
        vbox = qtw.QVBoxLayout()
        sbox = qtw.QFrame()
        sbox.setFrameStyle(qtw.QFrame.Box)
        box = qtw.QVBoxLayout()
        hbox = qtw.QHBoxLayout()
        hbox.addStretch()
        hbox.addWidget(qtw.QLabel("Items in list:", self))
        hbox.addStretch()
        vbox.addLayout(hbox)
        self.list = qtw.QListWidget(self)
        if itemlist is not None:
            self.list.addItems([self._parent.tree.get_itemtext(x) for x in itemlist])
        hbox = qtw.QHBoxLayout()
        hbox.addSpacing(50)
        hbox.addWidget(self.list)
        hbox.addSpacing(50)
        box.addLayout(hbox)
        hbox = qtw.QHBoxLayout()
        hbox.addStretch()
        btn = qtw.QPushButton('&Add Item', self)
        btn.clicked.connect(self.on_add)
        hbox.addWidget(btn)
        btn = qtw.QPushButton('&Edit Selected', self)
        btn.clicked.connect(self.on_edit)
        hbox.addWidget(btn)
        btn = qtw.QPushButton('&Delete Selected', self)
        btn.clicked.connect(self.on_del)
        hbox.addWidget(btn)
        hbox.addStretch()
        box.addLayout(hbox)
        sbox.setLayout(box)
        vbox.addWidget(sbox)
        hbox = qtw.QHBoxLayout()
        hbox.addStretch()
        btn = qtw.QPushButton('&Save', self)
        btn.clicked.connect(self.on_ok)
        btn.setDefault(True)
        hbox.addWidget(btn)
        btn = qtw.QPushButton('&Cancel', self)
        btn.clicked.connect(self.on_cancel)
        hbox.addWidget(btn)
        vbox.addLayout(hbox)
        hbox.addStretch()
        self.setLayout(vbox)
    def on_add(self):
        "Add an item to the list"
        # NOTE(review): ``ed`` is not imported in this module's visible code
        # (ed.RTYPES would raise NameError), and ``self._parent.app_title``
        # looks like it should be ``self._parent.master.app_title`` — verify.
        if self.is_rules_node:
            ruletypes = sorted([(x, y[0]) for x, y in ed.RTYPES.items()],
                               key=lambda item: item[1])
            options = [x[1] for x in ruletypes]
            text, ok = qtw.QInputDialog.getItem(
                self, self._parent.app_title, "Choose type for this rule", options,
                editable=False)
        else:
            text, ok = qtw.QInputDialog.getText(
                self, 'Add item to list', 'Enter text for this item')
        # NOTE(review): ``ok`` is ignored here — a cancelled dialog still adds
        # an (empty) item; confirm whether that is intended.
        self.list.addItem(text)
    def on_edit(self):
        "Edit the selected item"
        current = self.list.currentItem()
        oldtext = current.text()
        if self.is_rules_node:
            ruletypes = sorted([(x, y[0]) for x, y in ed.RTYPES.items()],
                               key=lambda item: item[1])
            options = [x[1] for x in ruletypes]
            current_index = options.index(oldtext) if oldtext else 0
            text, ok = qtw.QInputDialog.getItem(
                self, self._parent.app_title, "Choose type for this rule", options,
                current_index, editable=False)
        else:
            text, ok = qtw.QInputDialog.getText(
                self, 'Edit list item', 'Enter text for this item:', text=oldtext)
        if ok and text != oldtext:
            current.setText(text)
    def on_del(self):
        "Delete the selected item (after confirmation)"
        ok = qtw.QMessageBox.question(self, 'Delete item from list', 'Are you sure?',
                                      qtw.QMessageBox.Ok | qtw.QMessageBox.Cancel,
                                      qtw.QMessageBox.Ok)
        if ok == qtw.QMessageBox.Ok:
            self.list.takeItem(self.list.currentRow())
    def on_cancel(self):
        """callback for cancel button (should be replaced by connecting to reject?)
        """
        ## qtw.QDialog.done(self, qtw.QDialog.Rejected)
        super().reject()
    def on_ok(self):
        """On OK: pass the assembled list to the main window
        via self.dialog_data.
        """
        list_data = []
        for row in range(self.list.count()):
            list_data.append(str(self.list.item(row).text()))
        self._parent.dialog_data = list_data
        ## qtw.QDialog.done(self, qtw.QDialog.Accepted)
        super().accept()
| 35.302292 | 97 | 0.582363 | 24,420 | 0.991031 | 0 | 0 | 1,594 | 0.064689 | 0 | 0 | 7,516 | 0.30502 |
fb2b12951a9311d135394c012f89a0e99ed10eee
| 13,425 |
py
|
Python
|
mtp_cashbook/apps/disbursements/tests/test_functional.py
|
uk-gov-mirror/ministryofjustice.money-to-prisoners-cashbook
|
d35a621e21631e577faacaeacb5ab9f883c9b4f4
|
[
"MIT"
] | 4 |
2016-01-05T12:21:39.000Z
|
2016-12-22T15:56:37.000Z
|
mtp_cashbook/apps/disbursements/tests/test_functional.py
|
uk-gov-mirror/ministryofjustice.money-to-prisoners-cashbook
|
d35a621e21631e577faacaeacb5ab9f883c9b4f4
|
[
"MIT"
] | 132 |
2015-06-10T09:53:14.000Z
|
2022-02-01T17:35:54.000Z
|
mtp_cashbook/apps/disbursements/tests/test_functional.py
|
uk-gov-mirror/ministryofjustice.money-to-prisoners-cashbook
|
d35a621e21631e577faacaeacb5ab9f883c9b4f4
|
[
"MIT"
] | 3 |
2015-07-07T14:40:33.000Z
|
2021-04-11T06:20:14.000Z
|
from django.utils.crypto import get_random_string
from cashbook.tests.test_functional import CashbookTestCase
from disbursements.templatetags.disbursements import format_sortcode
class DisbursementTestCase(CashbookTestCase):
    """Selenium functional tests for creating and confirming disbursements.

    Fixes vs. the previous version: several assertions read keys from the
    wrong dict and would raise KeyError — address fields/postcode now come
    from ``address_form`` (not ``contact_form``), and the company-recipient
    test uses ``id_recipient_company_name`` instead of the person-name keys
    that were never put into its ``contact_form``.
    """

    def tearDown(self):
        self.click_logout()

    def click_button(self, text=None):
        """Click the govuk button whose label or value equals *text*.

        Without *text*, exactly one button must be present on the page.
        """
        buttons = self.driver.find_elements_by_class_name('govuk-button')
        if text:
            button = next(
                (b for b in buttons
                 if b.text.strip() == text or b.get_attribute('value') == text),
                None)
            self.assertIsNotNone(button)
        else:
            self.assertEqual(len(buttons), 1)
            button = buttons[0]
        button.click()

    def test_create_bank_transfer_disbursement(self):
        """Create a bank-transfer disbursement, confirm it as a colleague, find it in search."""
        self.login(self.username, self.username)
        self.assertShowingView('home')
        self.click_on_text('Digital disbursements')
        self.assertShowingView('disbursements:start')
        self.click_button('Start now')
        self.assertShowingView('disbursements:sending_method')
        self.click_on_text_substring('Bank transfer')
        self.click_button('Next')
        self.assertShowingView('disbursements:prisoner')
        self.fill_in_form({
            'id_prisoner_number': 'A1401AE',
        })
        self.click_button('Next')
        self.assertShowingView('disbursements:prisoner_check')
        self.assertInSource('JILLY HALL')
        self.click_on_text_substring('Yes')
        self.click_button('Next')
        # invalid amount first, to exercise the validation message
        self.assertShowingView('disbursements:amount')
        self.fill_in_form({
            'id_amount': '11a',
        })
        self.click_button('Next')
        self.assertShowingView('disbursements:amount')
        self.assertInSource('Enter amount as a number')
        self.get_element('id_amount').clear()
        self.fill_in_form({
            'id_amount': '11',
        })
        self.click_button('Next')
        self.assertShowingView('disbursements:recipient_contact')
        contact_form = {
            'id_recipient_first_name': 'Mary-' + get_random_string(3),
            'id_recipient_last_name': 'Halls-' + get_random_string(3),
            'id_recipient_email': 'mary-halls-' + get_random_string(3) + '@outside.local',
        }
        self.fill_in_form(contact_form)
        self.click_button('Next')
        self.assertShowingView('disbursements:recipient_postcode')
        postcode_form = {
            'id_postcode': 'PostCode-' + get_random_string(3),
        }
        self.fill_in_form(postcode_form)
        self.click_button('Find address')
        self.assertShowingView('disbursements:recipient_address')
        address_form = {
            'id_address_line1': 'Street-' + get_random_string(3),
            'id_city': 'City-' + get_random_string(3),
            'id_postcode': 'PostCode-' + get_random_string(3),
        }
        self.fill_in_form(address_form)
        self.click_button('Next')
        self.assertShowingView('disbursements:recipient_bank_account')
        self.assertInSource('%s %s' % (contact_form['id_recipient_first_name'],
                                       contact_form['id_recipient_last_name']))
        bank_account = {
            'id_sort_code': get_random_string(6, '0123456789'),
            'id_account_number': get_random_string(8, '0123456789'),
        }
        self.fill_in_form(bank_account)
        self.click_button('Next')
        self.assertShowingView('disbursements:remittance_description')
        self.click_on_text_substring('Yes')
        self.type_in('id_remittance_description', 'LEGAL FEES')
        self.click_button('Next')
        self.assertShowingView('disbursements:details_check')
        self.assertInSource('Bank transfer')
        self.assertInSource('£11.00')
        for key in ('id_recipient_first_name', 'id_recipient_last_name',
                    'id_recipient_email'):
            self.assertInSource(contact_form[key])
        for key in ('id_address_line1', 'id_city'):
            self.assertInSource(address_form[key])
        self.assertInSource(address_form['id_postcode'].upper())
        self.assertInSource(format_sortcode(bank_account['id_sort_code']))
        self.assertInSource(bank_account['id_account_number'])
        self.assertInSource('JILLY HALL')
        self.assertInSource('A1401AE')
        self.assertInSource('LEGAL FEES')
        # answering "No" must keep us on the check page
        self.click_on_text_substring('No')
        try:
            self.click_button('Next')
        except AssertionError:
            pass
        self.assertShowingView('disbursements:details_check')
        self.click_on_text_substring('Yes')
        self.click_button('Next')
        self.assertShowingView('disbursements:handover')
        self.click_button()
        self.assertShowingView('disbursements:created')
        self.assertInSource('request is ready for your colleague')
        self.click_logout()

        # log in as another user to confirm request
        self.login(self.username + '-ua', self.username + '-ua')
        self.assertShowingView('home')
        self.click_on_text('Digital disbursements')
        self.click_on_text('Confirm payments')
        self.assertShowingView('disbursements:pending_list')
        self.assertInSource(contact_form['id_recipient_first_name'])
        self.assertInSource(contact_form['id_recipient_last_name'])
        self.driver.find_element_by_xpath(
            '//*[text()[contains(.,"' + contact_form['id_recipient_first_name'] + '")]]/..'
        ).find_element_by_class_name('govuk-button').click()
        self.assertShowingView('disbursements:pending_detail')
        self.assertInSource(contact_form['id_recipient_first_name'])
        self.assertInSource('entered by HMP Leeds Clerk')
        self.click_on_text_substring('Yes')
        self.click_button('Confirm payment')
        self.assertShowingView('disbursements:confirmed')
        self.assertInSource('request sent to SSCL')
        self.click_on_text_substring('Confirm another payment')
        self.assertShowingView('disbursements:pending_list')
        self.assertNotInSource(contact_form['id_recipient_first_name'])
        self.click_logout()

        # search for new request
        self.login(self.username, self.username)
        self.assertShowingView('home')
        self.click_on_text('Digital disbursements')
        self.click_on_link('Payments made')
        self.assertShowingView('disbursements:search')
        self.click_on_link('Recipient')
        self.type_in('id_recipient_name', contact_form['id_recipient_first_name'], send_return=True)
        self.assertShowingView('disbursements:search')
        self.assertNotInSource('There was a problem')
        self.get_element('id_recipient_name').clear()
        self.click_on_link('Recipient')
        self.assertInSource('Entered by HMP Leeds Clerk')
        self.assertInSource('Confirmed by HMP Leeds Clerk')
        self.assertInSource('Bank transfer')
        self.assertInSource('£11.00')
        # contact fields come from contact_form; address fields from address_form
        for key in ('id_recipient_first_name', 'id_recipient_last_name'):
            self.assertInSource(contact_form[key])
        for key in ('id_address_line1', 'id_city'):
            self.assertInSource(address_form[key])
        self.assertInSource(address_form['id_postcode'].upper())
        self.assertInSource(format_sortcode(bank_account['id_sort_code']))
        self.assertInSource(bank_account['id_account_number'])
        self.assertInSource('JILLY HALL')
        self.assertInSource('A1401AE')

    def test_create_cheque_disbursement_to_company(self):
        """Create a cheque disbursement to a company, confirm it, find it in search."""
        self.login(self.username, self.username)
        self.assertShowingView('home')
        self.click_on_text('Digital disbursements')
        self.assertShowingView('disbursements:start')
        self.click_button('Start now')
        self.assertShowingView('disbursements:sending_method')
        self.click_on_text_substring('Cheque')
        self.click_button('Next')
        self.assertShowingView('disbursements:prisoner')
        self.fill_in_form({
            'id_prisoner_number': 'A1401AE',
        })
        self.click_button('Next')
        self.assertShowingView('disbursements:prisoner_check')
        self.assertInSource('JILLY HALL')
        self.click_on_text_substring('Yes')
        self.click_button('Next')
        self.assertShowingView('disbursements:amount')
        self.fill_in_form({
            'id_amount': '11a',
        })
        self.click_button('Next')
        self.assertShowingView('disbursements:amount')
        self.assertInSource('Enter amount as a number')
        self.get_element('id_amount').clear()
        self.fill_in_form({
            'id_amount': '11',
        })
        self.click_button('Next')
        self.assertShowingView('disbursements:recipient_contact')
        self.click_on_text_substring('Company')
        contact_form = {
            'id_recipient_company_name': 'Boots-' + get_random_string(3),
        }
        self.fill_in_form(contact_form)
        self.click_button('Next')
        self.assertShowingView('disbursements:recipient_postcode')
        postcode_form = {
            'id_postcode': 'PostCode-' + get_random_string(3),
        }
        self.fill_in_form(postcode_form)
        self.click_button('Find address')
        self.assertShowingView('disbursements:recipient_address')
        address_form = {
            'id_address_line1': 'Street-' + get_random_string(3),
            'id_city': 'City-' + get_random_string(3),
            'id_postcode': 'PostCode-' + get_random_string(3),
        }
        self.fill_in_form(address_form)
        self.click_button('Next')
        self.assertShowingView('disbursements:remittance_description')
        # enter a description, then change the answer to "No" so it is dropped
        self.click_on_text_substring('Yes')
        self.type_in('id_remittance_description', 'LEGAL FEES')
        self.click_on_text_substring('No')
        self.click_button('Next')
        self.assertShowingView('disbursements:details_check')
        self.assertInSource('Cheque')
        self.assertInSource('£11.00')
        self.assertInSource(contact_form['id_recipient_company_name'])
        for key in ('id_address_line1', 'id_city'):
            self.assertInSource(address_form[key])
        self.assertInSource(address_form['id_postcode'].upper())
        self.assertInSource('Company:')
        self.assertInSource('JILLY HALL')
        self.assertInSource('A1401AE')
        self.assertNotInSource('LEGAL FEES')
        self.assertInSource('Payment from JILLY HALL')
        self.click_on_text_substring('No')
        try:
            self.click_button('Next')
        except AssertionError:
            pass
        self.assertShowingView('disbursements:details_check')
        self.click_on_text_substring('Yes')
        self.click_button('Next')
        self.assertShowingView('disbursements:handover')
        self.get_element('.govuk-button').click()
        self.assertShowingView('disbursements:created')
        self.assertInSource('request is ready for your colleague')
        self.click_logout()

        # log in as another user to confirm request
        self.login(self.username + '-ua', self.username + '-ua')
        self.assertShowingView('home')
        self.click_on_text('Digital disbursements')
        self.click_on_text('Confirm payments')
        self.assertShowingView('disbursements:pending_list')
        # company recipients only have a company name, not first/last names
        self.assertInSource(contact_form['id_recipient_company_name'])
        self.driver.find_element_by_xpath(
            '//*[text()[contains(.,"' + contact_form['id_recipient_company_name'] + '")]]/..'
        ).find_element_by_class_name('govuk-button').click()
        self.assertShowingView('disbursements:pending_detail')
        self.assertInSource(contact_form['id_recipient_company_name'])
        self.assertInSource('entered by HMP Leeds Clerk')
        self.click_on_text_substring('Yes')
        self.click_button('Confirm payment')
        self.assertShowingView('disbursements:confirmed')
        self.assertInSource('request sent to SSCL')
        self.click_on_text_substring('Confirm another payment')
        self.assertShowingView('disbursements:pending_list')
        self.assertNotInSource(contact_form['id_recipient_company_name'])
        self.click_logout()

        # search for new request
        self.login(self.username, self.username)
        self.assertShowingView('home')
        self.click_on_text('Digital disbursements')
        self.click_on_link('Payments made')
        self.assertShowingView('disbursements:search')
        self.click_on_link('Recipient')
        self.type_in('id_recipient_name', contact_form['id_recipient_company_name'], send_return=True)
        self.assertShowingView('disbursements:search')
        self.assertNotInSource('There was a problem')
        self.get_element('id_recipient_name').clear()
        self.click_on_link('Recipient')
        self.assertInSource('Entered by HMP Leeds Clerk')
        self.assertInSource('Confirmed by HMP Leeds Clerk')
        self.assertInSource('Cheque')
        self.assertInSource('£11.00')
        self.assertInSource(contact_form['id_recipient_company_name'])
        for key in ('id_address_line1', 'id_city'):
            self.assertInSource(address_form[key])
        self.assertInSource(address_form['id_postcode'].upper())
        self.assertInSource('JILLY HALL')
        self.assertInSource('A1401AE')
| 39.60177 | 104 | 0.659367 | 13,246 | 0.986373 | 0 | 0 | 0 | 0 | 0 | 0 | 4,120 | 0.306799 |
fb2bb261a73c74317e9f4c04091925e5a2fa1f5e
| 2,199 |
py
|
Python
|
quantrocket/satellite.py
|
Jay-Jay-D/quantrocket-client
|
b70ac199382d22d56fad923ca2233ce027f3264a
|
[
"Apache-2.0"
] | null | null | null |
quantrocket/satellite.py
|
Jay-Jay-D/quantrocket-client
|
b70ac199382d22d56fad923ca2233ce027f3264a
|
[
"Apache-2.0"
] | null | null | null |
quantrocket/satellite.py
|
Jay-Jay-D/quantrocket-client
|
b70ac199382d22d56fad923ca2233ce027f3264a
|
[
"Apache-2.0"
] | 1 |
2019-06-12T11:34:27.000Z
|
2019-06-12T11:34:27.000Z
|
# Copyright 2017 QuantRocket - All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from quantrocket.houston import houston
from quantrocket.cli.utils.output import json_to_cli
from quantrocket.cli.utils.files import write_response_to_filepath_or_buffer
def execute_command(service, cmd, return_file=None, filepath_or_buffer=None):
    """
    Execute an arbitrary command on a satellite service and optionally return a file.

    Parameters
    ----------
    service : str, required
        the service name (must start with "satellite")
    cmd: str, required
        the command to run
    return_file : str, optional
        the path of a file to be returned after the command completes
    filepath_or_buffer : str, optional
        the location to write the return_file (omit to write to stdout)

    Returns
    -------
    dict or None
        None if return_file, otherwise status message
    """
    # Validate arguments up front before talking to the service.
    if not service:
        raise ValueError("a service is required")
    if not cmd:
        raise ValueError("a command is required")
    if not service.startswith("satellite"):
        raise ValueError("service must start with 'satellite'")

    params = {"cmd": cmd}
    if return_file:
        params["return_file"] = return_file

    # Long timeout (24h): the command may be arbitrarily slow.
    one_day_in_seconds = 60 * 60 * 24
    response = houston.post(
        "/{0}/commands".format(service), params=params, timeout=one_day_in_seconds)
    houston.raise_for_status_with_json(response)

    if not return_file:
        return response.json()

    # Stream the requested file to the caller-provided destination (or stdout).
    destination = filepath_or_buffer or sys.stdout
    write_response_to_filepath_or_buffer(destination, response)
def _cli_execute_command(*args, **kwargs):
    """CLI adapter: run execute_command and format its JSON result for the console."""
    cli_output = json_to_cli(execute_command, *args, **kwargs)
    return cli_output
| 32.820896 | 93 | 0.71578 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,253 | 0.569804 |
fb2ea9cb95f216543b39462cef45459a422c0631
| 12,367 |
py
|
Python
|
blog_server/app/comm/GeneralOperate.py
|
szhu9903/flask-react-blog
|
b1939a5d95e0084a82c230f2a20a9b197d2eef46
|
[
"MIT"
] | 2 |
2022-03-12T14:51:42.000Z
|
2022-03-25T13:20:16.000Z
|
blog_server/app/comm/GeneralOperate.py
|
szhu9903/flask-react-blog
|
b1939a5d95e0084a82c230f2a20a9b197d2eef46
|
[
"MIT"
] | 7 |
2022-03-19T02:17:54.000Z
|
2022-03-28T10:12:52.000Z
|
blog_server/app/comm/GeneralOperate.py
|
szhu9903/flask-react-blog
|
b1939a5d95e0084a82c230f2a20a9b197d2eef46
|
[
"MIT"
] | 1 |
2022-03-25T13:20:28.000Z
|
2022-03-25T13:20:28.000Z
|
import copy
from flask import g
from app.comm.TableModule import TableModule
from app.comm.SqlExecute import SqlExecute
from app.unit_config import default_result, default_limit_size, depth_post_map
class GeneralOperate(object):
    """Generic REST-style CRUD handler driven by a TableModule description.

    Each HTTP verb is processed by a chain of handler functions. Any link in
    the chain may abort processing by setting ``g.is_continue_exec = False``
    and writing an error message into ``g.result``. Subclasses customize
    behaviour by overriding individual links (the ``before_*``/``after_*``
    hooks and the ``transact_*`` steps).
    """
    def __init__(self, module:TableModule):
        self.module = module
        # Handler chains for each HTTP verb (populated by init_check_func_link).
        self.get_deal_func_link = []
        self.post_deal_func_link = []
        self.put_deal_func_link = []
        self.delete_deal_func_link = []
        self.init_check_func_link()
    # Build the default request-processing chains; order matters.
    def init_check_func_link(self):
        self.get_deal_func_link.extend([
            self.check_get_permissions,
            self.check_view_param,
            self.check_pagination_param,
            self.before_deal_get,
            self.deal_get_data,
            self.after_deal_get,
        ])
        self.post_deal_func_link.extend([
            self.check_operation_permissions,
            self.check_request_data,
            self.check_column_data,
            self.before_deal_post,
            self.deal_post_data,
            self.after_deal_post,
        ])
        self.put_deal_func_link.extend([
            self.check_operation_permissions,
            self.check_request_data,
            self.check_unique_record,
            self.check_column_data,
            self.before_deal_put,
            self.deal_put_data,
            self.after_deal_put,
        ])
        self.delete_deal_func_link.extend([
            self.check_operation_permissions,
            self.check_unique_record,
            self.before_deal_delete,
            self.deal_delete_data,
            self.after_deal_delete,
        ])
    # Common: stash the parsed request pieces on flask.g for the chain to use.
    def init_general_data(self, request):
        g.args_data = request.args.to_dict() # URL query parameters
        g.is_continue_exec = True # chain keeps running while True
        if request.method in ["POST", "PUT"]:
            g.json_data = request.json
        g.view_args = request.view_args # dynamic URL path arguments
    # Handle a GET request by running the GET chain.
    def deal_get_method(self, request):
        # Parse the request.
        self.init_general_data(request)
        # Run the handler chain.
        for deal_func in self.get_deal_func_link:
            deal_func()
            # A link reported an error; abort the chain.
            if not g.is_continue_exec:
                break
        return g.result
    # GET: permission check hook (default allows everything).
    def check_get_permissions(self):
        return
    # GET: validate the optional ?view= parameter against the module's views.
    def check_view_param(self):
        view = g.args_data.get("view")
        if view:
            if (not self.module.view_list) or (view not in self.module.view_list):
                g.is_continue_exec = False
                g.result['message'] = f"view({view}):视图不存在! "
    # GET: validate the optional ?pagination=index,size parameter.
    def check_pagination_param(self):
        pagination = g.args_data.get("pagination")
        if pagination:
            page_index, page_size = pagination.split(',')
            if (not page_index.isdigit()) and (not page_size.isdigit()):
                g.is_continue_exec = False
                g.result['message'] = f"pagination({pagination}):分页数据错误,参考格式:index,size! "
    # GET: hook executed before fetching data.
    def before_deal_get(self):
        return
    # GET: fetch total count and the result set into g.result.
    def deal_get_data(self):
        # Total row count (shares the WHERE clause with the data query).
        self.filter_str, self.filter_args = self.get_filter_str()
        total_count = self.query_total_count()
        if not g.is_continue_exec:
            g.result['message'] = '获取数据量失败!'
            return
        g.result['total_count'] = total_count
        # Result set.
        data = self.query_data()
        if not g.is_continue_exec:
            g.result['message'] = '获取结果集失败!'
            return
        g.result['data'] = data
    # GET: run the COUNT query (view-specific when ?view= is given).
    def query_total_count(self):
        view = g.args_data.get('view')
        if view:
            count_query = self.module.views_query[view]['sql_query_count']
        else:
            count_query = self.module.sql_count_default
        count_query = f'{count_query} {self.filter_str}'
        total_count = SqlExecute.query_sql_data(count_query, self.filter_args)
        return total_count[0]['total_count'] if g.is_continue_exec else None
    # GET: run the SELECT assembled from base query + WHERE + ORDER BY + LIMIT.
    def query_data(self):
        default_sql = self.get_default_sql()
        order_str = self.get_order_str()
        pagination_str = self.get_pagination_str()
        sql_default_query = f'{default_sql} {self.filter_str} {order_str} {pagination_str}'
        # Fetch the result set.
        data = SqlExecute.query_sql_data(sql_default_query, self.filter_args)
        return data if g.is_continue_exec else None
    # GET: pick the base SELECT (view-specific when ?view= is given).
    def get_default_sql(self):
        view = g.args_data.get('view')
        if view:
            sql_query = self.module.views_query[view]['sql_query']
        else:
            sql_query = self.module.sql_query_default
        return sql_query
    # GET: build the WHERE clause string and its bind-parameter dict from
    # record_id / ?filter= (exact) / ?fuzzyfilter= (LIKE) parameters.
    def get_filter_str(self):
        record_id = g.view_args.get('record_id')
        filter_param = g.args_data.get('filter')
        fuzzy_filter_param = g.args_data.get('fuzzyfilter')
        filter_str = "where 1=1 "
        filter_args = dict()
        # Detail fetch by primary key short-circuits all other filters.
        if record_id:
            filter_str += "and id=%(id)s "
            filter_args['id'] = record_id
            return (filter_str, filter_args)
        # Fuzzy (LIKE) filters: "key=val,key2=val2".
        if fuzzy_filter_param:
            fuzzy_filter_list = fuzzy_filter_param.split(',')
            for fuzzy_filter in fuzzy_filter_list:
                fuzzy_key, fuzzy_val = fuzzy_filter.split('=')
                filter_str += f"and ({fuzzy_key} like %({fuzzy_key})s) "
                filter_args[fuzzy_key] = f"%{fuzzy_val}%"
        # Exact filters; fuzzy filters on the same key take precedence.
        if filter_param:
            filter_args_list = filter_param.split(',')
            for filter in filter_args_list:
                filter_key, filter_val = filter.split('=')
                if filter_key in filter_args.keys(): continue
                filter_str += f"and ({filter_key}=%({filter_key})s) "
                filter_args[filter_key] = filter_val
        return (filter_str, filter_args)
    # GET: build the ORDER BY clause from ?order= ('|' separates columns).
    def get_order_str(self):
        # Sort specification.
        order_param = g.args_data.get('order')
        order_str = ""
        if order_param:
            order_str = order_param.replace('|', ',')
            order_str = 'order by %s' % (order_str)
        return order_str
    # GET: build the LIMIT clause from ?pagination=index,size (1-based index);
    # falls back to the configured default page size.
    def get_pagination_str(self):
        # Pagination clause.
        pagination_param = g.args_data.get('pagination')
        pagination_str = ""
        if pagination_param:
            page_index, page_size = pagination_param.split(',')
            page_index = 1 if int(page_index) < 1 else int(page_index)
            page_size = 1 if int(page_size) < 1 else int(page_size)
            pagination_str = 'limit %d, %d' % ((page_index - 1) * page_size, page_size)
        return pagination_str if pagination_str else f"limit {default_limit_size}"
    # GET: hook executed after the data has been fetched.
    def after_deal_get(self):
        return
    # Handle a POST request by running the POST chain.
    def deal_post_method(self, request):
        # Parse the request.
        self.init_general_data(request)
        # Run the handler chain.
        for deal_func in self.post_deal_func_link:
            deal_func()
            # A link reported an error; abort the chain.
            if not g.is_continue_exec:
                break
        return g.result
    # [POST/PUT/DELETE]: write-permission check hook (default allows everything).
    def check_operation_permissions(self):
        return
    # [POST/PUT]: validate the submitted JSON envelope (must carry "data").
    def check_request_data(self):
        if not g.json_data:
            g.is_continue_exec = False
            g.result["message"] = '无要提交的数据~'
        if 'data' not in g.json_data.keys():
            g.is_continue_exec = False
            g.result["message"] = '参数不完整:缺少data参数~'
    # [POST/PUT]: drop empty values and reject unknown column names.
    def check_column_data(self):
        req_data = g.json_data["data"]
        table_column = self.module.colnames
        req_data_keys = list(req_data.keys())
        for data_key in req_data_keys:
            if (req_data[data_key] is None) or (len(str(req_data[data_key])) == 0):
                del req_data[data_key]
                continue
            if (data_key not in table_column) and (data_key not in depth_post_map):
                g.is_continue_exec = False
                g.result['code'] = 0x11
                g.result["message"] = f'非法列名:{data_key}~'
    # POST: hook executed before inserting data.
    def before_deal_post(self):
        return
    # POST: run the insert inside a transaction (pre/insert/post steps),
    # committing only if every step succeeded.
    def deal_post_data(self):
        sqlExecute = SqlExecute()
        self.transact_post_before(sqlExecute)
        if not g.is_continue_exec:
            return
        self.transact_post(sqlExecute)
        if not g.is_continue_exec:
            return
        self.transact_post_after(sqlExecute)
        if not g.is_continue_exec:
            return
        sqlExecute.commit()
    # POST: transactional hook before the insert.
    def transact_post_before(self, cursor):
        return
    # POST: perform the INSERT (or REPLACE when type == "replace") and
    # record the new row id; depth_post_map keys are stripped from the binds.
    def transact_post(self, cursor):
        insert_data = g.json_data['data'].copy()
        if g.json_data.get("type", None) == "replace":
            sql_insert = self.module.get_insert_sql(insert_data, is_replace=True)
        else:
            sql_insert = self.module.get_insert_sql(insert_data)
        insert_data_keys = list(insert_data.keys())
        for col_name in insert_data_keys:
            if col_name in depth_post_map: del insert_data[col_name]
        rowid = cursor.transact_commit_sql_data(sql_insert, insert_data)
        g.result['rowid'] = rowid
    # POST: transactional hook after the insert.
    def transact_post_after(self, cursor):
        return
    # POST: hook executed after the whole POST chain.
    def after_deal_post(self):
        return
    # Handle a PUT request by running the PUT chain.
    def deal_put_method(self, request):
        # Parse the request.
        self.init_general_data(request)
        # Run the handler chain.
        for deal_func in self.put_deal_func_link:
            deal_func()
            # A link reported an error; abort the chain.
            if not g.is_continue_exec:
                break
        return g.result
    # [PUT/DELETE]: make sure the targeted record actually exists.
    def check_unique_record(self):
        record = g.view_args['record_id']
        query_sql = f" {self.module.sql_query_default} where id={record}"
        data = SqlExecute.query_sql_data(query_sql)
        if not g.is_continue_exec:
            return
        if len(data) == 0:
            g.is_continue_exec = False
            g.result["message"] = "未匹配到要操作的数据"
            return
    # PUT: hook executed before updating data.
    def before_deal_put(self):
        return
    # PUT: run the update inside a transaction (pre/update/post steps),
    # committing only if every step succeeded.
    def deal_put_data(self):
        sqlExecute = SqlExecute()
        self.transact_put_before(sqlExecute)
        if not g.is_continue_exec:
            return
        self.transact_put(sqlExecute)
        if not g.is_continue_exec:
            return
        self.transact_put_after(sqlExecute)
        if not g.is_continue_exec:
            return
        sqlExecute.commit()
    # PUT: transactional hook before the update.
    def transact_put_before(self, cursor):
        pass
    # PUT: perform the UPDATE for the targeted record and record its id;
    # depth_post_map keys are stripped from the binds.
    def transact_put(self, cursor):
        record_id = g.view_args['record_id']
        update_data = g.json_data['data'].copy()
        sql_update = self.module.get_update_sql(update_data, record_id)
        insert_data_keys = list(update_data.keys())
        for col_name in insert_data_keys:
            if col_name in depth_post_map: del update_data[col_name]
        cursor.transact_commit_sql_data(sql_update, update_data)
        if not g.is_continue_exec:
            return
        g.result['rowid'] = record_id
    # PUT: transactional hook after the update.
    def transact_put_after(self, cursor):
        pass
    # PUT: hook executed after the whole PUT chain.
    def after_deal_put(self):
        return
    # Handle a DELETE request by running the DELETE chain.
    def deal_delete_method(self, request):
        # Parse the request.
        self.init_general_data(request)
        # Run the handler chain.
        for deal_func in self.delete_deal_func_link:
            deal_func()
            # A link reported an error; abort the chain.
            if not g.is_continue_exec:
                break
        return g.result
    # DELETE: hook executed before deleting the record.
    def before_deal_delete(self):
        return
    # DELETE: delete the targeted record and record its id.
    def deal_delete_data(self):
        record_id = g.view_args['record_id']
        sql_delete = self.module.get_delete_sql(record_id)
        SqlExecute.commit_sql_data(sql_delete)
        if not g.is_continue_exec:
            return
        g.result['rowid'] = record_id
    # DELETE: hook executed after the whole DELETE chain.
    def after_deal_delete(self):
        return
| 32.890957 | 91 | 0.602248 | 13,278 | 0.985088 | 0 | 0 | 0 | 0 | 0 | 0 | 2,857 | 0.211959 |
fb2eaccf39fb8ba5a32d82bc3c5d4475f3435246
| 57 |
py
|
Python
|
tests/helpers/test_plotting.py
|
sebastian-lapuschkin/Quantus
|
c3b8a9fb2018f34bd89ba38efa2b2b8c38128b3f
|
[
"MIT"
] | null | null | null |
tests/helpers/test_plotting.py
|
sebastian-lapuschkin/Quantus
|
c3b8a9fb2018f34bd89ba38efa2b2b8c38128b3f
|
[
"MIT"
] | null | null | null |
tests/helpers/test_plotting.py
|
sebastian-lapuschkin/Quantus
|
c3b8a9fb2018f34bd89ba38efa2b2b8c38128b3f
|
[
"MIT"
] | null | null | null |
"""No identified need to test plotting functionality."""
| 28.5 | 56 | 0.754386 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 56 | 0.982456 |
fb2ec99c88e89e1bd90620cd1221b2fc2ec8cc12
| 4,921 |
py
|
Python
|
SmartDeal-Training/models/mobilenetv2_se_mask.py
|
VITA-Group/SmartDeal
|
8e1de77497eedbeea412a8c51142834c28a53709
|
[
"MIT"
] | 2 |
2021-07-20T02:48:35.000Z
|
2021-11-29T02:55:36.000Z
|
SmartDeal-Training/models/mobilenetv2_se_mask.py
|
VITA-Group/SmartDeal
|
8e1de77497eedbeea412a8c51142834c28a53709
|
[
"MIT"
] | null | null | null |
SmartDeal-Training/models/mobilenetv2_se_mask.py
|
VITA-Group/SmartDeal
|
8e1de77497eedbeea412a8c51142834c28a53709
|
[
"MIT"
] | null | null | null |
'''MobileNetV2 in PyTorch.
See the paper "Inverted Residuals and Linear Bottlenecks:
Mobile Networks for Classification, Detection and Segmentation" for more details.
'''
import torch
import torch.nn as nn
import torch.nn.functional as F
from se import SEConv2d, SELinear
THRESHOLD = 4e-3
__all__ = ['SEMaskMobileNetV2']
def conv3x3(in_planes, out_planes, stride=1, groups=1):
    """Build a 3x3 SE convolution with padding 1 and no bias."""
    return SEConv2d(
        in_planes,
        out_planes,
        kernel_size=3,
        stride=stride,
        padding=1,
        groups=groups,
        bias=False,
        threshold=THRESHOLD,
    )
def conv1x1(in_planes, out_planes, stride=1):
    """Build a 1x1 SE convolution with no bias."""
    return SEConv2d(
        in_planes,
        out_planes,
        kernel_size=1,
        stride=stride,
        bias=False,
        threshold=THRESHOLD,
    )
class Block(nn.Module):
    """MobileNetV2 inverted residual block: 1x1 expand -> 3x3 depthwise -> 1x1 project.

    A residual connection is applied only when stride == 1; if the input and
    output channel counts differ, the shortcut is a 1x1 conv + batch norm,
    otherwise it is the identity.
    """

    def __init__(self, in_planes, out_planes, expansion, stride):
        super(Block, self).__init__()
        self.stride = stride
        # Expanded (hidden) channel count.
        planes = expansion * in_planes
        self.conv1 = conv1x1(in_planes, planes, stride=1)
        self.bn1 = nn.BatchNorm2d(planes)
        # Depthwise convolution: groups == channels.
        self.conv2 = conv3x3(planes, planes, stride=stride, groups=planes)
        self.bn2 = nn.BatchNorm2d(planes)
        self.conv3 = conv1x1(planes, out_planes, stride=1)
        self.bn3 = nn.BatchNorm2d(out_planes)

        self.shortcut = nn.Sequential()
        if stride == 1 and in_planes != out_planes:
            self.shortcut = nn.Sequential(
                conv1x1(in_planes, out_planes, stride=1),
                nn.BatchNorm2d(out_planes),
            )

    def forward(self, x):
        out = F.relu(self.bn1(self.conv1(x)))
        out = F.relu(self.bn2(self.conv2(out)))
        # No activation after the projection (linear bottleneck).
        out = self.bn3(self.conv3(out))
        # Residual connection only when spatial resolution is preserved.
        out = out + self.shortcut(x) if self.stride == 1 else out
        return out
class SEMaskMobileNetV2(nn.Module):
    """MobileNetV2 built from SE (smart-exchange) masked conv/linear layers.

    Layout follows the standard CIFAR-10 adaptation of MobileNetV2
    (first stride reduced to 1, 4x4 average pool before the classifier).
    """
    # Per-stage config: (expansion, out_planes, num_blocks, stride)
    cfg = [(1, 16, 1, 1),
           (6, 24, 2, 1), # NOTE: change stride 2 -> 1 for CIFAR10
           (6, 32, 3, 2),
           (6, 64, 4, 2),
           (6, 96, 3, 1),
           (6, 160, 3, 2),
           (6, 320, 1, 1)]
    def __init__(self, num_classes=10, threshold=4e-3):
        super(SEMaskMobileNetV2, self).__init__()
        # NOTE: change conv1 stride 2 -> 1 for CIFAR10
        # NOTE(review): the module-level THRESHOLD is mutated here so that
        # conv3x3/conv1x1 pick it up — constructing two models with
        # different thresholds is order-dependent.
        global THRESHOLD
        THRESHOLD = threshold
        print('threshold is set to {}'.format(threshold))
        self.conv1 = conv3x3(3, 32, stride=1)
        self.bn1 = nn.BatchNorm2d(32)
        self.layers = self._make_layers(in_planes=32)
        self.conv2 = conv1x1(320, 1280, stride=1)
        self.bn2 = nn.BatchNorm2d(1280)
        self.linear = SELinear(1280, num_classes, threshold=THRESHOLD)
    def _make_layers(self, in_planes):
        # Expand cfg into a flat sequence of inverted-residual Blocks; only
        # the first block of each stage uses the stage's stride.
        layers = []
        for expansion, out_planes, num_blocks, stride in self.cfg:
            strides = [stride] + [1]*(num_blocks-1)
            for stride in strides:
                layers.append(Block(in_planes, out_planes, expansion, stride))
                in_planes = out_planes
        return nn.Sequential(*layers)
    def forward(self, x, return_linear_input=False):
        # Returns logits; if return_linear_input is True, also returns the
        # pooled features fed into the final linear layer.
        out = F.relu(self.bn1(self.conv1(x)))
        out = self.layers(out)
        out = F.relu(self.bn2(self.conv2(out)))
        # NOTE: change pooling kernel_size 7 -> 4 for CIFAR10
        out = F.avg_pool2d(out, 4)
        out = out.view(out.size(0), -1)
        if return_linear_input:
            linear_input = out
        out = self.linear(out)
        if return_linear_input:
            return out, linear_input
        else:
            return out
def test():
    """Smoke test: build the network, report SE factor sizes, run a forward pass."""
    net = SEMaskMobileNetV2()
    # Print the element count of the C factor for every SE-decomposed module.
    for m in net.modules():
        if hasattr(m, 'mask'):
            print(m.C.numel())
    x = torch.randn(2, 3, 32, 32)
    y = net(x)
    print(y.size())
# test()
if __name__ == "__main__":
test()
| 36.451852 | 116 | 0.613696 | 3,700 | 0.75188 | 0 | 0 | 0 | 0 | 0 | 0 | 1,420 | 0.288559 |
fb2f3eb592211892766f130ad1032ee5dd774617
| 911 |
py
|
Python
|
twitter_video.py
|
keselekpermen69/build_scripts
|
110392778ad0a8585efa944100aa1c13ef28469e
|
[
"MIT"
] | 5 |
2020-08-19T05:44:25.000Z
|
2021-05-13T05:15:50.000Z
|
twitter_video.py
|
MrMissx/scripts
|
110392778ad0a8585efa944100aa1c13ef28469e
|
[
"MIT"
] | null | null | null |
twitter_video.py
|
MrMissx/scripts
|
110392778ad0a8585efa944100aa1c13ef28469e
|
[
"MIT"
] | null | null | null |
import requests
def get_link():
    """Prompt the user for a Twitter video link and return the entered text."""
    prompt = "Give me a twitter video link: "
    return input(prompt)
def download(url: str):
    """Download *url*, saving it under the last path segment of the URL.

    The query string is stripped from the filename (the original only
    stripped queries starting with "?tag", leaving other downloads with
    an invalid '?' in the name).
    """
    name = url.rsplit('/')[-1]
    if '?' in name:
        name = name.split('?')[0]
    r = requests.get(url, allow_redirects=True)
    # Context manager guarantees the file handle is closed (the original
    # leaked it via open(...).write(...)).
    with open(name, 'wb') as f:
        f.write(r.content)
def main():
    """Ask for a tweet URL, resolve the video via the helper API, download it."""
    api_endpoint = "http://sosmeeed.herokuapp.com:80/api/twitter/video"
    payload = {'url': get_link()}
    response = requests.request("POST", api_endpoint, data=payload)

    if response.status_code != 200:
        print("Can't fetch video!")
        return

    res = response.json()
    if not res["success"]:
        print("Error! Please input correct URL")
        return

    # The first entry holds the highest-quality variant.
    url = res["data"]["data"][0]["link"]
    print(f"Downloading ({url})...")
    download(url)
if __name__ == "__main__":
main()
| 24.621622 | 86 | 0.531284 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 252 | 0.276619 |
fb2fabace401a8d0a972f811af8b0a86ed348c85
| 2,951 |
py
|
Python
|
frappe-bench/apps/erpnext/erpnext/regional/india/utils.py
|
Semicheche/foa_frappe_docker
|
a186b65d5e807dd4caf049e8aeb3620a799c1225
|
[
"MIT"
] | null | null | null |
frappe-bench/apps/erpnext/erpnext/regional/india/utils.py
|
Semicheche/foa_frappe_docker
|
a186b65d5e807dd4caf049e8aeb3620a799c1225
|
[
"MIT"
] | null | null | null |
frappe-bench/apps/erpnext/erpnext/regional/india/utils.py
|
Semicheche/foa_frappe_docker
|
a186b65d5e807dd4caf049e8aeb3620a799c1225
|
[
"MIT"
] | null | null | null |
import frappe, re
from frappe import _
from frappe.utils import cstr
from erpnext.regional.india import states, state_numbers
from erpnext.controllers.taxes_and_totals import get_itemised_tax, get_itemised_taxable_amount
def validate_gstin_for_india(doc, method):
	"""Validate and normalize the GSTIN on an address-like document.

	- Uppercases the GSTIN and checks it against the 15-character GSTIN
	  format (the literal "NA" is allowed for unregistered parties).
	- Derives gst_state from state and gst_state_number from gst_state.
	- Ensures the GSTIN's first two digits match the state number.
	"""
	if not hasattr(doc, 'gstin'):
		return
	if doc.gstin:
		doc.gstin = doc.gstin.upper()
		if doc.gstin != "NA":
			# '$' anchor added: without it, re.match accepted any string whose
			# first 15 characters formed a valid GSTIN (trailing garbage passed).
			p = re.compile("[0-9]{2}[a-zA-Z]{5}[0-9]{4}[a-zA-Z]{1}[1-9A-Za-z]{1}[Z]{1}[0-9a-zA-Z]{1}$")
			if not p.match(doc.gstin):
				frappe.throw(_("Invalid GSTIN or Enter NA for Unregistered"))
	if not doc.gst_state:
		if doc.state in states:
			doc.gst_state = doc.state
	if doc.gst_state:
		doc.gst_state_number = state_numbers[doc.gst_state]
		if doc.gstin and doc.gstin != "NA" and doc.gst_state_number != doc.gstin[:2]:
			frappe.throw(_("First 2 digits of GSTIN should match with State number {0}")
				.format(doc.gst_state_number))
def get_itemised_tax_breakup_header(item_doctype, tax_accounts):
	"""Return tax-breakup table headers; use HSN/SAC when the item doctype tracks HSN codes."""
	has_hsn = frappe.get_meta(item_doctype).has_field('gst_hsn_code')
	first_column = _("HSN/SAC") if has_hsn else _("Item")
	return [first_column, _("Taxable Amount")] + tax_accounts
def get_itemised_tax_breakup_data(doc):
	"""Return (tax, taxable_amount) breakups for *doc*, grouped by HSN code.

	Falls back to the plain per-item breakup when the item doctype has no
	gst_hsn_code field. Items sharing an HSN code have their tax amounts and
	taxable amounts summed; the tax rate shown is the last item's rate.
	"""
	itemised_tax = get_itemised_tax(doc.taxes)
	itemised_taxable_amount = get_itemised_taxable_amount(doc.items)
	if not frappe.get_meta(doc.doctype + " Item").has_field('gst_hsn_code'):
		return itemised_tax, itemised_taxable_amount
	# Map item code/name -> HSN code for regrouping.
	item_hsn_map = frappe._dict()
	for d in doc.items:
		item_hsn_map.setdefault(d.item_code or d.item_name, d.get("gst_hsn_code"))
	# Regroup per-item taxes under their HSN code, accumulating amounts.
	hsn_tax = {}
	for item, taxes in itemised_tax.items():
		hsn_code = item_hsn_map.get(item)
		hsn_tax.setdefault(hsn_code, frappe._dict())
		for tax_account, tax_detail in taxes.items():
			hsn_tax[hsn_code].setdefault(tax_account, {"tax_rate": 0, "tax_amount": 0})
			hsn_tax[hsn_code][tax_account]["tax_rate"] = tax_detail.get("tax_rate")
			hsn_tax[hsn_code][tax_account]["tax_amount"] += tax_detail.get("tax_amount")
	# set taxable amount
	hsn_taxable_amount = frappe._dict()
	for item, taxable_amount in itemised_taxable_amount.items():
		hsn_code = item_hsn_map.get(item)
		hsn_taxable_amount.setdefault(hsn_code, 0)
		hsn_taxable_amount[hsn_code] += itemised_taxable_amount.get(item)
	return hsn_tax, hsn_taxable_amount
def set_place_of_supply(doc, method):
	"""Set doc.place_of_supply ("<state number>-<state>") from the relevant address.

	Uses the shipping address when available, falling back to the party
	address. No-op if the Address doctype has no gst_state field or the
	doctype has no applicable address.
	"""
	if not frappe.get_meta('Address').has_field('gst_state'): return
	# Initialized to None: previously address_name was left unbound for
	# doctypes other than the three handled below, raising NameError.
	address_name = None
	if doc.doctype in ("Sales Invoice", "Delivery Note"):
		address_name = doc.shipping_address_name or doc.customer_address
	elif doc.doctype == "Purchase Invoice":
		address_name = doc.shipping_address or doc.supplier_address
	if address_name:
		address = frappe.db.get_value("Address", address_name, ["gst_state", "gst_state_number"], as_dict=1)
		doc.place_of_supply = cstr(address.gst_state_number) + "-" + cstr(address.gst_state)
# don't remove this function it is used in tests
def test_method():
	"""Regional override hook: the test-suite checks this replaces the default implementation."""
	return 'overridden'
| 36.8875 | 102 | 0.74856 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 563 | 0.190783 |
fb32689f50782a5ff37cf378f6a450894a0dc23f
| 22,922 |
py
|
Python
|
tests/cupy_tests/test_cublas.py
|
Onkar627/cupy
|
8eef1ad5393c0a92c5065bc05137bf997f37044a
|
[
"MIT"
] | 1 |
2022-01-12T22:57:54.000Z
|
2022-01-12T22:57:54.000Z
|
tests/cupy_tests/test_cublas.py
|
Onkar627/cupy
|
8eef1ad5393c0a92c5065bc05137bf997f37044a
|
[
"MIT"
] | null | null | null |
tests/cupy_tests/test_cublas.py
|
Onkar627/cupy
|
8eef1ad5393c0a92c5065bc05137bf997f37044a
|
[
"MIT"
] | 1 |
2022-03-21T20:19:12.000Z
|
2022-03-21T20:19:12.000Z
|
import numpy
import pytest
import cupy
from cupy import cublas
from cupy import testing
from cupy.testing import _attr
@testing.parameterize(*testing.product({
    'dtype': ['float32', 'float64', 'complex64', 'complex128'],
    'n': [10, 33, 100],
    'bs': [None, 1, 10],
    'nrhs': [None, 1, 10],
}))
@_attr.gpu
class TestBatchedGesv:
    """Solve A x_ref = b, then check cublas.batched_gesv recovers x_ref."""
    # Tolerances keyed by real dtype char ('f' single, 'd' double).
    _tol = {'f': 5e-5, 'd': 1e-12}
    def _make_random_matrices(self, shape, xp):
        # Random matrices in [0, 1); complex dtypes get a random imaginary part.
        a = testing.shaped_random(shape, xp, dtype=self.r_dtype, scale=1)
        if self.dtype.char in 'FD':
            a = a + 1j * testing.shaped_random(shape, xp, dtype=self.r_dtype,
                                               scale=1)
        return a
    def _make_well_conditioned_matrices(self, shape):
        # Rebuild A from its SVD with singular values in [1, 2) so the
        # linear systems are well conditioned.
        a = self._make_random_matrices(shape, numpy)
        u, s, vh = numpy.linalg.svd(a)
        s = testing.shaped_random(s.shape, numpy, dtype=self.r_dtype,
                                  scale=1) + 1
        a = numpy.einsum('...ik,...k,...kj->...ij', u, s, vh)
        return cupy.array(a)
    @pytest.fixture(autouse=True)
    def setUp(self):
        self.dtype = numpy.dtype(self.dtype)
        if self.dtype.char in 'fF':
            self.r_dtype = numpy.float32
        else:
            self.r_dtype = numpy.float64
        n = self.n
        bs = 1 if self.bs is None else self.bs
        nrhs = 1 if self.nrhs is None else self.nrhs
        # Construct b = A @ x_ref so the exact solution is known.
        a = self._make_well_conditioned_matrices((bs, n, n))
        x = self._make_random_matrices((bs, n, nrhs), cupy)
        b = cupy.matmul(a, x)
        # bs=None / nrhs=None exercise the un-batched / single-RHS shapes.
        a_shape = (n, n) if self.bs is None else (bs, n, n)
        b_shape = [n]
        if self.bs is not None:
            b_shape.insert(0, bs)
        if self.nrhs is not None:
            b_shape.append(nrhs)
        self.a = a.reshape(a_shape)
        self.b = b.reshape(b_shape)
        self.x_ref = x.reshape(b_shape)
        if self.r_dtype == numpy.float32:
            self.tol = self._tol['f']
        elif self.r_dtype == numpy.float64:
            self.tol = self._tol['d']
    def test_batched_gesv(self):
        x = cublas.batched_gesv(self.a, self.b)
        cupy.testing.assert_allclose(x, self.x_ref,
                                     rtol=self.tol, atol=self.tol)
@testing.parameterize(*testing.product({
    'dtype': ['float32', 'float64', 'complex64', 'complex128'],
    'n': [10, 100],
    'mode': [None, numpy, cupy],
}))
@_attr.gpu
class TestLevel1Functions:
    """Tests cuBLAS level-1 wrappers against NumPy/CuPy references.

    'mode' selects where the optional 0-d `out` result lives:
    None (no out), host (numpy) or device (cupy).
    """
    _tol = {'f': 1e-5, 'd': 1e-12}
    @pytest.fixture(autouse=True)
    def setUp(self):
        self.dtype = numpy.dtype(self.dtype)
        self.tol = self._tol[self.dtype.char.lower()]
    def _make_random_vector(self):
        return testing.shaped_random((self.n,), cupy, dtype=self.dtype)
    def _make_out(self, dtype):
        # 0-d output buffer on host or device, or None when mode is None.
        out = None
        if self.mode is not None:
            out = self.mode.empty([], dtype=dtype)
        return out
    def _check_pointer(self, a, b):
        # When `out` was supplied, the result must be written in place.
        if a is not None and b is not None:
            assert self._get_pointer(a) == self._get_pointer(b)
    def _get_pointer(self, a):
        if isinstance(a, cupy.ndarray):
            return a.data.ptr
        else:
            return a.ctypes.data
    def test_iamax(self):
        x = self._make_random_vector()
        # cuBLAS amax uses |real| + |imag|, not the modulus.
        ref = cupy.argmax(cupy.absolute(x.real) + cupy.absolute(x.imag))
        out = self._make_out('i')
        res = cublas.iamax(x, out=out)
        self._check_pointer(res, out)
        # Note: iamax returns 1-based index
        cupy.testing.assert_array_equal(res - 1, ref)
    def test_iamin(self):
        x = self._make_random_vector()
        ref = cupy.argmin(cupy.absolute(x.real) + cupy.absolute(x.imag))
        out = self._make_out('i')
        res = cublas.iamin(x, out=out)
        self._check_pointer(res, out)
        # Note: iamin returns 1-based index
        cupy.testing.assert_array_equal(res - 1, ref)
    def test_asum(self):
        x = self._make_random_vector()
        ref = cupy.sum(cupy.absolute(x.real) + cupy.absolute(x.imag))
        out = self._make_out(self.dtype.char.lower())
        res = cublas.asum(x, out=out)
        self._check_pointer(res, out)
        cupy.testing.assert_allclose(res, ref, rtol=self.tol, atol=self.tol)
    def test_axpy(self):
        x = self._make_random_vector()
        y = self._make_random_vector()
        a = 1.1
        if self.dtype.char in 'FD':
            a = a - 1j * 0.9
        ref = a * x + y
        if self.mode is not None:
            a = self.mode.array(a, dtype=self.dtype)
        cublas.axpy(a, x, y)
        cupy.testing.assert_allclose(y, ref, rtol=self.tol, atol=self.tol)
    def test_dot(self):
        x = self._make_random_vector()
        y = self._make_random_vector()
        ref = x.dot(y)
        out = self._make_out(self.dtype)
        # dot is real-only; complex dtypes must raise (use dotu/dotc instead).
        if self.dtype.char in 'FD':
            with pytest.raises(TypeError):
                res = cublas.dot(x, y, out=out)
            return
        res = cublas.dot(x, y, out=out)
        self._check_pointer(res, out)
        cupy.testing.assert_allclose(res, ref, rtol=self.tol, atol=self.tol)
    def test_dotu(self):
        x = self._make_random_vector()
        y = self._make_random_vector()
        ref = x.dot(y)
        out = self._make_out(self.dtype)
        res = cublas.dotu(x, y, out=out)
        self._check_pointer(res, out)
        cupy.testing.assert_allclose(res, ref, rtol=self.tol, atol=self.tol)
    def test_dotc(self):
        x = self._make_random_vector()
        y = self._make_random_vector()
        # dotc conjugates the first operand.
        ref = x.conj().dot(y)
        out = self._make_out(self.dtype)
        res = cublas.dotc(x, y, out=out)
        self._check_pointer(res, out)
        cupy.testing.assert_allclose(res, ref, rtol=self.tol, atol=self.tol)
    def test_nrm2(self):
        x = self._make_random_vector()
        ref = cupy.linalg.norm(x)
        out = self._make_out(self.dtype.char.lower())
        res = cublas.nrm2(x, out=out)
        self._check_pointer(res, out)
        cupy.testing.assert_allclose(res, ref, rtol=self.tol, atol=self.tol)
    def test_scal(self):
        x = self._make_random_vector()
        a = 1.1
        if self.dtype.char in 'FD':
            a = a - 1j * 0.9
        ref = a * x
        if self.mode is not None:
            a = self.mode.array(a, dtype=self.dtype)
        cublas.scal(a, x)
        cupy.testing.assert_allclose(x, ref, rtol=self.tol, atol=self.tol)
@testing.parameterize(*testing.product({
    'dtype': ['float32', 'float64', 'complex64', 'complex128'],
    'shape': [(10, 9), (9, 10)],
    'trans': ['N', 'T', 'H'],
    'order': ['C', 'F'],
    'mode': [None, numpy, cupy],
}))
@_attr.gpu
class TestGemv:
    """Tests cublas.gemv: y <- alpha * op(A) @ x + beta * y for op in {N, T, H}."""
    _tol = {'f': 1e-5, 'd': 1e-12}
    @pytest.fixture(autouse=True)
    def setUp(self):
        self.dtype = numpy.dtype(self.dtype)
        self.tol = self._tol[self.dtype.char.lower()]
    def test_gemv(self):
        a = testing.shaped_random(self.shape, cupy, dtype=self.dtype,
                                  order=self.order)
        # x/y lengths depend on whether A is transposed.
        if self.trans == 'N':
            ylen, xlen = self.shape
        else:
            xlen, ylen = self.shape
        x = testing.shaped_random((xlen,), cupy, dtype=self.dtype)
        y = testing.shaped_random((ylen,), cupy, dtype=self.dtype)
        alpha = 0.9
        beta = 0.8
        if self.dtype.char in 'FD':
            alpha = alpha - 1j * 0.7
            beta = beta - 1j * 0.6
        # Reference computed before y is overwritten in place.
        if self.trans == 'N':
            ref = alpha * a.dot(x) + beta * y
        elif self.trans == 'T':
            ref = alpha * a.T.dot(x) + beta * y
        elif self.trans == 'H':
            ref = alpha * a.T.conj().dot(x) + beta * y
        # 'mode' exercises scalar alpha/beta passed as host or device arrays.
        if self.mode is not None:
            alpha = self.mode.array(alpha)
            beta = self.mode.array(beta)
        cupy.cublas.gemv(self.trans, alpha, a, x, beta, y)
        cupy.testing.assert_allclose(y, ref, rtol=self.tol, atol=self.tol)
@testing.parameterize(*testing.product({
    'rank': [5, 9],
    'band': [0, 1, 3],
    'lower': [0, 1],
    'order': ['C', 'F'],
    'mode': [None, numpy, cupy],
}))
@_attr.gpu
class TestSbmv:
    """Tests cublas.sbmv (symmetric band matrix-vector multiply)."""
    _tol = {'f': 1e-5, 'd': 1e-12}
    def _gen2band(self, A, ku=0, kl=0, order='C'):
        # Pack dense matrix A into BLAS banded storage with ku super- and
        # kl sub-diagonals: B[ku - j + i, j] = A[i, j].
        assert A.ndim == 2
        n, m = A.shape
        ldm, lda = n, 1 + ku + kl
        B = numpy.zeros((lda, ldm), dtype=A.dtype, order=order)
        for j in range(n):
            k = ku - j
            for i in range(max(0, j-ku), min(m, j + kl + 1)):
                B[(k + i), j] = A[i, j]
        return B
    @testing.for_dtypes('fd')
    def test_sbmv(self, dtype):
        dtype = numpy.dtype(dtype)
        alpha, beta = 3.0, 2.0
        n, k = self.rank, self.band
        # Build a random symmetric band matrix with bandwidth k.
        a = numpy.eye(n, n, 0, dtype, self.order)
        a *= numpy.random.randint(20)
        for i in range(1, k+1):
            band = numpy.random.randint(20, size=n-i)
            a += numpy.diag(band, k=+i)
            a += numpy.diag(band, k=-i)
        x = numpy.random.randint(20, size=n).astype(a.dtype)
        y = numpy.random.randint(20, size=n).astype(a.dtype)
        # lower==1 stores the sub-diagonals instead of the super-diagonals.
        ku, kl = k, 0
        if self.lower == 1:
            ku, kl = kl, ku
        b = self._gen2band(a, ku, kl)
        a, b = cupy.asarray(a), cupy.asarray(b)
        x, y = cupy.asarray(x), cupy.asarray(y)
        ref = alpha * a.dot(x) + beta * y
        if self.mode is not None:
            alpha = self.mode.array(alpha)
            beta = self.mode.array(beta)
        # sbmv updates y in place and also returns it.
        y_ret = cupy.cublas.sbmv(k, alpha, b, x, beta, y, lower=self.lower)
        tol = self._tol[dtype.char.lower()]
        cupy.testing.assert_allclose(y, ref, rtol=tol, atol=tol)
        cupy.testing.assert_allclose(y_ret, ref, rtol=tol, atol=tol)
@testing.parameterize(*testing.product({
    'dtype': ['float32', 'float64', 'complex64', 'complex128'],
    'shape': [(10, 9), (9, 10)],
    'order': ['C', 'F'],
    'mode': [None, numpy, cupy],
}))
@_attr.gpu
class TestGer:
    """Tests rank-1 updates ger/geru/gerc: A <- alpha * x y^(T|H) + A."""
    _tol = {'f': 1e-5, 'd': 1e-12}
    @pytest.fixture(autouse=True)
    def setUp(self):
        self.dtype = numpy.dtype(self.dtype)
        self.tol = self._tol[self.dtype.char.lower()]
        self.a = testing.shaped_random(self.shape, cupy, dtype=self.dtype,
                                       order=self.order)
        self.x = testing.shaped_random((self.shape[0],), cupy,
                                       dtype=self.dtype)
        self.y = testing.shaped_random((self.shape[1],), cupy,
                                       dtype=self.dtype)
        self.alpha = 1.1
        if self.dtype.char in 'FD':
            self.alpha = self.alpha - 1j * 0.9
    def test_ger(self):
        # ger is real-only; complex dtypes must raise (use geru/gerc).
        if self.dtype.char in 'FD':
            with pytest.raises(TypeError):
                cublas.ger(self.alpha, self.x, self.y, self.a)
            return
        ref = self.alpha * cupy.outer(self.x, self.y) + self.a
        if self.mode is not None:
            self.alpha = self.mode.array(self.alpha)
        cublas.ger(self.alpha, self.x, self.y, self.a)
        cupy.testing.assert_allclose(self.a, ref, rtol=self.tol, atol=self.tol)
    def test_geru(self):
        ref = self.alpha * cupy.outer(self.x, self.y) + self.a
        if self.mode is not None:
            self.alpha = self.mode.array(self.alpha)
        cublas.geru(self.alpha, self.x, self.y, self.a)
        cupy.testing.assert_allclose(self.a, ref, rtol=self.tol, atol=self.tol)
    def test_gerc(self):
        # gerc conjugates y.
        ref = self.alpha * cupy.outer(self.x, self.y.conj()) + self.a
        if self.mode is not None:
            self.alpha = self.mode.array(self.alpha)
        cublas.gerc(self.alpha, self.x, self.y, self.a)
        cupy.testing.assert_allclose(self.a, ref, rtol=self.tol, atol=self.tol)
@testing.parameterize(*testing.product({
    'nk': [(5, 9), (9, 5)],
    'transa': ['N', 'T'],
    'ordera': ['F', 'C'],
    'orderc': ['F', 'C'],
    'lower': [0, 1],
    'mode': [None, numpy, cupy]
}))
@_attr.gpu
class TestSyrk:
    """Tests cublas.syrk: C <- alpha * op(A) op(A)^T (+ beta * C).

    Only the selected triangle (lower/upper) is defined; the comparison
    against the dense reference is therefore done per triangle.
    """
    _tol = {'f': 1e-5, 'd': 1e-12}
    def _make_matrix(self, m, n, trans, order, dtype):
        if trans == 'N':
            shape = (m, n)
        else:
            shape = (n, m)
        return testing.shaped_random(shape, cupy, dtype=dtype, order=order,
                                     scale=1.0)
    def _trans_matrix(self, a, trans):
        if trans == 'N':
            return a
        return a.T
    @testing.for_dtypes('fdFD')
    def test_syrk(self, dtype):
        dtype = numpy.dtype(dtype)
        tol = self._tol[dtype.char.lower()]
        alpha, beta = 3.0, 2.0
        if dtype.char in 'FD':
            alpha = alpha - 1j * 2.0
            beta = beta + 1j * 5.0
        n, k = self.nk
        a = self._make_matrix(n, k, self.transa, self.ordera, dtype)
        aa = self._trans_matrix(a, self.transa)
        ref = alpha * aa.dot(aa.T)  # beta is used as a placeholder only
        c = cublas.syrk(self.transa, a, alpha=alpha, beta=beta,
                        lower=self.lower)
        # Zero out the untouched triangle of the reference, then compare
        # upper and lower triangles separately.
        rr, cc = cupy.asnumpy(ref), cupy.asnumpy(c)
        if self.lower:
            rr[numpy.triu_indices_from(rr, 1)] = 0
        else:
            rr[numpy.tril_indices_from(rr, -1)] = 0
        rru = rr[numpy.triu_indices_from(rr)]
        ccu = cc[numpy.triu_indices_from(cc)]
        rrl = rr[numpy.tril_indices_from(rr)]
        ccl = cc[numpy.tril_indices_from(cc)]
        cupy.testing.assert_allclose(ccu, rru, rtol=tol, atol=tol)
        cupy.testing.assert_allclose(ccl, rrl, rtol=tol, atol=tol)
    @testing.for_dtypes('fdFD')
    def test_syrk_out(self, dtype):
        # Same as test_syrk but accumulating into an existing C (beta path).
        dtype = numpy.dtype(dtype)
        tol = self._tol[dtype.char.lower()]
        alpha, beta = 2.3, 1.7
        if dtype.char in 'FD':
            alpha = alpha - 1j * 0.7
            beta = beta + 1j * 2.3
        n, k = self.nk
        a = self._make_matrix(n, k, self.transa, self.ordera, dtype)
        aa = self._trans_matrix(a, self.transa)
        m = aa.shape[0]
        c = self._make_matrix(m, m, 'N', self.orderc, dtype)
        c0 = cupy.array(c)
        ref = alpha * aa.dot(aa.T) + beta * c
        cublas.syrk(self.transa, a, out=c, alpha=alpha, beta=beta,
                    lower=self.lower)
        # The untouched triangle must keep its original (c0) contents.
        rr, c0, cc = cupy.asnumpy(ref), cupy.asnumpy(c0), cupy.asnumpy(c)
        if self.lower:
            trii = numpy.triu_indices_from(rr, 1)
        else:
            trii = numpy.tril_indices_from(rr, -1)
        rr[trii] = c0[trii]
        rru = rr[numpy.triu_indices_from(rr)]
        ccu = cc[numpy.triu_indices_from(cc)]
        rrl = rr[numpy.tril_indices_from(rr)]
        ccl = cc[numpy.tril_indices_from(cc)]
        cupy.testing.assert_allclose(ccu, rru, rtol=tol, atol=tol)
        cupy.testing.assert_allclose(ccl, rrl, rtol=tol, atol=tol)
# Parameterized over problem size, transpose flags ('N'/'T'/'H' = none /
# transpose / conjugate-transpose), memory orders, and scalar container type.
@testing.parameterize(*testing.product({
    'mnk': [(8, 9, 10), (10, 9, 8)],
    'transa': ['N', 'T', 'H'],
    'transb': ['N', 'T', 'H'],
    'ordera': ['C', 'F'],
    'orderb': ['C', 'F'],
    'orderc': ['C', 'F'],
    'mode': [None, numpy, cupy],
}))
@_attr.gpu
class TestGemmAndGeam:
    """Tests for cublas.gemm (matrix multiply) and cublas.geam (matrix add)."""
    _tol = {'f': 1e-5, 'd': 1e-12}
    def _make_matrix(self, m, n, trans, order, dtype):
        # Effective (post-op) shape is (m, n); store transposed when the
        # routine will apply op() itself.
        if trans == 'N':
            shape = (m, n)
        else:
            shape = (n, m)
        return testing.shaped_random(shape, cupy, dtype=dtype, order=order,
                                     scale=1.0)
    def _trans_matrix(self, a, trans):
        # Host-side op(): 'T' = transpose, 'H' = conjugate transpose.
        if trans == 'T':
            a = a.T
        elif trans == 'H':
            a = a.T.conj()
        return a
    @testing.for_dtypes('fdFD')
    def test_gemm(self, dtype):
        # No-``out`` form; restricted to one mode/orderc combination so the
        # default-allocation path is exercised exactly once per size combo.
        if not (self.mode is None and self.orderc == 'C'):
            pytest.skip()
        dtype = numpy.dtype(dtype)
        tol = self._tol[dtype.char.lower()]
        m, n, k = self.mnk
        a = self._make_matrix(m, k, self.transa, self.ordera, dtype)
        b = self._make_matrix(k, n, self.transb, self.orderb, dtype)
        aa = self._trans_matrix(a, self.transa)
        bb = self._trans_matrix(b, self.transb)
        ref = aa.dot(bb)
        c = cublas.gemm(self.transa, self.transb, a, b)
        cupy.testing.assert_allclose(c, ref, rtol=tol, atol=tol)
    @testing.for_dtypes('fdFD')
    def test_gemm_out(self, dtype):
        # Full C = alpha * op(A) @ op(B) + beta * C with an explicit out.
        dtype = numpy.dtype(dtype)
        tol = self._tol[dtype.char.lower()]
        m, n, k = self.mnk
        a = self._make_matrix(m, k, self.transa, self.ordera, dtype)
        b = self._make_matrix(k, n, self.transb, self.orderb, dtype)
        c = self._make_matrix(m, n, 'N', self.orderc, dtype)
        alpha = 0.9
        beta = 0.8
        if dtype.char in 'FD':
            alpha = alpha - 1j * 0.7
            beta = beta - 1j * 0.6
        aa = self._trans_matrix(a, self.transa)
        bb = self._trans_matrix(b, self.transb)
        ref = alpha * aa.dot(bb) + beta * c
        if self.mode is not None:
            # Scalars passed as 0-d numpy/cupy arrays.
            alpha = self.mode.array(alpha)
            beta = self.mode.array(beta)
        cublas.gemm(self.transa, self.transb, a, b, out=c,
                    alpha=alpha, beta=beta)
        cupy.testing.assert_allclose(c, ref, rtol=tol, atol=tol)
    @testing.for_dtypes('fdFD')
    def test_geam(self, dtype):
        # geam without out always produces an F-ordered result, so other
        # orderc values are skipped.
        if self.orderc != 'F':
            pytest.skip()
        dtype = numpy.dtype(dtype)
        tol = self._tol[dtype.char.lower()]
        m, n, _ = self.mnk
        a = self._make_matrix(m, n, self.transa, self.ordera, dtype)
        b = self._make_matrix(m, n, self.transb, self.orderb, dtype)
        alpha = 0.9
        beta = 0.8
        if dtype.char in 'FD':
            alpha = alpha - 1j * 0.7
            beta = beta - 1j * 0.6
        aa = self._trans_matrix(a, self.transa)
        bb = self._trans_matrix(b, self.transb)
        ref = alpha * aa + beta * bb
        if self.mode is not None:
            alpha = self.mode.array(alpha)
            beta = self.mode.array(beta)
        c = cublas.geam(self.transa, self.transb, alpha, a, beta, b)
        cupy.testing.assert_allclose(c, ref, rtol=tol, atol=tol)
    @testing.for_dtypes('fdFD')
    def test_geam_out(self, dtype):
        # geam writing into a caller-provided ``out`` of either order.
        dtype = numpy.dtype(dtype)
        tol = self._tol[dtype.char.lower()]
        m, n, _ = self.mnk
        a = self._make_matrix(m, n, self.transa, self.ordera, dtype)
        b = self._make_matrix(m, n, self.transb, self.orderb, dtype)
        c = self._make_matrix(m, n, 'N', self.orderc, dtype)
        alpha = 0.9
        beta = 0.8
        if dtype.char in 'FD':
            alpha = alpha - 1j * 0.7
            beta = beta - 1j * 0.6
        aa = self._trans_matrix(a, self.transa)
        bb = self._trans_matrix(b, self.transb)
        ref = alpha * aa + beta * bb
        if self.mode is not None:
            alpha = self.mode.array(alpha)
            beta = self.mode.array(beta)
        cublas.geam(self.transa, self.transb, alpha, a, beta, b, out=c)
        cupy.testing.assert_allclose(c, ref, rtol=tol, atol=tol)
# Parameterized over matrix shape, multiplication side, and memory orders.
@testing.parameterize(*testing.product({
    'shape': [(9, 10), (10, 9)],
    'side': ['L', 'R'],
    'ordera': ['C', 'F'],
    'orderc': ['C', 'F'],
}))
@_attr.gpu
class TestDgmm:
    """Tests for cublas.dgmm: multiply A by diag(x) on the left or right."""
    _tol = {'f': 1e-5, 'd': 1e-12}
    def _setup(self, dtype, xdim=1):
        # Build the operand matrix and the diagonal source ``x``.
        # xdim selects how x is supplied: 0 = scalar (used with incx=0),
        # 1 = vector, 2 = square matrix (its diagonal is read via incx=n+1).
        self.dtype = numpy.dtype(dtype)
        self.tol = self._tol[self.dtype.char.lower()]
        self.a = testing.shaped_random(self.shape, cupy, dtype=dtype,
                                       order=self.ordera, scale=1.0)
        # Diagonal length follows the side: rows for 'L', columns for 'R'.
        if self.side == 'L':
            xlen = self.shape[0]
        elif self.side == 'R':
            xlen = self.shape[1]
        if xdim == 0:
            self.x = cupy.array(1.1, dtype=dtype)
        elif xdim == 1:
            self.x = testing.shaped_random(
                (xlen,), cupy, dtype=dtype, scale=1.0)
        elif xdim == 2:
            self.x = testing.shaped_random(
                (xlen, xlen), cupy, dtype=dtype, scale=1.0)
    @testing.for_dtypes('fdFD')
    def test_dgmm(self, dtype):
        # Without ``out`` the result is always F-ordered, so skip other orderc.
        if self.orderc != 'F':
            pytest.skip()
        self._setup(dtype)
        if self.side == 'L':
            ref = cupy.diag(self.x) @ self.a
        elif self.side == 'R':
            ref = self.a @ cupy.diag(self.x)
        c = cublas.dgmm(self.side, self.a, self.x)
        cupy.testing.assert_allclose(c, ref, rtol=self.tol, atol=self.tol)
    @testing.for_dtypes('fdFD')
    def test_dgmm_out(self, dtype):
        # Result written into a caller-provided array of either order.
        self._setup(dtype)
        if self.side == 'L':
            ref = cupy.diag(self.x) @ self.a
        elif self.side == 'R':
            ref = self.a @ cupy.diag(self.x)
        c = cupy.empty(self.shape, order=self.orderc, dtype=dtype)
        cublas.dgmm(self.side, self.a, self.x, out=c)
        cupy.testing.assert_allclose(c, ref, rtol=self.tol, atol=self.tol)
    @testing.for_dtypes('fdFD')
    def test_dgmm_inplace(self, dtype):
        # out=a: dgmm must support writing over its own input matrix.
        if self.orderc != 'F':
            pytest.skip()
        self._setup(dtype)
        if self.side == 'L':
            ref = cupy.diag(self.x) @ self.a
        elif self.side == 'R':
            ref = self.a @ cupy.diag(self.x)
        cublas.dgmm(self.side, self.a, self.x, out=self.a)
        cupy.testing.assert_allclose(self.a, ref, rtol=self.tol, atol=self.tol)
    # (ordera, orderc, shape, side) combinations where HIP's dgmm is known
    # to misbehave with incx=-1; matching cases are xfail'ed below.
    _dgmm_incx_minus_one_hip_skip_condition = [
        ('C', 'F', (9, 10), 'R'),
        ('C', 'F', (10, 9), 'R'),
        ('F', 'F', (9, 10), 'L'),
        ('F', 'F', (10, 9), 'L'),
    ]
    def _check_dgmm_incx_minus_one_hip_skip_condition(self):
        # True when the current parameter combination is in the skip list.
        return (self.ordera, self.orderc, self.shape, self.side) in \
            self._dgmm_incx_minus_one_hip_skip_condition
    @testing.for_dtypes('fdFD')
    def test_dgmm_incx_minus_one(self, dtype):
        # incx=-1 reads the diagonal source vector in reverse.
        if self.orderc != 'F':
            pytest.skip()
        if cupy.cuda.runtime.is_hip:
            if self._check_dgmm_incx_minus_one_hip_skip_condition():
                pytest.xfail('HIP dgmm may have a bug')
        self._setup(dtype)
        if self.side == 'L':
            ref = cupy.diag(self.x[::-1]) @ self.a
        elif self.side == 'R':
            ref = self.a @ cupy.diag(self.x[::-1])
        c = cublas.dgmm(self.side, self.a, self.x, incx=-1)
        cupy.testing.assert_allclose(c, ref, rtol=self.tol, atol=self.tol)
    @testing.for_dtypes('fdFD')
    def test_dgmm_x_scalar(self, dtype):
        # incx=0 broadcasts a single value along the diagonal,
        # i.e. plain scalar multiplication.
        if self.orderc != 'F':
            pytest.skip()
        self._setup(dtype, xdim=0)
        ref = self.x * self.a
        c = cublas.dgmm(self.side, self.a, self.x, incx=0)
        cupy.testing.assert_allclose(c, ref, rtol=self.tol, atol=self.tol)
    @testing.for_dtypes('fdFD')
    def test_dgmm_x_matrix(self, dtype):
        # incx = n + 1 strides along the flattened square matrix so that
        # exactly its main diagonal is consumed.
        if self.orderc != 'F':
            pytest.skip()
        self._setup(dtype, xdim=2)
        if self.side == 'L':
            ref = cupy.diag(cupy.diag(self.x)) @ self.a
            incx = self.shape[0] + 1
        elif self.side == 'R':
            ref = self.a @ cupy.diag(cupy.diag(self.x))
            incx = self.shape[1] + 1
        c = cublas.dgmm(self.side, self.a, self.x, incx=incx)
        cupy.testing.assert_allclose(c, ref, rtol=self.tol, atol=self.tol)
| 35.871674 | 79 | 0.547596 | 21,133 | 0.921953 | 0 | 0 | 22,778 | 0.993718 | 0 | 0 | 989 | 0.043146 |
fb336072c83fb710a31348edb291a0b22c416bc1
| 706 |
py
|
Python
|
ProgsByDataset/UnpaywallMAG/create_unpaywall_refs.py
|
ashwath92/MastersThesis
|
f74755dc0c32f316da3c860dd5dbfa4c9cad97b3
|
[
"MIT"
] | 5 |
2020-11-05T07:11:54.000Z
|
2021-08-04T21:37:28.000Z
|
ProgsByDataset/UnpaywallMAG/create_unpaywall_refs.py
|
ashwath92/MastersThesis
|
f74755dc0c32f316da3c860dd5dbfa4c9cad97b3
|
[
"MIT"
] | null | null | null |
ProgsByDataset/UnpaywallMAG/create_unpaywall_refs.py
|
ashwath92/MastersThesis
|
f74755dc0c32f316da3c860dd5dbfa4c9cad97b3
|
[
"MIT"
] | 4 |
2020-11-05T06:04:38.000Z
|
2021-08-02T16:25:42.000Z
|
import re
import csv

# Build the Unpaywall citing -> cited paper list keyed by MAG ids.
# Each input line starts with the citing paper's MAG id; cited ids are
# embedded in the text as "=-=<mag_id>-=-" markers.
citation_pattern = re.compile(r'(=-=)([0-9]+)(-=-)')
fieldnames = ['citing_mag_id', 'cited_mag_id']
# Fix: the output file was previously opened without a context manager and
# never closed, so buffered rows could be lost. newline='' is the csv-module
# convention for files handed to a writer.
with open('AdditionalOutputs/unpaywallmag_references.tsv', 'w', newline='') as out_file, \
        open('inputfiles/training_no20182019_with_contexts.txt', 'r') as in_file:
    writer = csv.DictWriter(out_file, delimiter="\t", fieldnames=fieldnames)
    for line in in_file:
        tokens = line.split()
        if not tokens:
            # Skip blank lines instead of crashing with IndexError.
            continue
        citing_paperid = tokens[0]
        for citation_marker in citation_pattern.finditer(line):
            writer.writerow({'citing_mag_id': citing_paperid,
                             'cited_mag_id': citation_marker.group(2)})
| 39.222222 | 93 | 0.730878 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 234 | 0.331445 |
fb336c9971fca1cc78e0225c7dbfc79890eb6bc4
| 721 |
py
|
Python
|
Configuration/Skimming/python/PA_MinBiasSkim_cff.py
|
nistefan/cmssw
|
ea13af97f7f2117a4f590a5e654e06ecd9825a5b
|
[
"Apache-2.0"
] | null | null | null |
Configuration/Skimming/python/PA_MinBiasSkim_cff.py
|
nistefan/cmssw
|
ea13af97f7f2117a4f590a5e654e06ecd9825a5b
|
[
"Apache-2.0"
] | null | null | null |
Configuration/Skimming/python/PA_MinBiasSkim_cff.py
|
nistefan/cmssw
|
ea13af97f7f2117a4f590a5e654e06ecd9825a5b
|
[
"Apache-2.0"
] | null | null | null |
import FWCore.ParameterSet.Config as cms
# HLT minimum-bias trigger filter (the previous comment said "dimuon",
# but the configured path below is a minimum-bias HF-OR / pixel-track path).
import HLTrigger.HLTfilters.hltHighLevel_cfi
hltMinBiasHI = HLTrigger.HLTfilters.hltHighLevel_cfi.hltHighLevel.clone()
hltMinBiasHI.HLTPaths = ["HLT_PAL1MinimumBiasHF_OR_SinglePixelTrack_ForSkim_v*"]
# Do not raise if the path is absent in a run; accept the event when ANY
# listed path fired (andOr = True means logical OR).
hltMinBiasHI.throw = False
hltMinBiasHI.andOr = True
# Selection of a valid primary vertex: non-fake, |z| <= 25 cm, rho <= 2 cm.
primaryVertexFilterForMinBias = cms.EDFilter("VertexSelector",
    src = cms.InputTag("offlinePrimaryVertices"),
    cut = cms.string("!isFake && abs(z) <= 25 && position.Rho <= 2"),
    filter = cms.bool(True), # otherwise it won't filter the events
)
# MinBias skim sequence: trigger filter followed by the vertex filter.
minBiasSkimSequence = cms.Sequence(
    hltMinBiasHI *
    primaryVertexFilterForMinBias
)
| 32.772727 | 80 | 0.769764 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 248 | 0.343967 |
fb3473188063f91e4e0dd179c38bc4a0e03e103e
| 4,523 |
py
|
Python
|
backend/tests/date_arr_test.py
|
byamba3/mobilemetrics-backend
|
2e6c53325ecff842bde8c8fe19de220e8f90cb1d
|
[
"Unlicense"
] | null | null | null |
backend/tests/date_arr_test.py
|
byamba3/mobilemetrics-backend
|
2e6c53325ecff842bde8c8fe19de220e8f90cb1d
|
[
"Unlicense"
] | 1 |
2018-08-14T21:26:02.000Z
|
2018-08-14T21:26:16.000Z
|
backend/tests/date_arr_test.py
|
byamba3/mobilemetrics-backend
|
2e6c53325ecff842bde8c8fe19de220e8f90cb1d
|
[
"Unlicense"
] | null | null | null |
import datetime
from calendar import monthrange
import calendar
import numpy as np
#######
#For generate origin repayment schedule date and days columns
#######
# Schedule start date (DD/MM/YYYY components) and number of installments.
start_day = 1
start_month = 1
start_year = 2012
num_installment = 12
# One of: 'days', 'weeks', 'two-weeks', '15 days', '4 weeks',
# 'months', 'quarters', 'half-years', 'years' (see get_num_days below).
installment_time_period = 'months'
# Demo "user edit": column 1 = days column, column 2 = date column.
change_col_idx = 1
change_row_idx = 2
change_val = 100
modify_days = change_col_idx == 1
modify_date = change_col_idx == 2
# Fixed day counts for the non-calendar period types.
days_dict = {'days':1, 'weeks':7, 'two-weeks':14, '15 days':15, '4 weeks': 28}
# Month number -> abbreviated English month name used in date strings.
month_num_to_str_dict = {1:'Jan', 2: 'Feb', 3: 'Mar', 4:'Apr', 5:'May', 6:'Jun', 7:'Jul', 8:'Aug', 9:'Sep', 10:'Oct', 11:'Nov', 12:'Dec'}
def _days_in_months(prev_date, num_months):
    """Total days in ``num_months`` consecutive calendar months starting
    with the month containing ``prev_date``."""
    total = 0
    for _ in range(num_months):
        days = monthrange(prev_date.year, prev_date.month)[1]
        total += days
        # Advance by one whole month so the next iteration sees the
        # following calendar month.
        prev_date = prev_date + datetime.timedelta(days=days)
    return total


def get_num_days(period, prev_date):
    """Return the number of days in one installment ``period`` that starts
    at ``prev_date``.

    Calendar-dependent periods ('months', 'quarters', 'half-years', 'years')
    are computed from ``prev_date``; fixed periods come from ``days_dict``.
    The previous quarters/half-years branches were copy-pasted month
    aggregation loops; both now share ``_days_in_months``.
    """
    if period == 'months':
        return _days_in_months(prev_date, 1)
    elif period == 'quarters':
        return _days_in_months(prev_date, 3)
    elif period == 'half-years':
        return _days_in_months(prev_date, 6)
    elif period == 'years':
        # NOTE(review): this uses the leap status of the STARTING year even
        # when the period spans into the next year -- confirm intended.
        return 366 if calendar.isleap(prev_date.year) else 365
    else:
        return days_dict[period]
def calc_origin_days(day, month, year, installment_time_period, num_installment):
    """Build the original repayment schedule.

    Returns two parallel lists: formatted date strings ("D-Mon-YYYY") and
    the number of days between consecutive installments (first entry is 0).
    """
    def fmt(d):
        # e.g. "1-Jan-2012": unpadded day, abbreviated month from the
        # module-level month name table.
        return '{0}-{1}-{2}'.format(d.day, month_num_to_str_dict[d.month], d.year)

    current = datetime.datetime(year=year, month=month, day=day)
    dates = [fmt(current)]
    day_counts = [0]
    for _ in range(num_installment):
        increment = get_num_days(installment_time_period, current)
        current = current + datetime.timedelta(days=increment)
        dates.append(fmt(current))
        day_counts.append(increment)
    return dates, day_counts
# Generate and print the original schedule: "<date> <days-since-previous>".
date_arr, day_num_arr = calc_origin_days(start_day, start_month, start_year, installment_time_period, num_installment)
for idx in range(len(date_arr)):
    print ('{0} {1}'.format(date_arr[idx], day_num_arr[idx]))
# TODO: save/update the origin matrix with correct version number
#######
#For repayment schedule date and days on change
#######
def on_change_day(input_date_arr, input_day_arr, change_row_idx, change_val, prev_changes):
    """Recompute the schedule after a user edits the day count of one row.

    ``prev_changes`` records every manual override (index -> days, None for
    untouched rows); it is mutated here with the new edit and then consulted
    for every row so earlier overrides are preserved.

    NOTE(review): this reads the module-level ``installment_time_period``
    global rather than a parameter, and ``input_day_arr`` is only printed,
    never used -- confirm both are intentional.
    """
    new_date_arr = []
    new_day_num_arr = []
    # Record the new override before rebuilding the schedule.
    prev_changes[change_row_idx] = change_val
    date_col = input_date_arr
    day_col = input_day_arr
    print (input_day_arr)
    # Row 0 is the fixed start date ("D-Mon-YYYY"); its day count is 0.
    start_date = datetime.datetime.strptime(date_col[0], '%d-%b-%Y')
    prev_date = start_date
    new_date_arr.append(date_col[0])
    new_day_num_arr.append(0)
    for idx in range(1,len(date_col)):
        # An overridden row keeps its manual day count; others are
        # recomputed from the (global) installment period.
        if prev_changes[idx] != None:
            days_to_incre = prev_changes[idx]
        else:
            days_to_incre = get_num_days(installment_time_period, prev_date)
        new_date = prev_date + datetime.timedelta(days=days_to_incre)
        new_date_str = '{0}-{1}-{2}'.format(new_date.day, month_num_to_str_dict[new_date.month], new_date.year)
        new_date_arr.append(new_date_str)
        new_day_num_arr.append(days_to_incre)
        prev_date = new_date
    return new_date_arr, new_day_num_arr
# Override bookkeeping: one slot per schedule row, None = no manual edit yet.
prev_changes = np.zeros(len(date_arr), dtype=object)
for idx in range(len(prev_changes)):
    prev_changes[idx] = None
# if modify_days:
# Demo edit 1: force row 1 to a 100-day gap and reprint the schedule.
new_date_arr, new_day_num_arr = on_change_day(date_arr, day_num_arr, 1, 100, prev_changes)
# NOTE(review): redundant -- on_change_day already stored this override.
prev_changes[1] = 100
# TODO save the updated user change matrix to database
print ("#"*10+"after change")
for idx in range(len(new_date_arr)):
    print ('{0} {1}'.format(new_date_arr[idx], new_day_num_arr[idx]))
# print (new_date_arr)
# Demo edit 2: additionally force row 4 to 100 days; row 1's override sticks.
new_date_arr, new_day_num_arr = on_change_day(new_date_arr, new_day_num_arr, 4, 100, prev_changes)
prev_changes[4] = 100
print ("#"*10+"after change")
for idx in range(len(new_date_arr)):
    print ('{0} {1}'.format(new_date_arr[idx], new_day_num_arr[idx]))
| 35.335938 | 137 | 0.689808 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 696 | 0.15388 |
fb360bb15c3aa59399389bed8b1e64bb7c548a75
| 3,696 |
py
|
Python
|
launch_hits.py
|
hinthornw/Pointing
|
e3cbaf2c5f54d20fe959406714b38634bc4bb3fe
|
[
"MIT"
] | null | null | null |
launch_hits.py
|
hinthornw/Pointing
|
e3cbaf2c5f54d20fe959406714b38634bc4bb3fe
|
[
"MIT"
] | null | null | null |
launch_hits.py
|
hinthornw/Pointing
|
e3cbaf2c5f54d20fe959406714b38634bc4bb3fe
|
[
"MIT"
] | null | null | null |
from __future__ import print_function
import argparse
import json
from boto.mturk.price import Price
from boto.mturk.question import HTMLQuestion
from boto.mturk.connection import MTurkRequestError
import os
import simpleamt
import sys
import inspect
def printPlus(*args):
    """Print *args* prefixed with the caller's line number (crude debug log)."""
    caller = inspect.getouterframes(inspect.currentframe())[1]
    print(caller[2], ": ", args)
DEBUG = printPlus
# Per-category HIT-count ranges observed to work well:
# Motorbike 20-40
# Dog 40-70
# Person 40-70
# Only input rows with MINHITS <= i (and up to ~MAXHITS) are launched.
MINHITS = 40
MAXHITS = 60
if __name__ == '__main__':
    # CLI: HIT properties JSON, a Jinja HTML template, one JSON object per
    # line of input, and an optional cache of per-HIT image-name lists.
    parser = argparse.ArgumentParser(parents=[simpleamt.get_parent_parser()])
    parser.add_argument('--hit_properties_file', type=argparse.FileType('r'))
    parser.add_argument('--html_template')
    parser.add_argument('--input_json_file', type=argparse.FileType('r'))
    parser.add_argument('--input_cache', type=argparse.FileType('r'))
    args = parser.parse_args()
    im_names = []
    if args.input_cache is not None:
        #DEBUG("Cache: {}".format(args.input_cache))
        # One JSON list of image names per line, parallel to the input rows.
        for i, line in enumerate(args.input_cache):
            im_names.append(json.loads(line.strip()))
        #im_names = json.load(args.input_cache)
    input_json_file = []
    for i, line in enumerate(args.input_json_file):
        input_json_file.append(line)
    mtc = simpleamt.get_mturk_connection_from_args(args)
    hit_properties = json.load(args.hit_properties_file)
    hit_properties['reward'] = Price(hit_properties['reward'])
    #hit_properties['Reward'] = str(hit_properties['Reward']).decode('utf-8')
    simpleamt.setup_qualifications(hit_properties, mtc)
    #DEBUG("After", hit_properties)
    # frame_height is consumed by HTMLQuestion, not by create_hit, so pop it.
    frame_height = hit_properties.pop('frame_height')
    env = simpleamt.get_jinja_env(args.config)
    template = env.get_template(args.html_template)
    # Refuse to run without a HIT-id log, and never overwrite an existing one.
    if args.hit_ids_file is None:
        DEBUG('Need to input a hit_ids_file')
        sys.exit()
    DEBUG(args.hit_ids_file, args.input_cache)
    if os.path.isfile(args.hit_ids_file):
        DEBUG('hit_ids_file already exists')
        sys.exit()
    with open(args.hit_ids_file, 'w') as hit_ids_file:
        # for i, line in enumerate(args.input_json_file):
        print("Launching {} HITS".format(len(input_json_file)))
        for i, line in enumerate(input_json_file):
            if i < MINHITS:
                continue
            hit_input = json.loads(line.strip())
            # In a previous version I removed all single quotes from the json dump.
            # TODO: double check to see if this is still necessary.
            template_params = {'input': json.dumps(hit_input)}
            if len(im_names) > 0:
                template_params['im_names'] = json.dumps(
                    im_names[i])  # json.dumps(im_names)
            html = template.render(template_params)
            html_question = HTMLQuestion(html, frame_height)
            hit_properties['question'] = html_question
            #DEBUG('Rendering Template {}'.format(i))
            # with open('rendered_template{}.html'.format(i), 'w+') as f:
            #     f.write(html)
            # Retry until MTurk accepts the HIT.
            # This error handling is kinda hacky.
            # TODO: Do something better here.
            launched = False
            while not launched:
                try:
                    boto_hit = mtc.create_hit(**hit_properties)
                    launched = True
                except MTurkRequestError as e:
                    DEBUG(e)
            # Log the HIT id immediately so results can be fetched later.
            hit_id = boto_hit[0].HITId
            hit_ids_file.write('%s\n' % hit_id)
            DEBUG('Launched HIT ID: %s, %d' % (hit_id, i + 1))
            if i > MAXHITS:
                DEBUG(
                    "Debugging mode ON. Limiting HIT number to {}".format(
                        MAXHITS - MINHITS))
                break
| 36.594059 | 83 | 0.628247 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 926 | 0.250541 |
fb36ee81af365dbd4ec4f2c01bf63d11e510fd29
| 2,405 |
py
|
Python
|
scripts/skinning/utils/skin.py
|
robertjoosten/skinning-tools
|
1f1ec6c092fdc1e39aa82a711a13a0041f9d5730
|
[
"MIT"
] | 31 |
2018-09-08T16:42:01.000Z
|
2022-03-31T12:31:21.000Z
|
scripts/skinning/utils/skin.py
|
robertjoosten/skinning-tools
|
1f1ec6c092fdc1e39aa82a711a13a0041f9d5730
|
[
"MIT"
] | null | null | null |
scripts/skinning/utils/skin.py
|
robertjoosten/skinning-tools
|
1f1ec6c092fdc1e39aa82a711a13a0041f9d5730
|
[
"MIT"
] | 11 |
2018-10-01T09:57:53.000Z
|
2022-03-19T06:53:02.000Z
|
from maya import cmds
from maya.api import OpenMaya
from maya.api import OpenMayaAnim
from functools import partial
from skinning.utils import api
from skinning.vendor import apiundo
def get_cluster_fn(node):
    """
    Loop over an object's history and return the skin cluster api node that
    is part of the dependency graph. The geometry provided will be extended
    to its shapes.

    :param str node:
    :return: Skin cluster
    :rtype: OpenMayaAnim.MFnSkinCluster
    :raise RuntimeError: When no skin cluster can be found on any shape.
    """
    shapes = cmds.listRelatives(node, shapes=True) or []
    shapes.append(node)
    for shape in shapes:
        shape_obj = api.conversion.get_object(shape)
        dependency_iterator = OpenMaya.MItDependencyGraph(
            shape_obj,
            OpenMaya.MFn.kSkinClusterFilter,
            OpenMaya.MItDependencyGraph.kUpstream
        )
        # A non-exhausted iterator means at least one skin cluster exists
        # upstream of this shape; return the first one.
        if not dependency_iterator.isDone():
            return OpenMayaAnim.MFnSkinCluster(dependency_iterator.currentNode())

    # Bug fix: the previous ``while ... else`` raised as soon as the FIRST
    # shape had no skin cluster, so later shapes (including ``node`` itself,
    # appended above) were never examined. Raise only after all candidates
    # have been checked.
    raise RuntimeError("Node '{}' has no skin cluster in its history.".format(node))
def get_cluster(node):
    """
    Return the name of the skin cluster found in the history of ``node``.
    The geometry provided will be extended to its shapes.

    :param str node:
    :return: Skin cluster
    :rtype: str
    """
    return get_cluster_fn(node).name()
# ----------------------------------------------------------------------------
def set_weights(skin_cluster, dag, components, influences, weights_new, weights_old=None):
    """
    Set the skin weights via the API but add them to the undo queue using the
    apiundo module. If weights old are not provided they are retrieved from
    the skin cluster first.

    :param OpenMayaAnim.MFnSkinCluster skin_cluster:
    :param OpenMaya.MDagPath dag:
    :param OpenMaya.MObject components:
    :param OpenMaya.MIntArray influences:
    :param OpenMaya.MDoubleArray weights_new:
    :param OpenMaya.MDoubleArray weights_old: current weights; captured from
        the cluster when omitted so undo can restore them.
    """
    # Snapshot the pre-edit weights before anything is overwritten.
    if weights_old is None:
        weights_old, _ = skin_cluster.getWeights(dag, components)
    # Bind both directions as callables for the undo stack, then apply the
    # change by invoking redo() once -- keeps do/redo behaviour identical.
    undo = partial(skin_cluster.setWeights, dag, components, influences, weights_old)
    redo = partial(skin_cluster.setWeights, dag, components, influences, weights_new)
    apiundo.commit(undo=undo, redo=redo)
    redo()
| 32.066667 | 90 | 0.69106 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,130 | 0.469854 |
fb3817bdbf09d70c073184d064ea74bede74d6b3
| 1,607 |
py
|
Python
|
z_exams/exam_2018_08_26/ex_03_descriptions.py
|
VasAtanasov/SoftUni-Python-Fundamentals
|
471d0537dd6e5c8b61ede92b7673c0d67e2964fd
|
[
"MIT"
] | 1 |
2019-06-05T11:16:08.000Z
|
2019-06-05T11:16:08.000Z
|
z_exams/exam_2018_08_26/ex_03_descriptions.py
|
VasAtanasov/SoftUni-Python-Fundamentals
|
471d0537dd6e5c8b61ede92b7673c0d67e2964fd
|
[
"MIT"
] | null | null | null |
z_exams/exam_2018_08_26/ex_03_descriptions.py
|
VasAtanasov/SoftUni-Python-Fundamentals
|
471d0537dd6e5c8b61ede92b7673c0d67e2964fd
|
[
"MIT"
] | null | null | null |
import re
# Named-group patterns used to extract each required field from one input line.
REGEX = {
    # "name is <First Last>" -- two capitalised words
    "name": r"name is (?P<name>[A-Z][A-Za-z]+ [A-Z][A-Za-z]+)",
    # " <NN> years" -- exactly two digits
    "age": r" (?P<age>[0-9]{2}) years",
    # "on <DD-MM-YYYY>." -- date immediately before the final period
    "date": r"on (?P<date>[0-9]{2}-[0-9]{2}-[0-9]{4})."
}
class Person:
    """Read-only record holding a parsed person's name, age and birth date."""

    def __init__(self, full_name, age, birth_date):
        self.__full_name = full_name
        self.__age = age
        self.__birth_date = birth_date

    @property
    def full_name(self):
        """Full name, e.g. "John Smith"."""
        return self.__full_name

    @property
    def age(self):
        """Age in years."""
        return self.__age

    @property
    def birth_date(self):
        """Birth date string in DD-MM-YYYY form."""
        return self.__birth_date

    def __str__(self):
        parts = (
            "Name of the person: {0}.".format(self.full_name),
            "Age of the person: {0}.".format(self.age),
            "Birthdate of the person: {0}.".format(self.birth_date),
        )
        return "\n".join(parts)
# Read lines until "make migrations"; each valid line must end with '.' and
# match all three REGEX fields to become a Person record.
db = []
while True:
    line = input()
    if "make migrations" == line:
        break
    # NOTE(review): crashes with IndexError on an empty input line -- confirm
    # the judge never sends one.
    if line[len(line) - 1] != '.':
        continue
    params = {}
    for requirement, regex in REGEX.items():
        match = re.search(regex, line)
        if not match:
            # Any missing field invalidates the whole line.
            params = {}
            break
        if requirement == "age":
            age = int(match.group(requirement))
            if age <= 9 or age >= 100:
                # NOTE(review): this break leaves params non-empty when
                # "name" matched earlier (dict order: name, age, date), so
                # the Person(...) below would raise KeyError on params["age"]
                # for an out-of-range age -- latent bug, behavior preserved.
                break
            params["age"] = age
        elif requirement == "name":
            params["name"] = match.group(requirement)
        elif requirement == "date":
            params["date"] = match.group(requirement)
    if params:
        db.append(Person(full_name=params["name"], age=params["age"], birth_date=params["date"]))
if not db:
    print("DB is empty")
else:
    print("\n".join(map(str, db)))
| 23.289855 | 97 | 0.533914 | 565 | 0.351587 | 0 | 0 | 188 | 0.116988 | 0 | 0 | 348 | 0.216553 |
fb3a72f79061cb96a4d85d2974153bcda4983c49
| 7,667 |
py
|
Python
|
GoogleSheetsJobParser.py
|
mattfromsydney/Scientific-Report-Generator
|
17ddfe42e38d83341460a6de7b0b156bf7cd820a
|
[
"MIT"
] | null | null | null |
GoogleSheetsJobParser.py
|
mattfromsydney/Scientific-Report-Generator
|
17ddfe42e38d83341460a6de7b0b156bf7cd820a
|
[
"MIT"
] | null | null | null |
GoogleSheetsJobParser.py
|
mattfromsydney/Scientific-Report-Generator
|
17ddfe42e38d83341460a6de7b0b156bf7cd820a
|
[
"MIT"
] | null | null | null |
"""
NOTE on google sheets format:
The google sheets document must have the following:
Columns A and B from cell 2 down are all the sample details
Column A is the name of the detail and Column B is the value of the detail
Columns B and onwards can be anything but there must be three speicifc columns
Replicate, Test Name, Result
Each row in these columns is counted as a test result and are grouped together
by test name, replicate
All rows must have a unique TestName-Replicate combination or an error is shown
"""
from SampleData import SampleData
from SRGJob import SRGJob
import time
class GoogleSheetsJobParser:
    """Opens a Google Sheets document and parses the contents into a job class."""

    def __init__(self, view):
        # ``view`` only needs a display_message(str) method; used for progress
        # output while sheets are processed. (Removed a dead ``pass``.)
        self.view = view

    def parse_document(self, service, document_id):
        """The main function that opens the document, parses the data into a
        job object and returns the resulting job with all the extracted values.

        Args:
            service (google sheets service): the google sheets api service
            document_id (str): the google sheets document id to fetch and process

        Returns:
            job (SRGJob): the job object containing all the job information
                and extracted results from the data, or None when no sheet
                yielded a usable sample.
        """
        # Call the Sheets API to get a reference to the sheet
        sheet_ref = service.spreadsheets().get(spreadsheetId=document_id)
        # Get the sheet details such as individual sheet names; each test
        # sample lives on a separate sheet.
        sheet_details = sheet_ref.execute()
        # The job object will hold the list of samples and their data
        job = SRGJob()
        # Process each sheet: the special "Details" tab carries report-level
        # fields, every other tab is one sample.
        for sheet in sheet_details.get('sheets'):
            title = sheet.get('properties').get('title')
            self.view.display_message("Processing: {}".format(title))
            if title == "Details":
                self.parse_details(sheet, job, service, document_id)
            else:
                self.parse_sample(sheet, job, service, document_id)
            # Throttle so we do not breach the API's
            # requests-per-100-seconds quota.
            time.sleep(10)
        # Return None if no samples were added to this job
        if len(job.samples) > 0:
            return job
        else:
            return None

    def parse_details(self, sheet, job, service, document_id):
        """Parses the Details tab, which has information about the report.

        Args:
            sheet (google sheet): the google sheet to process
                (NOTE(review): currently unused -- the range is fetched by
                name; kept for signature parity with parse_sample)
            job (SRGJob): the job object that collects all the results
            service (google sheets service): the google sheets api service
            document_id (str): the google sheets document id to fetch and process
        """
        # Fetch the first two columns: field name in column A, value in B.
        result = service.spreadsheets().values().get(spreadsheetId=document_id,
                                                     range='Details!A2:B101').execute()
        values = result.get('values', [])
        for row in values:
            # Only complete, non-empty name/value pairs become report fields.
            if len(row) == 2 and row[0] != '':
                job.fields[row[0]] = row[1]

    def parse_sample(self, sheet, job, service, document_id):
        """Parses one non-Details tab as a separate sample.

        Args:
            sheet (google sheet): the google sheet to process
            job (SRGJob): the job object that collects all the results
            service (google sheets service): the google sheets api service
            document_id (str): the google sheets document id to fetch and process
        """
        # Title is needed for the ranges reference in batchGet();
        # rowCount tells us how many data rows to extract.
        title = sheet.get('properties').get('title')
        row_count = sheet.get('properties').get('gridProperties').get('rowCount')
        # One request for (a) the sample-detail columns A/B and (b) the
        # header row with all column names.
        result = service.spreadsheets().values().batchGet(spreadsheetId=document_id,
                                                          ranges=['{0}!A2:B101'.format(title),
                                                                  '{0}!A1:Z1'.format(title)]).execute()
        valueRanges = result.get('valueRanges', [])
        sample_data = SampleData()
        # Sample details: column A is the detail name, column B its value.
        values = valueRanges[0].get('values', [])
        for row in values:
            if len(row) == 2 and row[0] != '':
                sample_data.add_detail(row[0], row[1])
        # Locate the mandatory "Test Name" and "Result" columns in the header.
        values = valueRanges[1].get('values', [])
        try:
            tn_col_index = values[0].index("Test Name")
            res_col_index = values[0].index("Result")
        except ValueError:
            # Skip this sample: the required columns do not exist.
            return
        # Convert the numeric indices to spreadsheet column letters.
        # NOTE(review): only columns A-Z are supported (and the header fetch
        # above is capped at Z1) -- confirm sheets never exceed 26 columns.
        alpha_codes = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
        tn_code = alpha_codes[tn_col_index]
        res_code = alpha_codes[res_col_index]
        # Second request: the full Test Name and Result columns.
        data_result = service.spreadsheets().values().batchGet(spreadsheetId=document_id,
                                                               ranges=['{0}!{1}2:{1}{2}'.format(title, tn_code, row_count),
                                                                       '{0}!{1}2:{1}{2}'.format(title, res_code, row_count)]).execute()
        data_values = data_result.get('valueRanges', [])
        tn_data = data_values[0].get('values', [])
        res_data = data_values[1].get('values', [])
        # Pair each Test Name with its Result, skipping empty cells.
        for i in range(len(tn_data)):
            if len(tn_data[i]) > 0 and len(res_data[i]) > 0:
                sample_data.add_result(tn_data[i][0], res_data[i][0])
        # Only keep the sample if it produced usable data.
        if len(sample_data.details) > 0 and len(sample_data.test_results) > 0:
            job.add_sample(sample_data)
| 43.811429 | 104 | 0.60493 | 7,021 | 0.915743 | 0 | 0 | 0 | 0 | 0 | 0 | 4,302 | 0.561106 |
fb3a87c53bf0b6e09c757714ba3aaf427f9aff41
| 1,720 |
py
|
Python
|
aws_lambda/lambda.py
|
bluthen/utilbula_list
|
dbe4dfd273c7fa37553d91c5bde8744794ccba70
|
[
"Apache-2.0"
] | null | null | null |
aws_lambda/lambda.py
|
bluthen/utilbula_list
|
dbe4dfd273c7fa37553d91c5bde8744794ccba70
|
[
"Apache-2.0"
] | null | null | null |
aws_lambda/lambda.py
|
bluthen/utilbula_list
|
dbe4dfd273c7fa37553d91c5bde8744794ccba70
|
[
"Apache-2.0"
] | null | null | null |
import json
import base64
import boto3
import uuid
import traceback
def valid_uuid(uuid_str):
    """Return True if ``uuid_str`` parses as a UUID string, else False.

    NOTE(review): ``uuid.UUID(..., version=4)`` overwrites the version bits,
    so this accepts any well-formed UUID string, not strictly version-4 ones
    -- confirm that is the intent. (Also removed the unused ``val`` local.)
    """
    try:
        uuid.UUID(uuid_str, version=4)
    except ValueError:
        return False
    return True
def lambda_handler(event, context):
    """AWS Lambda entry point: GET fetches and POST saves a list blob in S3.

    The blob is keyed by a client-supplied UUID ("hash").  POST bodies carry
    a monotonically increasing counter ("id") used to reject stale writes.

    Returns an API-Gateway-style dict with 'statusCode' and 'body'.
    """
    print(event, context)
    method = event['requestContext']['http']["method"]
    if method not in ['GET', 'POST']:
        return {'statusCode': 501, 'body': 'Invalid method'}
    body = None
    if method == 'GET':
        obj_key = event['queryStringParameters']['hash']
    else:
        body = event['body']
        # Reject oversized payloads before attempting to parse them.
        if len(body) > 100000:
            return {'statusCode': 400, 'body': 'size is too large'}
        body = json.loads(body)
        obj_key = body['hash']
    if not valid_uuid(obj_key):
        return {'statusCode': 400, 'body': 'Invalid hash'}
    s3 = boto3.resource('s3')
    bucket = 'makelist-db.coldstonelabs.net'
    # Sentinel: a counter of -1 means "no stored object yet", so any POST wins.
    obj_str = '{"id": -1}'
    try:
        obj = s3.Object(bucket, obj_key)
        obj_str = obj.get()['Body'].read().decode()
    except Exception:
        # Narrowed from a bare except; a missing key is expected on first save.
        traceback.print_exc()
        if method == 'GET':
            return {'statusCode': 404, 'body': 'Nothing found'}
    if method == 'GET':
        return {'statusCode': 200, 'body': obj_str}
    obj = json.loads(obj_str)
    if obj['id'] > body['id']:
        # Stored counter is newer than the client's copy: stale write.
        return {'statusCode': 400, 'body': 'invalid counter'}
    save = {"id": body["id"], "title": body["title"], "list": body["list"], "text": body["text"]}
    s3.Object(bucket, obj_key).put(Body=json.dumps(save))
    return {'statusCode': 204, 'body': 'Saved'}
| 27.741935 | 97 | 0.568023 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 551 | 0.320349 |
fb3a92014d6912b9f49fb4d369df04c828bdc5f0
| 6,565 |
py
|
Python
|
Week3-Case-Studies-Part1/Language-Processing/Language_Processing.py
|
Lamanova/Harvard-PH526x-Lab
|
168e4c16fa067905142bb6be106277f228d591c5
|
[
"MIT"
] | 7 |
2017-08-13T03:03:55.000Z
|
2022-02-06T17:08:12.000Z
|
Week3-Case-Studies-Part1/Language-Processing/Language_Processing.py
|
Lamanova/Harvard-PH526x-Lab
|
168e4c16fa067905142bb6be106277f228d591c5
|
[
"MIT"
] | null | null | null |
Week3-Case-Studies-Part1/Language-Processing/Language_Processing.py
|
Lamanova/Harvard-PH526x-Lab
|
168e4c16fa067905142bb6be106277f228d591c5
|
[
"MIT"
] | 10 |
2017-09-29T08:22:10.000Z
|
2021-06-17T22:51:59.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Feb 28 22:41:26 2017
@author: lamahamadeh
"""
'''
Case Study about Language Processing
'''
#counting words
#---------------
# Sample sentence used to exercise the word-counting helpers below
# (the "keping" typo is part of the original sample data string).
text = "This is a test text. We're keping this text short to keep things manageable." #test text
#Using loops
#-----------
def count_words(text):
    """Count how often each word occurs in *text* (str).

    The text is lowercased and common punctuation is stripped before
    splitting on single spaces.  Returns a dict mapping each unique
    word to its count.
    """
    cleaned = text.lower()  # case-insensitive counting
    for mark in [".", ",", ";", ":", "'", '"']:
        # Drop punctuation so "word." and "word" count together.
        cleaned = cleaned.replace(mark, "")
    tallies = {}
    for token in cleaned.split(" "):
        tallies[token] = tallies.get(token, 0) + 1
    return tallies
# Demo: count the words of the sample sentence defined above.
print(count_words(text))
print(len(count_words("This comprehension check is to check for comprehension.")))#first quiz question
#------------------------------------------------------------------------------
#using collections module
#-------------------------
from collections import Counter
def count_words_fast(text):
    """Count how often each word occurs in *text* (str).

    Same contract as count_words, but the tallying is delegated to
    collections.Counter (which compares equal to the plain-dict result).
    """
    stripped = text.lower()  # case-insensitive counting
    for mark in (".", ",", ";", ":", "'", '"'):
        # Remove punctuation before splitting into words.
        stripped = stripped.replace(mark, "")
    return Counter(stripped.split(" "))
# NOTE(review): this prints the function object itself (no call parentheses);
# presumably count_words_fast(text) was intended — confirm.
print(count_words_fast)
# Equal contents (Counter compares equal to dict) but distinct objects.
print(count_words(text)==count_words_fast(text))
print(count_words(text) is count_words_fast(text))#second quiz question
#------------------------------------------------------------------------------
#read a book
#-------------
def read_book(title_path):
    """Read the UTF-8 text file at *title_path* and return it as one string.

    All newline and carriage-return characters are stripped, so the result
    is a single unbroken line of text.
    """
    with open(title_path, "r", encoding="utf8") as book_file:
        contents = book_file.read()
    return contents.replace("\n", "").replace("\r", "")
# NOTE(review): hard-coded local path — this section only runs on the author's
# machine; parameterize before reuse.
text = read_book('/Users/ADB3HAMADL/Desktop/Movies/English/Nora Ephron/You Have Got Mail.txt')#read a book from its path
print(len(text))#number of charatcers in the book
#if there is a famous/wanted line in the book we can use the 'find' method to find it
ind = text.find("go to the mattresses")
print(ind) #print the index number of the famous/wanted sentence
sample_text = text[ind : ind + 953] #slice the paragraph that contains the famous line
print(sample_text) #print the whole chosen paragraph
#------------------------------------------------------------------------------
#Counting the number of unique words
#------------------------------------
def word_stats(word_counts):
    """Return (number of unique words, view of per-word frequencies).

    *word_counts* is a dict (or Counter) mapping word -> count, as produced
    by count_words / count_words_fast.
    """
    return (len(word_counts), word_counts.values())
# Demo: unique-word count and total word count for one book (hard-coded path).
text = read_book('/Users/ADB3HAMADL/Desktop/Movies/English/Nora Ephron/You Have Got Mail.txt')
word_counts = count_words(text)
(num_unique, counts) = word_stats(word_counts)
print(num_unique) #print the number of unique number of words in the text
print(sum(counts)) #print the sum of the frequency of each word in the text
#------------------------------------------------------------------------------
#Reading multiple files
#-----------------------
import os #to read directories
movie_dir = "/Users/ADB3HAMADL/Desktop/movies" #tells us how many directories in the book directory
import pandas as pd
'''
Pandas example of how to create a dataframe:
--------------------------------------------
import pandas as pd
table = pd.DataFrame(coloums = ("name" , "age"))
table.loc[1] = "James", 22
table.loc[2] = "Jess", 32
print(table)
'''
stats = pd.DataFrame(columns = ("Language" , "Director" , "Title" , "Length" , "Unique")) #this creates an empty dataframe
#with empty table elements with 5 columns
#To put data in the table
# Walk the directory tree movie_dir/<Language>/<Director>/<Title> and add one
# row per text file: (language, director, title, word count, unique words).
title_num =1
for Language in os.listdir(movie_dir):
    for Director in os.listdir(movie_dir + "/" + Language):
        for Title in os.listdir(movie_dir + "/" + Language + "/" + Director):
            inputfile = movie_dir + "/" + Language + "/" + Director + "/" + Title
            print(inputfile)
            text = read_book(inputfile)
            (num_unique, counts) = word_stats(count_words(text))
            stats.loc[title_num ] = Language , Director.title(), Title.replace(".txt", " ") , sum(counts) , num_unique #.title() here capitalises the first letter from the first and last name of the director. If we want to capitalise only the first letter, we can use .capitalize().
            title_num += 1
print(stats) #print the created dataframe
print(stats.head()) #print the top 5 lines
print(stats.tail()) #print the last 5 lines
print(stats[stats.Language == "English"]) #print the number of entries for language English (a subset from the whole dataframe)
#------------------------------------------------------------------------------
#Plotting Book Statistics
#-------------------------
import matplotlib.pyplot as plt
# Scatter of total length vs. unique words; log-log to reveal power-law shape.
plt.plot(stats.Length, stats.Unique, "bo")
#OR we can write plt.plot(stats['length'], stats['unique'])
plt.loglog(stats.Length, stats.Unique, "bo") #it is a straight line which suggest data modelling strategies that we might use
plt.figure(figsize = (10,10))
subset = stats[stats.Language == "English"] #extract a subset that has only the rows with English Language
plt.loglog(subset.Length, subset.Unique, "o", label = "English", color = "blue")
subset = stats[stats.Language == "French"] #extract a subset that has only the rows with French Language
plt.loglog(subset.Length, subset.Unique, "o", label = "French", color = "red")
plt.legend()
plt.xlabel("Movie Length")
plt.ylabel("Number of unique words")
plt.savefig("lang_plot.pdf")
#------------------------------------------------------------------------------
#
| 36.071429 | 282 | 0.6262 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4,031 | 0.614014 |
fb3c3876b330f5a1310fa02ca86fedafa73ed588
| 273 |
py
|
Python
|
catalog/bindings/csw/brief_record.py
|
NIVANorge/s-enda-playground
|
56ae0a8978f0ba8a5546330786c882c31e17757a
|
[
"Apache-2.0"
] | null | null | null |
catalog/bindings/csw/brief_record.py
|
NIVANorge/s-enda-playground
|
56ae0a8978f0ba8a5546330786c882c31e17757a
|
[
"Apache-2.0"
] | null | null | null |
catalog/bindings/csw/brief_record.py
|
NIVANorge/s-enda-playground
|
56ae0a8978f0ba8a5546330786c882c31e17757a
|
[
"Apache-2.0"
] | null | null | null |
from dataclasses import dataclass
from bindings.csw.brief_record_type import BriefRecordType
__NAMESPACE__ = "http://www.opengis.net/cat/csw/2.0.2"
@dataclass
class BriefRecord(BriefRecordType):
    """Dataclass binding for the CSW 2.0.2 ``BriefRecord`` XML element."""
    class Meta:
        # XML namespace used when (de)serialising this element.
        namespace = "http://www.opengis.net/cat/csw/2.0.2"
fb3db8f6cafb78c9e368cf61c425d4648f50a4a0
| 8,109 |
py
|
Python
|
R2Py/pre_data_chk.py
|
OpenBookProjects/ipynb
|
72a28109e8e30aea0b9c6713e78821e4affa2e33
|
[
"MIT"
] | 6 |
2015-06-08T12:50:14.000Z
|
2018-11-20T10:05:01.000Z
|
R2Py/pre_data_chk.py
|
OpenBookProjects/ipynb
|
72a28109e8e30aea0b9c6713e78821e4affa2e33
|
[
"MIT"
] | null | null | null |
R2Py/pre_data_chk.py
|
OpenBookProjects/ipynb
|
72a28109e8e30aea0b9c6713e78821e4affa2e33
|
[
"MIT"
] | 8 |
2016-01-26T14:12:50.000Z
|
2021-02-20T14:24:09.000Z
|
# -*- coding: utf-8 -*-
'''check and clean and export as .csv
- 150602 pike aTimeLogger2 date into .csv
- 150601 make handlog export all date into *_all.csv
- 150525 make handlog wxport 2types files
'''
import os
import sys
import fnmatch
def _load_timeline(flines, fpath):
    """Reformat handlog timeline lines and export them under ./data.

    *flines* is the raw file contents (header first); *fpath* supplies the
    output-file suffix (text after the last '-').  Returns
    (header_line, reformatted_body, "") to mirror _load_pomodoro's shape.
    """
    header = flines[0]
    body = ""
    for line in flines[1:]:
        # Rows without a comma carry no data and are skipped.
        if len(line[:-1].split(',')) > 1:
            body += _format_log_line(line)
    out_name = "./data/zq_%s" % fpath.split('-')[-1]
    open(out_name, 'w').write(header + body)
    return header, body, ""
def _format_log_line(crt_line):
_exp = ""
_l = crt_line[:-1].split(',')
as_year = ["20%s-%s-%s"% (_l[0][:2],_l[0][2:4],_l[0][-2:])]
_exp += ",".join(as_year+_l[1:])
_exp += "\n"
return _exp
def _load_pomodoro(flines,fpath):
    """Split a pomodoro export into timeline rows and per-day pomodoro counts.

    Writes ./data/zq_<suffix> (timeline) and ./data/pom_<suffix> (counts);
    returns (header_tokens, timeline_text, pomodoro_text).
    """
    _titles = flines[0].split()
    _pom = "" # "date,"+_titles[1]+"\n"
    _exp = "" # _titles[0]+"\n"
    for l in flines[1:]:
        _l = l[:-1].split()
        #print _l
        #print len(_l)
        if 1 == len(_l):
            # Single-token row: treat the whole line as a timeline entry.
            _exp += _format_log_line(l)
        else:
            c_l = l.split()
            if 0 == len(c_l):
                pass
            else:
                # NOTE(review): c_l[0] has no trailing newline, yet
                # _format_log_line chops the final character — this appears
                # to drop the last date digit; confirm against real data.
                _exp += _format_log_line(c_l[0])
                crt_date = c_l[0][:6]
                #print crt_date,c_l[1:]
                _pom += _reformat_csv(crt_date, c_l[1:])
    f_pom = "date,"+_titles[1]+"\n" + _pom
    f_exp = _titles[0]+"\n" + _exp
    open("./data/zq_%s"% fpath.split('-')[-1],'w').write(f_exp)
    open("./data/pom_%s"% fpath.split('-')[-1],'w').write(f_pom)
    return _titles,_exp,_pom
def _reformat_csv(crt_date, crt_line):
as_year = "20%s-%s-%s"% (crt_date[:2],crt_date[2:4],crt_date[-2:])
_exp = as_year
for i in crt_line:
if "," in i:
_exp += ","+i[:-1]
else:
_exp += ","+i
_exp += "\n"
return _exp
def chk_all_log(aim_path):
    """Aggregate all time-log exports under ./log or ./csv into merged CSVs.

    The last component of *aim_path* selects the mode:
    - "log": parse hand-written .txt logs (pomodoro vs. timeline, decided by
      a "Pt" marker in the header) and write _all_zhandlog.csv /
      _all_pomodoro.csv under ./data.
    - "csv": parse aTimeLogger2 .csv exports via _load_atl, pivot per-date
      category values into columns, and write _all_atlogger2.csv.

    NOTE: Python 2 only — uses print statements and dict.keys().sort().
    """
    _spath = aim_path.split('/')
    if "log" == _spath[-1]:
        t_titles = ""
        f_pom = "" #"date,"+_titles[1]+"\n"
        p_titles = ""
        f_exp = "" #_titles[0]+"\n"
        for file in os.listdir('./log'):
            if fnmatch.fnmatch(file, '*.txt'):
                #pd.read_csv('./data/%s' % file)
                #_load_data('./log/%s' % file)
                fpath = './log/%s' % file
                print fpath
                fl = open(fpath).readlines()
                #print fl[0]
                if "Pt" in fl[0]:
                    # Header mentions "Pt": pomodoro-style export.
                    #print fpath
                    _titles,_exp,_pom = _load_pomodoro(fl,fpath)
                    #print _exp
                    f_pom += _pom
                    f_exp += _exp
                    p_titles = _titles
                else:
                    _titles,_exp,_pom = _load_timeline(fl,fpath)
                    t_titles = _titles
                    f_exp += _exp
                    #print _exp
        f_pom = "date,"+_titles[1]+"\n"+f_pom
        f_exp = _titles[0]+"\n"+f_exp
        open("./data/_all_zhandlog.csv" ,'w').write(f_exp)
        open("./data/_all_pomodoro.csv",'w').write(f_pom)
    elif "csv" == _spath[-1]:
        t_titles = ["date"]
        f_exp = {}
        '''{'date':['key1',var1]}
        '''
        for file in os.listdir('./csv'):
            if fnmatch.fnmatch(file, '*.csv'):
                fpath = './csv/%s' % file
                #print fpath
                fl = open(fpath).readlines()
                date,logs = _load_atl(fl,file)
                #print date
                f_exp[date] = []
                # Collect (category, value) pairs per date and grow the
                # global column list as new categories appear.
                for l in logs.split("\n")[1:]:
                    _log = l.split(',')
                    if 1==len(_log):
                        pass
                    else:
                        f_exp[date].append((_log[0],_log[1]))
                        if _log[0] in t_titles:
                            pass
                        else:
                            t_titles.append(_log[0])
        k_date = f_exp.keys()
        k_date.sort()
        #print k_date
        exp_all = [] #sort f_exp logs with date
        for d in k_date:
            crt_line = {'date':d[:4]} # 130701-130801 ~ 1307
            for i in t_titles:
                #print f_exp[d]
                for v in f_exp[d]:
                    if i == v[0]:
                        crt_line[i] = v[1]
            exp_all.append(crt_line)
        #print t_titles
        #print exp_all[0]
        exp_lines = []
        # Flatten the per-date dicts into rows, defaulting missing
        # categories to "0.0" and trimming stray '\r' characters.
        for l in exp_all:
            crt_l = []
            for k in t_titles:
                if k in l:
                    if "-" in l[k]:
                        crt_l.append(l[k])
                    else:
                        if "\r" in l[k]:
                            #print l[k]
                            crt_l.append(l[k][:-1])
                        else:
                            crt_l.append(l[k])
                else:
                    crt_l.append("0.0")
            exp_lines.append(crt_l)
        #print exp_lines
        re_titles = []
        # Translate the Chinese column headers to English for the export.
        for _t in t_titles:
            if "总计" == _t:
                re_titles.append("Total")
            elif "其他" == _t:
                re_titles.append("Others")
            else:
                re_titles.append(_t)
        _exp_all = ",".join(re_titles)+"\n"
        #print _exp_all
        for i in exp_lines:
            _exp_all += ",".join([str(j) for j in i])+"\n"
        #_exp_all += "\n".join([",".join([str(j) for j in i for i in exp_lines])])
        open("./data/_all_atlogger2.csv",'w').write(_exp_all)
        print "*_all export..."
def _load_atl(flines,fname):
    """Extract the summary table from an aTimeLogger2 CSV export.

    Lines before the "Percent"/"%" header are skipped; lines containing
    '+' or '/' (date-range/summary rows) are dropped.  If a '%' header was
    seen, the retained rows are regrouped by _reformat_log into coarse
    categories.  Writes ./data/atl2_<suffix>.csv and returns
    (suffix, exported_text).
    """
    _titles = ""
    _exp = ""
    _gotit = 0
    no_grp = 0
    l_exp = []
    for l in flines:
        if ("%" in l):
            no_grp = 1
        if ("Percent" in l)or("%" in l):
            # Everything from the header row onward is candidate data.
            _gotit = 1
        if _gotit:
            if "+" in l:
                pass
            elif "/" in l:
                pass
            else:
                _exp += l
                l_exp.append(l)
    if no_grp:
        # Ungrouped export: rebuild the table from the regrouped categories.
        _exp = "Class,Duration,Percent\n"
        _total,grp_act = _reformat_log(l_exp)
        for k in grp_act:
            _exp += "%s,"%k + ",".join(grp_act[k]) + "\n"
        _exp += "总计,%.2f"% _total
    f_exp = _titles+_exp
    _expf = fname.split("_")[1]
    open("./data/atl2_%s.csv"% _expf,'w').write(f_exp)
    return _expf,_exp
def _reformat_log(log):
'''reformt log
- clean ""
- merge no grp. logs
'''
act_map = {'Chaos':['Chaos',]
, 'Life':['Life','运动','交通','Air','用餐','家务']
, 'Input':['Input','阅读','学习','上网']
, 'Output':['Works','交流','工作','GDG','OBP','Pt0']
, 'Livin':['Livin','睡眠','购物','就医','Ukulele','电影','娱乐']
, 'Untracked':['其他','Untracked']
}
grp_act = {}
for l in log:
#print ",".join([i[1:-1] for i in l[:-1].split(',')])
crt_line = [i[1:-1] for i in l[:-1].split(',')]
if "%" in crt_line:
pass
else:
#print crt_line[0].split()[0]
for k in act_map.keys():
if crt_line[0].split()[0] in act_map[k]:
#print k,crt_line[0],crt_line[1:]
if k in grp_act:
grp_act[k].append(crt_line[1:])
else:
grp_act[k] = [crt_line[1:]]
_total = 0
for k in grp_act:
#print k,grp_act[k]
k_time = 0
k_precent = 0
for i in grp_act[k]:
_time = i[0].split(':')
d_time = int(_time[0])+float(_time[1])/60
k_time += d_time
_total += d_time
k_precent += float(i[1])
#print type(d_time)
#print k_time, k_precent
grp_act[k] = ["%.2f"%k_time, str(k_precent)]
#print grp_act
return _total, grp_act
# CLI entry point: expects exactly one argument, the data directory whose
# last path component is either "log" or "csv" (Python 2 print syntax).
if __name__ == '__main__':
    if 2 != len(sys.argv) :
        print '''Usage:
        $ pre_data_chk.py path/2/[数据目录]
        '''
    else:
        aim_path = sys.argv[1]
        chk_all_log(aim_path)
| 30.715909 | 82 | 0.436305 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,866 | 0.227867 |
3485a97e04399f112e63493e9a7df6b69342ec0c
| 313 |
py
|
Python
|
Microservices/Events/Events/producer.py
|
satyap54/Microservices-Architecture
|
be397b351a61eb21229fad021590fcb0b07b8089
|
[
"MIT"
] | null | null | null |
Microservices/Events/Events/producer.py
|
satyap54/Microservices-Architecture
|
be397b351a61eb21229fad021590fcb0b07b8089
|
[
"MIT"
] | null | null | null |
Microservices/Events/Events/producer.py
|
satyap54/Microservices-Architecture
|
be397b351a61eb21229fad021590fcb0b07b8089
|
[
"MIT"
] | null | null | null |
import asyncio
import aiormq
async def publish(method, event_id):
    """Publish *event_id* (stringified, UTF-8) to the 'admin' queue via aiormq.

    NOTE(review): *method* is accepted but never used, and the AMQP URL is
    the empty string — presumably meant to be filled from config; confirm
    before deploying.
    """
    connection = await aiormq.connect('')
    channel = await connection.channel()
    body = bytes(str(event_id), 'utf-8')
    # Default exchange ('') routes directly to the queue named 'admin'.
    await channel.basic_publish(
        body, exchange='', routing_key='admin'
    )
    await connection.close()
| 20.866667 | 46 | 0.664537 | 0 | 0 | 0 | 0 | 0 | 0 | 281 | 0.897764 | 18 | 0.057508 |
3485c332458e998d42d6f67bb82de0ba508582c0
| 8,473 |
py
|
Python
|
timezone/embeds.py
|
duanegtr/legendv3-cogs
|
ffde1452a75ad42b4f6511b612ce486e96fcd6de
|
[
"MIT"
] | 10 |
2020-05-25T13:32:30.000Z
|
2022-02-01T12:33:07.000Z
|
timezone/embeds.py
|
darcyle/tl-cogs
|
6b13c4a6247115571c5a2bb6ea98ed1fe2d44d79
|
[
"MIT"
] | 2 |
2020-05-23T22:53:07.000Z
|
2020-08-09T11:28:12.000Z
|
timezone/embeds.py
|
darcyle/tl-cogs
|
6b13c4a6247115571c5a2bb6ea98ed1fe2d44d79
|
[
"MIT"
] | 7 |
2020-05-18T17:37:33.000Z
|
2022-01-13T04:08:05.000Z
|
"""Embeds for Timezone Legends discord bot."""
import time
import discord
from dateutil.parser import parse # pip install python-dateutil
from pytz import timezone
from datetime import datetime
class Timezone_Embeds:
    """Builds a pre-styled discord.Embed (colour, footer, thumbnail) for this cog.

    When show_timestamp is False the description is ignored as well, matching
    the original construction paths.
    """

    def __init__(self, description=None, color=0x5c0708, show_footer=True, show_timestamp=True, show_thumbnail=True):
        embed_kwargs = {'colour': discord.Colour(color)}
        if show_timestamp:
            embed_kwargs['timestamp'] = datetime.utcfromtimestamp(time.time())
            if description:
                embed_kwargs['description'] = description
        self.embed = discord.Embed(**embed_kwargs)
        if show_thumbnail:
            self.embed.set_thumbnail(url="https://cdn.clipart.email/1cb9490f73d090921ded3aa2b1c2bf1f_different-time-zone-no-problem_513-510.png")
        if show_footer:
            self.embed.set_footer(text="Bot by Vanhorn | Academy",
                                  icon_url="https://vignette.wikia.nocookie.net/clashroyale/images/4/42/GraveyardCard.png/revision/latest/top-crop/width/360/height/450?cb=20171212204803")

    def set_title(self, name, icon_url):
        """Set the embed author line, linking back to the server channel."""
        self.embed.set_author(
            name=name,
            url="https://discordapp.com/channels/374596069989810176/374597178989215757",
            icon_url=icon_url)
async def events(ctx, event_list):
    """Send an embed listing every event that matched the user's query."""
    listing = Timezone_Embeds(description="Listing all Events that matched your request (**if any**)")
    listing.set_title("Events", "https://www.kindpng.com/picc/m/246-2465899_upcoming-events-icon-calendar-icon-png-transparent-png.png")
    for name, local_time, remaining, _delta in event_list:
        for label, text in (("Event", f"**{name}**"),
                            ("Local Time", f"{local_time}"),
                            ("Time Left", f"**{remaining}**")):
            listing.embed.add_field(name=label, value=text, inline=True)
    await ctx.send(embed=listing.embed)
async def created_event(ctx, event, event_id, event_time):
    """Confirm a newly created event with its name, ID and scheduled time."""
    confirmation = Timezone_Embeds(show_thumbnail=False)
    confirmation.set_title("Event Created", "https://cdn2.vectorstock.com/i/1000x1000/70/11/event-schedule-icon-vector-26627011.jpg")
    for label, text in (("Name", f"**{event}**"),
                        ("ID", f"**{event_id}**"),
                        ("When", f"{event_time}")):
        confirmation.embed.add_field(name=label, value=text, inline=True)
    await ctx.send(embed=confirmation.embed)
async def removed_event(ctx, event_id, event):
    """Confirm removal of an event, rendering its time in its own timezone.

    *event* example:
    {'event': 'Test Event', 'when': '2020-05-07T15:46:17.156085+00:00', 'tz': 'America/New_York'}
    """
    tze = Timezone_Embeds(show_thumbnail=False)
    tze.set_title("Event Removed", "https://cdn2.vectorstock.com/i/1000x1000/70/11/event-schedule-icon-vector-26627011.jpg")
    event_name = event['event']
    event_tz = event['tz']
    fmt = "**%H:%M** %d-%B-%Y **%Z (UTC %z)**"
    # Parse the stored ISO timestamp and display it in the event's timezone.
    event_time = parse(event['when']).astimezone(timezone(event_tz)).strftime(fmt)
    tze.embed.add_field(name="Name", value=f"**{event_name}**", inline=True)
    tze.embed.add_field(name="ID", value=f"**{event_id}**", inline=True)
    tze.embed.add_field(name="When", value=f"{event_time}", inline=True)
    tze.embed.add_field(name="TZ", value=f"{event_tz}", inline=True)
    await ctx.send(embed=tze.embed)
async def show_events(ctx, event_list):
    """Send one embed per registered event.

    Only the first embed carries the timestamp/thumbnail and only the last
    carries the footer, so consecutive messages read as one list.
    """
    for idx in range(0, len(event_list)):
        event_id, event_name, event_time, event_tz, time_delta = event_list[idx]
        tze = Timezone_Embeds(show_footer=True if idx == len(event_list)-1 else False,
                              show_timestamp=True if idx == 0 else False,
                              show_thumbnail=True if idx == 0 else False)
        tze.set_title(f"Event ({event_id})",
                      "https://images.squarespace-cdn.com/content/v1/5a5ced468a02c79bfe4829bf/1516978000404-CVQ1CO95BEFJ7W2FTGDM/ke17ZwdGBToddI8pDm48kBPauUSMbKdP-TlqMma_x0ZZw-zPPgdn4jUwVcJE1ZvWEtT5uBSRWt4vQZAgTJucoTqqXjS3CfNDSuuf31e0tVFzDLvN5UbLOifpAePtRMTrCg1jr8OpcUFdGiHX6l_hRjFvbuqF0GUInBxxtVhBOn4/events-icon-website-gray.png")
        tze.embed.add_field(name="Name", value=f"**{event_name}**", inline=True)
        tze.embed.add_field(name="When", value=f"{event_time}", inline=True)
        tze.embed.add_field(name="TZ", value=f"**{event_tz}**", inline=True)
        await ctx.send(embed=tze.embed)
async def compare(ctx, display_name, other_time, time_amt, position_text):
    """Show another user's local time and the offset relative to the caller."""
    card = Timezone_Embeds()
    card.set_title("User TZ Compare", "https://cdn3.iconfinder.com/data/icons/calendar-23/100/Calendar-15-512.png")
    for label, text in ((f"{display_name}'s time", f"**{other_time}**"),
                        ("Which is", f"**{time_amt}{position_text}**")):
        card.embed.add_field(name=label, value=text, inline=True)
    await ctx.send(embed=card.embed)
async def iso(ctx, code=None, tz=None):
    """Reply with the timezones supported for an ISO-3166 country code.

    Falls back to a lookup-link message when the code or its timezone list
    is missing.
    """
    reply = Timezone_Embeds()
    reply.set_title("ISO", "https://images.assetsdelivery.com/compings_v2/aalbedouin/aalbedouin1808/aalbedouin180806226.jpg")
    if code and tz:
        timezones = '\n'.join(tz)
        reply.embed.add_field(name=f"Supported timezones for **{code}**:", value=f"**{timezones}**", inline=False)
        reply.embed.add_field(name="**NOTE**", value=f"\n**Use** `{ctx.prefix}time tz Continent/City` **to display the current time in that timezone.**", inline=False)
    else:
        reply.embed.add_field(name=f"**{code}** is invalid. For a full list, see here:", value="[Timezone Link](<https://en.wikipedia.org/wiki/List_of_ISO_3166_country_codes>)", inline=False)
    await ctx.send(embed=reply.embed)
async def me(ctx, usertime, time=None):
    """Report the caller's saved timezone, or explain how to set one."""
    reply = Timezone_Embeds()
    if not (usertime and time):
        reply.embed.add_field(name=f"You haven't set your timezone yet...",
                              value=f"Do `{ctx.prefix}time me Continent/City`\nsee [Timezones](<https://en.wikipedia.org/wiki/List_of_tz_database_time_zones>)")
    else:
        reply.embed.add_field(name=f"Your current timezone is:", value=f"**{usertime}.\nThe current time is: {time}", inline=False)
    await ctx.send(embed=reply.embed)
async def generic_embeds(ctx, field, value, description=None):
    """Send a one-field embed with an optional description."""
    wrapper = Timezone_Embeds(description)
    wrapper.embed.add_field(name=field, value=value)
    await ctx.send(embed=wrapper.embed)
"""
embed = discord.Embed(colour=discord.Colour(0x5c0708), description="Listing all Events that matched your request (**if any**)", timestamp=datetime.utcfromtimestamp(time.time()))
embed.set_author(name="Events",
url="https://discordapp.com/channels/374596069989810176/374597178989215757",
icon_url="https://www.kindpng.com/picc/m/246-2465899_upcoming-events-icon-calendar-icon-png-transparent-png.png")
embed.set_thumbnail(url="https://cdn.iconscout.com/icon/premium/png-256-thumb/global-time-zone-1480117-1253197.png")
embed.set_footer(text="Bot by Vanhorn | Academy",
icon_url="https://vignette.wikia.nocookie.net/clashroyale/images/4/42/GraveyardCard.png/revision/latest/top-crop/width/360/height/450?cb=20171212204803")
for event_name, event_time, time_to_event in event_list:
embed.add_field(name="Event", value=f"**{event_name}**", inline=True)
embed.add_field(name="Local Time", value=f"**{event_time}**", inline=True)
embed.add_field(name="Time Left", value=f"**{time_to_event}**", inline=True)
#print(embed.to_dict())
await ctx.send(embed=embed)
"""
"""
[x] create_event Creates an event in your timezone, or in a given t...
[x] events Lists all registered events.
[x] show_events Lists all registered events.
[x] remove_event Erases an event if the given ID is found.
[x] compare Compare your saved timezone with another user's timezone.
[x] iso Looks up ISO3166 country codes and gives you a supported ti...
[x] me Sets your timezone.
[x] set Allows the mods to edit timezones.
[x] tell Tells you what the time will be in the given timezone.
[x] tz Gets the time in any timezone.
[x] user Shows the current time for user.
"""
| 57.639456 | 331 | 0.688304 | 1,641 | 0.193674 | 0 | 0 | 0 | 0 | 4,910 | 0.579488 | 4,315 | 0.509265 |
34895fa122da2aa59b08dae55bc7b95297e3503a
| 1,521 |
py
|
Python
|
utils/commonutils.py
|
rupakc/Storyweaver
|
9a6ec1c040a09f730cc6f32ce385f44a79d28663
|
[
"BSL-1.0"
] | null | null | null |
utils/commonutils.py
|
rupakc/Storyweaver
|
9a6ec1c040a09f730cc6f32ce385f44a79d28663
|
[
"BSL-1.0"
] | null | null | null |
utils/commonutils.py
|
rupakc/Storyweaver
|
9a6ec1c040a09f730cc6f32ce385f44a79d28663
|
[
"BSL-1.0"
] | null | null | null |
import hashlib
import shutil
import requests
import os
from config import constants
def get_sha_hash(content):
    """Return the hex SHA-1 digest of *content*.

    Accepts bytes directly; str input is UTF-8 encoded first (the original
    raised TypeError for str under Python 3, where sha1.update needs bytes).
    """
    if isinstance(content, str):
        content = content.encode('utf-8')
    sha = hashlib.sha1()
    sha.update(content)
    return sha.hexdigest()
def download_image(image_url,filename_to_save):
    """Stream the image at *image_url* to *filename_to_save* on disk."""
    # stream=True + copyfileobj avoids holding the whole image in memory.
    response = requests.get(image_url, stream=True)
    with open(filename_to_save, 'wb') as out_file:
        shutil.copyfileobj(response.raw, out_file)
def get_image_hash_list(image_data_directory_path):
    """List image file names in a directory with their extensions removed.

    Only the text before the FIRST '.' is kept (the names are hash strings);
    entries with no '.' at all are skipped, matching the original behaviour.
    """
    return [entry.split('.', 1)[0]
            for entry in os.listdir(image_data_directory_path)
            if '.' in entry]
def get_supported_image_hashes(image_hash_name_list):
    """Filter *image_hash_name_list* down to hashes with a known title.

    Reads the 'title = hash' mapping file at constants.TITLE_HASH_MAP and
    returns {hash: title} for each input hash that appears in it.
    """
    hash_title_dict = dict({})
    filtered_image_hash_dict = dict({})
    with open(constants.TITLE_HASH_MAP, 'r') as title_hash_file:
        title_hash_list = title_hash_file.read().split('\n')
        # Each mapping line is "title = hash"; lines without '=' are ignored.
        for title_hash in title_hash_list:
            sep_index = title_hash.find('=')
            if sep_index != -1:
                title = title_hash[:sep_index].strip()
                hash_value = title_hash[sep_index+1:].strip()
                hash_title_dict[hash_value] = title
    for image_name in image_hash_name_list:
        if image_name in hash_title_dict.keys():
            filtered_image_hash_dict[image_name] = hash_title_dict[image_name]
    return filtered_image_hash_dict
| 33.065217 | 82 | 0.721893 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 17 | 0.011177 |
348a2b80711dded6c85af5b308b4122d1e73013c
| 14,635 |
py
|
Python
|
flask_app.py
|
wacovidhackathon/TPCraze
|
57a9485a008536f6326e29328df6af6d29045a6d
|
[
"MIT"
] | null | null | null |
flask_app.py
|
wacovidhackathon/TPCraze
|
57a9485a008536f6326e29328df6af6d29045a6d
|
[
"MIT"
] | null | null | null |
flask_app.py
|
wacovidhackathon/TPCraze
|
57a9485a008536f6326e29328df6af6d29045a6d
|
[
"MIT"
] | null | null | null |
import time, os
import tomtomSearch
from flask_bootstrap import Bootstrap
from flask import Flask, render_template, request, redirect, flash, session, jsonify, url_for
from flask_wtf import FlaskForm
from wtforms import StringField, BooleanField, SubmitField, IntegerField, SelectField, RadioField
from wtforms.validators import DataRequired
from datetime import datetime
from flaskext.mysql import MySQL
from pytz import timezone
'''
TODO:
- allow the user to enter an address as well
- fix the two-column layout on the full page
- add information about the store
- integrate the user accounts
- authenticate the managers
'''
class LocationForm(FlaskForm):
    """Search form: free-text address, item type and search radius (km)."""
    address = StringField('Address', validators=[DataRequired()])
    item_option = SelectField('item_option', choices=[('1', 'Toilet Paper'), ('2', 'Hand Sanitizer')])
    distance_option = SelectField('distance_option',
                                  choices=[('1', '1 km'), ('5', '5 km'), ('10', '10 km'),
                                           ('15', '15 km'), ('20', '20 km')])
    submit = SubmitField('Search')
class StatusForm(FlaskForm):
    """Stock-report form: rating 1 (full) .. 5 (none) for a given item."""
    status_option = SelectField('status_option',
                                choices=[('1', 'Full Stock'), ('2', 'Majority Remaining'), ('3', 'Half Remaining'),
                                         ('4', 'Few Remaining'), ('5', 'None Remaining')])
    item_option = SelectField('item_option', choices=[('1', 'Toilet Paper'), ('2', 'Hand Sanitizer')])
    submit = SubmitField('Submit Status')
class StoreForm(FlaskForm):
    """Store picker; radio choices are filled in at request time."""
    stores = RadioField('stores', choices=[])
    submit = SubmitField('View')
class optionForm(FlaskForm):
    """Landing-page choice: search for an item vs. report stock status."""
    find_item = SubmitField('Find an Item')
    provide_status = SubmitField('Provide Status')
# declaring app name
app = Flask(__name__)
mysql = MySQL()
# MySQL configurations
# Credentials come from the environment; a missing variable raises KeyError
# at import time, which surfaces misconfiguration immediately.
app.config['MYSQL_DATABASE_USER'] = os.environ["MYSQL_USER"]
app.config['MYSQL_DATABASE_PASSWORD'] = os.environ["MYSQL_PW"]
app.config['MYSQL_DATABASE_DB'] = os.environ["MYSQL_DB"]
app.config['MYSQL_DATABASE_HOST'] = os.environ["MYSQL_URL"]
mysql.init_app(app)
def getStore(latitude, longitude):  # get stores from coordinates
    """Look up the store name, id and address for each (lat, lon) pair.

    *latitude*/*longitude* are parallel lists; coordinates with no matching
    row in all_stores are silently skipped.

    Returns three parallel lists: (names, ids, addresses).
    """
    db = mysql.connect()
    cursor = db.cursor()
    names, ids, addresses = [], [], []
    try:
        for lat, lon in zip(latitude, longitude):
            # One parameterized query replaces three string-concatenated
            # ones: avoids SQL injection and redundant round-trips.
            cursor.execute(
                'SELECT name, id, freeFormAddress FROM all_stores '
                'WHERE lat = %s AND lon = %s;',
                (lat, lon))
            rows = cursor.fetchall()
            if rows:
                name, store_id, address = rows[0]
                names.append(name)
                ids.append(store_id)
                addresses.append(address)
    finally:
        # Close even on error so connections are not leaked.
        cursor.close()
        db.close()
    return names, ids, addresses
def getItemStatus(selected_item, store_id, num_to_average):
    """Moving average of the most recent stock reports for an item at a store.

    Stored ratings run 1 (full stock) .. 5 (none); each is inverted to
    6 - rating so that a higher score means more stock.  The average covers
    at most *num_to_average* of the newest reports (newest rows assumed last
    in the result set) and is rounded to the nearest integer.  Returns 0
    when no reports exist.
    """
    db = mysql.connect()
    cursor = db.cursor()
    try:
        # Parameterized query: selected_item / store_id originate from user
        # form input, so string concatenation here would be injectable.
        cursor.execute(
            "SELECT rating FROM status_list WHERE id = %s AND item = %s;",
            (store_id, selected_item))
        rows = cursor.fetchall()
    finally:
        cursor.close()
        db.close()
    scores = [6 - row[0] for row in reversed(rows)]
    if not scores:
        return 0
    window = scores[:num_to_average]
    return round(sum(window) / len(window))
def getManagerList(raw_manager):
    """Flatten DB rows (one-tuples) into a list of managers, newest first."""
    return [row[0] for row in reversed(raw_manager)]
def parseMessage(store, raw_item, raw_rating, raw_date, raw_user):  # get status messages
    """Build human-readable status lines (newest first) from DB row tuples.

    *raw_rating*, *raw_date* and *raw_user* are parallel lists of one-tuples;
    *raw_item* is 1 (toilet paper) or 2 (hand sanitizer).  Returns the
    parallel lists (messages, rating_values, item_types).
    """
    rating_choices = ['Full Stock', 'Majority Remaining', 'Half Remaining', 'Few Remaining', 'None Remaining']
    item_choices = ['Toilet Paper', 'Hand Sanitizer']
    messages = []
    color_array = []
    type = []
    # Walk the rows newest-last -> newest-first.
    for i in reversed(range(len(raw_rating))):
        rating = int(raw_rating[i][0])
        line = '' + raw_date[i][0] + ' Status of ' + item_choices[raw_item - 1] + ' at ' + store + ': ' + rating_choices[rating - 1]
        if raw_user[i][0] is not None:
            # Attribute the report when a user name was recorded.
            line += " - " + raw_user[i][0]
        messages.append(line)
        color_array.append(rating)
        type.append(int(raw_item))
    return messages, color_array, type
def getAddress(address):  # get basic store information for landing page
    """Prefix a raw address string for display on the landing page."""
    prefix = 'Address: '
    return prefix + address
def getPhone(phone):
    """Format a raw phone string for display; empty input -> 'Unavailable'.

    Non-empty input is assumed to carry the number at fixed offsets
    [5:14] and [14:18] — presumably matching the provider's format;
    confirm against real data.
    """
    if phone:
        formatted = phone[5:14] + '-' + phone[14:18]
    else:
        formatted = 'Unavailable'
    return 'Phone: ' + formatted
def getItem(key):
    """Map a form option key ('1'/'2') to its item name; KeyError otherwise."""
    return {'1': 'Toilet Paper', '2': 'Hand Sanitizer'}[key]
@app.route('/login', methods=['GET', 'POST'])
def login():
    """Store the posting user's e-mail and store id in the Flask session.

    NOTE(review): both methods fall through to the literal "Forbidden!"
    response, even after a successful POST — confirm whether a redirect or
    success body was intended.
    """
    if request.method == 'POST':
        data = request.get_json()
        session['user'] = data['user_email']
        session['user_store_id'] = data['store_id']
    return "Forbidden!"
@app.route('/location', methods=['GET', 'POST'])
def location():
    """Address/item/radius search page.

    On a valid submit: geocode the address via tomtomSearch, find nearby
    store coordinates within the chosen radius, resolve them to store
    rows, stash everything in the session and redirect to /store.
    """
    form = LocationForm()
    if form.validate_on_submit():
        flash('Item requested from the user {}'.format(form.item_option.data))
        flash('Item requested from the user {}'.format(form.distance_option.data))
        flash('Item requested from the user {}'.format(form.address.data))
        session['selected_item'] = form.item_option.data
        session['distance'] = form.distance_option.data
        user_lat, user_lon = tomtomSearch.geo(form.address.data)
        # Radius in metres (km * 1000); second argument scales the result limit.
        lat_lst, lon_lst = tomtomSearch.search(user_lat, user_lon,1000*int(session.get('distance')),10*int(session.get('distance')))
        stores, ids, addresses = getStore(lat_lst, lon_lst)
        # NOTE(review): the empty-list assignments below are immediately
        # overwritten and look redundant — confirm they can be dropped.
        session['stores'] = []
        session['ids'] = []
        session['addresses'] = []
        session['stores'] = stores
        session['ids'] = ids
        session['addresses'] = addresses
        return redirect('/store')
    return render_template('location.html', title='Location', form = form)
@app.route('/store', methods=['GET', 'POST'])
def stores():
    """Let the user pick one of the stores found by /location.

    POST: store the chosen store's name/id in the session and go to
    /item-status.  GET: build the radio-button list of stores, sorted by
    each store's current item status (best first); in 'find' mode stores
    with status 0 are hidden.
    """
    form = StoreForm()
    if request.method == 'POST':
        #flash('Status requested from the user {}'.format(form.stores.data))
        # 'options' is the index of the chosen radio button.
        option = int(request.form['options'])
        print(session.get('stores'))
        session['selected_store'] = session.get('stores')[option]
        session['selected_id'] = session.get('ids')[option]
        return redirect('/item-status')
    status_values = []
    radio = {}
    all_stores = []
    store_info = {}
    store_count = []
    for i in range(len(session.get('stores'))): # append radio button options
        # Moving-average status of the selected item at store i.
        cur_status = getItemStatus(session.get('selected_item'), session.get('ids')[i], 1)
        # In 'find' mode, skip stores that report nothing in stock.
        if session.get('selected_option') == 'find' and cur_status==0:
            continue
        else:
            #form.stores.choices.append((str(i), (session.get('stores')[i] + ' - ' + session.get('addresses')[i])))
            all_stores.append((session.get('stores')[i] + ' - ' + session.get('addresses')[i]))
            store_info[i] =[session.get('stores')[i],session.get('ids')[i],session.get('addresses')[i]]
            #radio[i] = str(session.get('stores')[i] + ' - ' + session.get('addresses')[i])
            store_count.append(i)
            status_values.append(cur_status)
    #form.stores.choices = [x for _, x in sorted(zip(status_values, form.stores.choices), reverse=True)]
    # Sort the surviving stores by status, best first.
    # NOTE(review): all_stores is sorted here but never used afterwards.
    all_stores = [x for _, x in sorted(zip(status_values, all_stores), reverse=True)]
    store_count = [x for _, x in sorted(zip(status_values, store_count), reverse=True)]
    sorted_info = []
    for i in range(len(store_count)):
        sorted_info.append(store_info[store_count[i]])
    # Rewrite the session lists in sorted order so the POST branch's index
    # lookup matches what is displayed.
    session['stores'] = [item[0] for item in sorted_info]
    session['ids'] = [item[1] for item in sorted_info]
    session['addresses'] = [item[2] for item in sorted_info]
    status_values.sort(reverse=True)
    print(session.get('stores'))
    # Build the radio choices in the (now sorted) display order.
    for i in range(len(status_values)):
        s = str(session.get('stores')[i] + ' - ' + session.get('addresses')[i])
        radio[i] = s
        form.stores.choices.append((str(i), s))
    storeFound = True
    print(radio)
    if len(status_values) == 0:
        storeFound = False
    status_types = ['Full Stock', 'Majority Remaining', 'Half Remaining','Few Remaining', 'None Remaining']
    return render_template("store.html", type_query = session.get('selected_option'),storeFound = storeFound,status_types=status_types, dist = int(session.get('distance')),len=len(form.stores.choices), form=form, status_values=status_values, radio = radio, selected_item_index = int(session.get('selected_item')), selected_item_name = getItem(session.get('selected_item')))
@app.route('/item-status', methods=['GET', 'POST'])
def status():
    """Show the report history for the selected item at the selected store.

    POST: insert a new status report for the session's store/item, marking
    it as a manager report when the logged-in user's own store matches.
    GET: fetch the store's report history plus basic store info and render
    the status page.

    All queries are parameterized (the originals built SQL by string
    concatenation from session/form values -- an injection risk).
    """
    status_form = StatusForm()
    if request.method == 'POST':
        # Anonymous users may not post reports.
        if session.get('user') == '':
            return redirect("/item-status")
        db = mysql.connect()
        cursor = db.cursor()
        user_email = session.get('user')
        flash('Status requested from the user {}'.format(status_form.status_option.data))
        # Reports are timestamped in US/Eastern.
        tz = timezone('US/Eastern')
        date_now = datetime.now(tz).strftime("%m/%d/%Y %H:%M:%S")
        # A report counts as a manager report when the user's home store is
        # the store being reported on.
        is_manager = 1 if session.get('user_store_id') == session.get('selected_id') else 0
        cursor.execute(
            "INSERT INTO status_list(date, item, rating, manager, store, id, user)"
            " VALUES(%s, %s, %s, %s, %s, %s, %s);",
            (date_now, session.get('selected_item'), status_form.status_option.data,
             is_manager, session['selected_store'], session['selected_id'], user_email))
        cursor.execute("COMMIT;")
        time.sleep(0.5)  # brief pause so the commit lands before re-rendering
        cursor.close()
        db.close()
        return redirect('/item-status')
    db = mysql.connect()
    cursor = db.cursor()
    item_and_id = (session.get('selected_item'), session['selected_id'])

    def _fetch_status_column(column):
        # `column` is always one of the hard-coded literals below, never user input.
        cursor.execute(
            "SELECT " + column + " FROM status_list WHERE item = %s AND id = %s;",
            item_and_id)
        return cursor.fetchall()

    raw_rating = _fetch_status_column("rating")
    raw_date = _fetch_status_column("date")
    raw_user = _fetch_status_column("user")
    raw_manager = _fetch_status_column("manager")
    # get basic store info
    cursor.execute("SELECT phone FROM all_stores WHERE id = %s;", (session['selected_id'],))
    raw_phone = cursor.fetchall()
    cursor.execute("SELECT freeFormAddress FROM all_stores WHERE id = %s;", (session['selected_id'],))
    raw_address = cursor.fetchall()
    messages, colors, type_item = parseMessage(session['selected_store'], int(session.get('selected_item')), raw_rating, raw_date, raw_user)
    managers = getManagerList(raw_manager)
    basic_info = [getAddress(raw_address[0][0]), getPhone(raw_phone[0][0])]
    cursor.close()
    db.close()
    # signIn toggles the template between anonymous (0) and logged-in (1) views.
    signIn = 0 if session.get('user') == '' else 1
    return render_template("status.html", managers=managers, type_query=session.get('selected_option'),
                           signIn=signIn, store=session['selected_store'], form=status_form,
                           messages=messages, len=len(messages), colors=colors, type_item=type_item,
                           basic_info=basic_info, selected_item=getItem(session.get('selected_item')))
@app.route('/index', methods=['GET', 'POST'])
@app.route('/', methods=['GET', 'POST'])
def homepage():
    """Landing page: reset the search state and branch on find-vs-report."""
    for key in ('stores', 'ids', 'addresses'):
        session[key] = []
    session['has_enabled'] = 'disabled'
    session['distance'] = 10000
    choice_form = optionForm()
    if choice_form.validate_on_submit():
        if choice_form.find_item.data:
            session['selected_option'] = 'find'
            return redirect('/location')
        if choice_form.provide_status.data:
            session['selected_option'] = 'status'
            return redirect('/location')
    return render_template("index.html", form = choice_form)
# Run the Flask development server (auto-reload + debug; not for production use).
if __name__ == '__main__':
    app.run(use_reloader=True, debug=True)
| 38.111979 | 374 | 0.610386 | 1,124 | 0.076802 | 0 | 0 | 8,789 | 0.600547 | 0 | 0 | 3,962 | 0.270721 |
348c6299bee8c546cf7aa75e9c41522b146fab11
| 2,634 |
py
|
Python
|
pkg/codegen/internal/test/testdata/output-funcs/py_tests/pulumi_py_tests.py
|
sticha/pulumi
|
76ee1b8ccfee815eb315d9e0e0ddfaaf505c472b
|
[
"Apache-2.0"
] | null | null | null |
pkg/codegen/internal/test/testdata/output-funcs/py_tests/pulumi_py_tests.py
|
sticha/pulumi
|
76ee1b8ccfee815eb315d9e0e0ddfaaf505c472b
|
[
"Apache-2.0"
] | null | null | null |
pkg/codegen/internal/test/testdata/output-funcs/py_tests/pulumi_py_tests.py
|
sticha/pulumi
|
76ee1b8ccfee815eb315d9e0e0ddfaaf505c472b
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2016-2021, Pulumi Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# These are copied from pulumi-azure-native or hand-written to
# compensate for an incomplete codegen test setup, we could fix the
# test to code-gen this from schema.
from collections import namedtuple
import pulumi
@pulumi.output_type
class StorageAccountKeyResponse(dict):
    """
    Represents one access key of a storage account.
    """
    def __init__(__self__, *,
                 creation_time: str,
                 key_name: str,
                 permissions: str,
                 value: str):
        """
        Represents one access key of a storage account.

        :param str creation_time: When the key was created, in round trip date format.
        :param str key_name: The key's name.
        :param str permissions: Key permissions -- read-only or full permissions.
        :param str value: The key itself, base-64 encoded.
        """
        # Register every field through pulumi.set so the output_type
        # machinery sees the canonical attribute names.
        for attr, attr_value in (("creation_time", creation_time),
                                 ("key_name", key_name),
                                 ("permissions", permissions),
                                 ("value", value)):
            pulumi.set(__self__, attr, attr_value)

    @property
    @pulumi.getter(name="creationTime")
    def creation_time(self) -> str:
        """
        When the key was created, in round trip date format.
        """
        return pulumi.get(self, "creation_time")

    @property
    @pulumi.getter(name="keyName")
    def key_name(self) -> str:
        """
        The key's name.
        """
        return pulumi.get(self, "key_name")

    @property
    @pulumi.getter
    def permissions(self) -> str:
        """
        Key permissions -- read-only or full permissions.
        """
        return pulumi.get(self, "permissions")

    @property
    @pulumi.getter
    def value(self) -> str:
        """
        The key itself, base-64 encoded.
        """
        return pulumi.get(self, "value")
# Minimal stand-ins for the generated SDK surface the codegen tests expect:
# `codegentest.outputs.StorageAccountKeyResponse` mirrors how a provider
# package exposes its output types.
CodegenTest = namedtuple('CodegenTest', ['outputs'])
Outputs = namedtuple('Outputs', ['StorageAccountKeyResponse'])
outputs = Outputs(StorageAccountKeyResponse)
codegentest = CodegenTest(outputs)
| 30.988235 | 89 | 0.65186 | 1,603 | 0.60858 | 0 | 0 | 1,623 | 0.616173 | 0 | 0 | 1,565 | 0.594153 |
348cdede6c52f1578247987d1f2b6285625e722a
| 1,800 |
py
|
Python
|
scripts/gan/conditional_dcgan/gen_data_from_img.py
|
hiroyasuakada/ros_start
|
10221ad2bcaefa4aaadc6c90424a3751126ac256
|
[
"MIT"
] | null | null | null |
scripts/gan/conditional_dcgan/gen_data_from_img.py
|
hiroyasuakada/ros_start
|
10221ad2bcaefa4aaadc6c90424a3751126ac256
|
[
"MIT"
] | null | null | null |
scripts/gan/conditional_dcgan/gen_data_from_img.py
|
hiroyasuakada/ros_start
|
10221ad2bcaefa4aaadc6c90424a3751126ac256
|
[
"MIT"
] | null | null | null |
# generate data from bag images
from PIL import Image
from pathlib import Path
import os, glob # manipulate file or directory
import numpy as np
class DataArrangement(object):
    """Load category-labelled .jpg images from ../../video_to_image into numpy arrays."""

    def __init__(self):
        # Directory containing this script; anchor for the relative data paths.
        self.path = Path(__file__).parent
        self.current_directories = ['not_traking', 'traking']
        self.X_not_traking = []
        self.Y_not_traking = []
        self.X_traking = []
        self.Y_traking = []

    def load_data(self):
        """Read every .jpg under each category's subdirectories.

        Returns four numpy arrays:
        (X_not_traking, Y_not_traking, X_traking, Y_traking), where X holds
        RGB image data and Y holds the index of the subdirectory each image
        came from.
        """
        for current_directory in self.current_directories:
            print(current_directory)  # not traking or traking
            # Bug fix: build the directory from self.path each iteration
            # instead of mutating self.path with `/=`, which only reached the
            # second category by accident via the leading '../..' segments.
            base = self.path / '../../video_to_image/{}'.format(current_directory)
            directories = os.listdir(base)
            for i, directory in enumerate(directories):
                print('{}, {}'.format(i, directory))
                files = glob.glob(str(base.resolve()) + '/{}/*.jpg'.format(directory))
                for j, file in enumerate(files):
                    image = Image.open(file).convert('RGB')
                    # image = image.resize(50, 50)
                    data = np.asarray(image)
                    print('{} - {}'.format(i, j))
                    if current_directory == 'not_traking':  # section off files by directory name
                        self.X_not_traking.append(data)
                        self.Y_not_traking.append(i)
                    else:
                        self.X_traking.append(data)
                        self.Y_traking.append(i)
        return np.array(self.X_not_traking), np.array(self.Y_not_traking), \
               np.array(self.X_traking), np.array(self.Y_traking)
# Script entry point: build the image/label arrays once.
if __name__ == '__main__':
    DA = DataArrangement()
    X_not_traking, Y_not_traking, X_traking, Y_traking = DA.load_data()
348d2d60fb5385e97d0dc27346784f7b73fdadac
| 331 |
py
|
Python
|
example/demo/book/views.py
|
iwwxiong/flask_restapi
|
57fca3bf07d913b31b6b7ef877328b0e07056c39
|
[
"MIT"
] | 6 |
2019-04-23T02:18:55.000Z
|
2019-12-10T13:16:21.000Z
|
example/demo/book/views.py
|
dracarysX/flask_scaffold
|
57fca3bf07d913b31b6b7ef877328b0e07056c39
|
[
"MIT"
] | null | null | null |
example/demo/book/views.py
|
dracarysX/flask_scaffold
|
57fca3bf07d913b31b6b7ef877328b0e07056c39
|
[
"MIT"
] | 3 |
2019-05-22T06:00:17.000Z
|
2020-01-14T17:02:35.000Z
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# flask_restapi import
from flask_restapi.views import APIMethodView
from .models import Book
from .forms import BookForm
class BookView(APIMethodView):
model = Book
paginate_by = 10
context_object_name = 'items'
pk_url_kwarg = 'book_id'
form_class = BookForm
| 19.470588 | 45 | 0.716012 | 158 | 0.477341 | 0 | 0 | 0 | 0 | 0 | 0 | 83 | 0.250755 |
348dadc81585ed38d6f51d902f482767add11796
| 2,484 |
py
|
Python
|
backend/apps/cmdb/models/field.py
|
codelieche/erp
|
96861ff63a63a93918fbd5181ffb2646446d0eec
|
[
"MIT"
] | null | null | null |
backend/apps/cmdb/models/field.py
|
codelieche/erp
|
96861ff63a63a93918fbd5181ffb2646446d0eec
|
[
"MIT"
] | 29 |
2020-06-05T19:57:11.000Z
|
2022-02-26T13:42:36.000Z
|
backend/apps/cmdb/models/field.py
|
codelieche/erp
|
96861ff63a63a93918fbd5181ffb2646446d0eec
|
[
"MIT"
] | null | null | null |
# -*- coding:utf-8 -*-
from django.db import models
from account.models import User
# from cmdb.types import get_instance
from cmdb.models import Model
class Field(models.Model):
    """
    A field definition belonging to an asset (CMDB) model.
    """
    # Machine-readable field identifier (unique per model, see Meta).
    code = models.SlugField(verbose_name="字段", max_length=40)
    # Human-readable display name of the field.
    name = models.CharField(verbose_name="字段(中文名)", max_length=60, blank=True, null=True)
    # The CMDB model this field belongs to.
    model = models.ForeignKey(verbose_name="模型", related_name="fields", to=Model, on_delete=models.CASCADE)
    # The field's type, e.g.:
    #   text types:    ChartField, TextField
    #   numeric types: IntField, FloatField
    #   other types:   BooleanField, IPField, DateField, DateTimeField
    type = models.CharField(verbose_name="字段类型", default="ChartField", max_length=20, blank=True)
    # Whether null values are allowed.
    allow_null = models.BooleanField(verbose_name="允许空值", default=False, blank=True)
    # Whether to create an index, whether values are unique, whether the field is multi-valued.
    db_index = models.BooleanField(verbose_name="使用索引", default=False, blank=True)
    unique = models.BooleanField(verbose_name="是否唯一", default=False, blank=True)
    multi = models.BooleanField(verbose_name="是否多值", blank=True, default=False)
    # Custom per-type options,
    # e.g. IntField: min/max; ChartField: prefix/suffix.
    option = models.JSONField(verbose_name="选项设置", blank=True, null=True)
    # Other metadata (composite indexes, default ordering, ...) can be stored here.
    meta = models.JSONField(verbose_name="元数据信息", blank=True, null=True)
    description = models.CharField(verbose_name="描述", blank=True, null=True, max_length=256)
    user = models.ForeignKey(verbose_name="用户", to=User, blank=True, null=True, on_delete=models.SET_NULL)
    # Soft-delete flag; see delete() below.
    deleted = models.BooleanField(verbose_name="删除", blank=True, default=False)
    time_added = models.DateTimeField(verbose_name="添加时间", blank=True, auto_now_add=True)
    # Value validation was moved out of this model because it would create a
    # circular import:
    # def validate_value(self, value, stringify=False):
    #     """
    #     Validate a value for this field and return it converted to the field's type.
    #     """
    #     instance = get_instance(self.type, self.option)
    #     if instance:
    #         if stringify:
    #             v = instance.stringify(value=value)
    #         else:
    #             v = instance.destringify(value=value)
    #         return v
    #     else:
    #         return ValueError('校验字段的值出错:未找到类型实例')
    class Meta:
        verbose_name = "资产模型字段"
        verbose_name_plural = verbose_name
        unique_together = ('model', 'code')

    def delete(self, using=None, keep_parents=False):
        # Soft delete: mark the row instead of removing it; idempotent.
        if self.deleted:
            return
        else:
            self.deleted = True
            self.save(update_fields=('deleted',))
| 38.8125 | 107 | 0.667069 | 2,738 | 0.946095 | 0 | 0 | 0 | 0 | 0 | 0 | 1,242 | 0.429164 |
3490cf6f7075a2aef1693d69a9521b0b995d567b
| 604 |
py
|
Python
|
unidad2/pc2/e1.py
|
upc-projects/cc76
|
0f1663cc439889b0c7e924923639e7c2e032b9b6
|
[
"MIT"
] | 1 |
2020-09-21T16:56:24.000Z
|
2020-09-21T16:56:24.000Z
|
unidad2/pc2/e1.py
|
upc-projects/cc76
|
0f1663cc439889b0c7e924923639e7c2e032b9b6
|
[
"MIT"
] | null | null | null |
unidad2/pc2/e1.py
|
upc-projects/cc76
|
0f1663cc439889b0c7e924923639e7c2e032b9b6
|
[
"MIT"
] | null | null | null |
import math
def Bellman_ford(G):
    """Bellman-Ford style relaxation that maximises path score.

    G[u] is a list of (v, w) edges. Returns (distance, parents) where
    distance[k] is the best score found for node k and parents[k] is the
    predecessor index that produced it (or None).

    NOTE(review): edge targets v are indexed as v-1 while sources use u-1
    with u counting from 0, so u == 0 wraps to the last node via Python's
    negative indexing -- preserved exactly as in the original; confirm
    whether that mixed indexing is intended.
    """
    n = len(G)
    distance = [-math.inf] * n
    parents = [None] * n
    distance[0] = 0
    # n-1 relaxation passes, as in standard Bellman-Ford.
    for _ in range(n - 1):
        for u, edges in enumerate(G):
            for v, w in edges:
                candidate = distance[u - 1] + w
                if candidate > distance[v - 1]:
                    distance[v - 1] = candidate
                    parents[v - 1] = u
    return distance, parents
# Game graph: G[u] lists (target, score) edges out of node u.
# NOTE(review): targets look 1-based (Bellman_ford indexes them as v-1),
# but a 0 target wraps to the last node -- confirm the intended numbering.
G = [[(2,0)],
     [(0,20), (3,20)],
     [(4,-60)],
     [(5,-60)],
     []]
win = True
distance, _ = Bellman_ford(G)
# The game is lost if any node's best score drops to -100 or below.
for i in distance:
    if i <= -100:
        win=False
print("winnable" if win else "hopeless")
| 18.875 | 53 | 0.463576 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 20 | 0.033113 |
3491846c1c297a3eafd5e93b910d0c66155d9336
| 1,743 |
py
|
Python
|
Incident-Response/Tools/dfirtrack/dfirtrack_main/exporter/markdown/markdown_check_data.py
|
sn0b4ll/Incident-Playbook
|
cf519f58fcd4255674662b3620ea97c1091c1efb
|
[
"MIT"
] | 1 |
2021-07-24T17:22:50.000Z
|
2021-07-24T17:22:50.000Z
|
Incident-Response/Tools/dfirtrack/dfirtrack_main/exporter/markdown/markdown_check_data.py
|
sn0b4ll/Incident-Playbook
|
cf519f58fcd4255674662b3620ea97c1091c1efb
|
[
"MIT"
] | 2 |
2022-02-28T03:40:31.000Z
|
2022-02-28T03:40:52.000Z
|
Incident-Response/Tools/dfirtrack/dfirtrack_main/exporter/markdown/markdown_check_data.py
|
sn0b4ll/Incident-Playbook
|
cf519f58fcd4255674662b3620ea97c1091c1efb
|
[
"MIT"
] | 2 |
2022-02-25T08:34:51.000Z
|
2022-03-16T17:29:44.000Z
|
from django.contrib import messages
from dfirtrack_config.models import SystemExporterMarkdownConfigModel
from dfirtrack_main.logger.default_logger import warning_logger
import os
def check_config(request):
    """ check variables in config

    Validate the markdown exporter configuration: `markdown_path` must be a
    non-empty string naming an existing, writeable directory.  Every failed
    check flashes a Django error message and writes a warning log entry.

    Returns True when the export must be stopped, False otherwise.
    """
    # get config model
    model = SystemExporterMarkdownConfigModel.objects.get(system_exporter_markdown_config_name = 'SystemExporterMarkdownConfig')

    # reset stop condition
    stop_exporter_markdown = False

    # check MARKDOWN_PATH for empty string
    if not model.markdown_path:
        # typo fix: message previously said "emtpy"
        messages.error(request, "`MARKDOWN_PATH` contains an empty string. Check config!")
        # call logger
        warning_logger(str(request.user), " EXPORTER_MARKDOWN variable MARKDOWN_PATH empty string")
        stop_exporter_markdown = True
    else:
        # existence and writability only make sense for a non-empty path;
        # both checks run so every problem is reported at once
        if not os.path.isdir(model.markdown_path):
            messages.error(request, "`MARKDOWN_PATH` does not exist in file system. Check config or filesystem!")
            # call logger
            warning_logger(str(request.user), " EXPORTER_MARKDOWN path MARKDOWN_PATH not existing")
            stop_exporter_markdown = True
        if not os.access(model.markdown_path, os.W_OK):
            messages.error(request, "`MARKDOWN_PATH` is not writeable. Check config or filesystem!")
            # call logger
            warning_logger(str(request.user), " EXPORTER_MARKDOWN path MARKDOWN_PATH not writeable")
            stop_exporter_markdown = True

    return stop_exporter_markdown
| 44.692308 | 128 | 0.729776 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 729 | 0.418244 |
3491ecdf0d743d67d90929ff69bd755f765fc9ba
| 44,309 |
py
|
Python
|
src/module_factions.py
|
faycalki/medieval-conquests
|
113e13e2b166b79517c14f2c13f7561307a89f75
|
[
"MIT"
] | 7 |
2019-08-11T14:20:20.000Z
|
2021-11-21T06:48:24.000Z
|
src/module_factions.py
|
faycalki/medieval-conquests
|
113e13e2b166b79517c14f2c13f7561307a89f75
|
[
"MIT"
] | null | null | null |
src/module_factions.py
|
faycalki/medieval-conquests
|
113e13e2b166b79517c14f2c13f7561307a89f75
|
[
"MIT"
] | null | null | null |
# -*- coding: utf8 -*-
from header_factions import *
####################################################################################################################
# Each faction record contains the following fields:
# 1) Faction id: used for referencing factions in other files.
# The prefix fac_ is automatically added before each faction id.
# 2) Faction name.
# 3) Faction flags. See header_factions.py for a list of available flags
# 4) Faction coherence. Relation between members of this faction.
# 5) Relations. This is a list of relation records.
# Each relation record is a tuple that contains the following fields:
# 5.1) Faction. Which other faction this relation is referring to
# 5.2) Value: Relation value between the two factions.
# Values range between -1 and 1.
# 6) Ranks
# 7) Faction color (default is gray)
####################################################################################################################
factions = [
("no_faction", "No Faction", 0, 0.9, []),
("commoners", "Commoners", 0, 0.1, [("forest_bandits",-0.20),("player_faction",0.10),("mountain_bandits",-0.20),("undeads",-0.70),("outlaws",-0.60)]),
("outlaws", "Outlaws", 0, 0.5, [("kingdom_9",-0.05),("kingdom_15",-0.05),("kingdom_23",-0.05),("kingdom_3",-0.05),("kingdom_24",-0.05),("kingdom_10",-0.05),("kingdom_1",-0.05),("kingdom_20",-0.05),("kingdom_13",-0.05),("kingdom_19",-0.05),("kingdom_36",-0.05),("kingdom_11",-0.05),("player_supporters_faction",-0.05),("kingdom_34",-0.05),("kingdom_37",-0.05),("kingdom_25",-0.05),("kingdom_8",-0.05),("kingdom_5",-0.05),("kingdom_42",-0.05),("kingdom_2",-0.05),("kingdom_31",-0.05),("merchants",-0.50),("kingdom_22",-0.05),("kingdom_32",-0.05),("innocents",-0.05),("kingdom_35",-0.05),("player_faction",-0.15),("kingdom_18",-0.05),("kingdom_26",-0.05),("papacy",-0.05),("kingdom_38",-0.05),("kingdom_28",-0.05),("crusade",-0.05),("kingdom_39",-0.05),("kingdom_30",-0.05),("manhunters",-0.60),("kingdom_7",-0.05),("kingdom_16",-0.05),("kingdom_6",-0.05),("kingdom_12",-0.05),("kingdom_33",-0.05),("kingdom_40",-0.05),("kingdom_4",-0.05),("kingdom_29",-0.05),("commoners",-0.60),("kingdom_41",-0.05),("kingdom_14",-0.05),("kingdom_17",-0.05),("kingdom_27",-0.05)], [], 0x00888888),
("neutral", "Neutral", 0, 0.1, [], [], 0x00ffffff),
("innocents", "Innocents", ff_always_hide_label, 0.5, [("outlaws",-0.05),("dark_knights",-0.90)]),
("merchants", "Merchants", ff_always_hide_label, 0.5, [("forest_bandits",-0.50),("deserters",-0.50),("mountain_bandits",-0.50),("outlaws",-0.50)]),
("dark_knights", "{!}Dark Knights", 0, 0.5, [("innocents",-0.90),("player_faction",-0.40)]),
("culture_finnish", "{!}culture finnish", 0, 0.9, []),
("culture_mazovian", "{!}culture mazovian", 0, 0.9, []),
("culture_serbian", "{!}culture serbian", 0, 0.9, []),
("culture_welsh", "{!}culture welsh", 0, 0.9, []),
("culture_teutonic", "{!}culture teutonic", 0, 0.9, []),
("culture_balkan", "{!}culture balkan", 0, 0.9, []),
("culture_rus", "{!}culture rus", 0, 0.9, []),
("culture_nordic", "{!}culture nordic", 0, 0.9, []),
("culture_baltic", "{!}culture baltic", 0, 0.9, []),
("culture_marinid", "{!}culture marinid", 0, 0.9, []),
("culture_mamluke", "{!}culture mamluke", 0, 0.9, []),
("culture_byzantium", "{!}culture byzantium", 0, 0.9, []),
("culture_iberian", "{!}culture iberian", 0, 0.9, []),
("culture_italian", "{!}culture italian", 0, 0.9, []),
("culture_andalus", "{!}culture andalus", 0, 0.9, []),
("culture_gaelic", "{!}culture gaelic", 0, 0.9, []),
("culture_anatolian_christian", "{!}culture anatolian", 0, 0.9, []),
("culture_anatolian", "{!}culture anatolian", 0, 0.9, []),
("culture_scotish", "{!}culture scotish", 0, 0.9, []),
("culture_western", "{!}culture western", 0, 0.9, []),
("culture_mongol", "{!}culture mongol", 0, 0.9, []),
("player_faction", "Player Faction", 0, 0.9, [("black_khergits",-0.30),("player_supporters_faction",1.00),("peasant_rebels",-0.40),("forest_bandits",-0.15),("manhunters",0.10),("deserters",-0.10),("mountain_bandits",-0.15),("undeads",-0.50),("outlaws",-0.15),("dark_knights",-0.40),("commoners",0.10)], [], 0x00cccccc),
("player_supporters_faction", "Player's Supporters", 0, 0.9, [("peasant_rebels",-0.10),("forest_bandits",-0.05),("player_faction",1.00),("deserters",-0.02),("mountain_bandits",-0.05),("outlaws",-0.05)], [], 0x00468493),
("kingdom_1", "Teutonic Order", 0, 0.9, [("black_khergits",-0.02),("kingdom_8",-0.20),("peasant_rebels",-0.10),("forest_bandits",-0.05),("deserters",-0.02),("kingdom_6",0.50),("mountain_bandits",-0.05),("outlaws",-0.05),("kingdom_75",-0.50),("kingdom_76",-0.50),("kingdom_77",-0.50),("kingdom_78",-0.50),("kingdom_4",0.10),("kingdom_14",0.10),("kingdom_43",-40.00)], [], 0x00e9e9e9),
("kingdom_2", "Kingdom of Lithuania", 0, 0.9, [("black_khergits",-0.02),("kingdom_3",0.10),("kingdom_36",0.50),("kingdom_34",0.50),("peasant_rebels",-0.10),("forest_bandits",-0.05),("kingdom_35",0.50),("crusade",-0.50),("deserters",-0.02),("mountain_bandits",-0.05),("outlaws",-0.05),("kingdom_33",0.50)], [], 0x00badeb2),
("kingdom_3", "Golden Horde", 0, 0.9, [("kingdom_8",0.10),("kingdom_5",-1.00),("kingdom_2",0.10),("peasant_rebels",-0.10),("forest_bandits",-0.05),("crusade",-0.50),("deserters",-0.02),("kingdom_7",-1.00),("mountain_bandits",-0.05),("outlaws",-0.05)], [], 0x00a33e32),
("kingdom_4", "Kingdom of Denmark", 0, 0.9, [("kingdom_1",0.10),("peasant_rebels",-0.10),("forest_bandits",-0.05),("deserters",-0.02),("mountain_bandits",-0.05),("outlaws",-0.05)], [], 0x009b1a1a),
("kingdom_5", "Polish Principalities", 0, 0.9, [("kingdom_3",-1.00),("peasant_rebels",-0.10),("forest_bandits",-0.05),("deserters",-0.02),("kingdom_7",0.10),("mountain_bandits",-0.05),("outlaws",-0.05)], [], 0x00ff0000),
("kingdom_6", "Holy Roman Empire", 0, 0.9, [("kingdom_1",0.50),("kingdom_42",1.00),("peasant_rebels",-0.10),("forest_bandits",-0.05),("kingdom_38",0.50),("deserters",-0.02),("mountain_bandits",-0.05),("outlaws",-0.05),("kingdom_41",1.00)], [], 0x00ffcc00),
("kingdom_7", "Kingdom of Hungary", 0, 0.9, [("kingdom_3",-1.00),("kingdom_5",0.10),("peasant_rebels",-0.10),("forest_bandits",-0.05),("deserters",-0.02),("mountain_bandits",-0.05),("outlaws",-0.05)], [], 0x00289327),
("kingdom_8", "Novgorod Republic", 0, 0.9, [("kingdom_3",0.10),("kingdom_1",-0.20),("peasant_rebels",-0.10),("forest_bandits",-0.05),("deserters",-0.02),("mountain_bandits",-0.05),("outlaws",-0.05),("kingdom_14",-0.20)], [], 0x009e0b6f),
("kingdom_9", "Kingdom of England", 0, 0.9, [("kingdom_37",-1.00),("peasant_rebels",-0.10),("forest_bandits",-0.05),("deserters",-0.02),("mountain_bandits",-0.05),("kingdom_79",-0.50),("outlaws",-0.05)], [], 0x00931124),
("kingdom_10", "Kingdom of France", 0, 0.9, [("peasant_rebels",-0.10),("forest_bandits",-0.05),("deserters",-0.02),("mountain_bandits",-0.05),("outlaws",-0.05)], [], 0x00002395),
("kingdom_11", "Kingdom of Norway", 0, 0.9, [("kingdom_13",-0.20),("peasant_rebels",-0.10),("forest_bandits",-0.05),("deserters",-0.02),("mountain_bandits",-0.05),("outlaws",-0.05),("kingdom_12",-0.20)], [], 0x006669d6),
("kingdom_12", "Kingdom of Scotland", 0, 0.9, [("kingdom_11",-0.20),("peasant_rebels",-0.10),("forest_bandits",-0.05),("deserters",-0.02),("mountain_bandits",-0.05),("outlaws",-0.05)], [], 0x0022d8a7),
("kingdom_13", "Gaelic Kingdoms", 0, 0.9, [("kingdom_11",-0.20),("peasant_rebels",-0.10),("forest_bandits",-0.05),("deserters",-0.02),("mountain_bandits",-0.05),("outlaws",-0.05)], [], 0x0077b322),
("kingdom_14", "Kingdom of Sweden", 0, 0.9, [("kingdom_1",0.10),("kingdom_8",-0.20),("peasant_rebels",-0.10),("forest_bandits",-0.05),("deserters",-0.02),("mountain_bandits",-0.05),("outlaws",-0.05)], [], 0x003254b5),
("kingdom_15", "Kingdom of Halych-Volhynia", 0, 0.9, [("peasant_rebels",-0.10),("forest_bandits",-0.05),("deserters",-0.02),("mountain_bandits",-0.05),("outlaws",-0.05)], [], 0x00ece874),
("kingdom_16", "Kingdom of Portugal", 0, 0.9, [("kingdom_20",-40.00),("peasant_rebels",-0.10),("forest_bandits",-0.05),("deserters",-0.02),("mountain_bandits",-0.05),("outlaws",-0.05)], [], 0x00003399),
("kingdom_17", "Crown of Aragon", 0, 0.9, [("peasant_rebels",-0.10),("forest_bandits",-0.05),("deserters",-0.02),("mountain_bandits",-0.05),("outlaws",-0.05)], [], 0x0007b233),
("kingdom_18", "Crown of Castile", 0, 0.9, [("kingdom_20",-40.00),("peasant_rebels",-0.10),("forest_bandits",-0.05),("deserters",-0.02),("mountain_bandits",-0.05),("outlaws",-0.05)], [], 0x00d85ac4),
("kingdom_19", "Kingdom of Navarre", 0, 0.9, [("peasant_rebels",-0.10),("forest_bandits",-0.05),("deserters",-0.02),("mountain_bandits",-0.05),("outlaws",-0.05)], [], 0x00f7f497),
("kingdom_20", "Emirate of Granada", 0, 0.9, [("peasant_rebels",-0.10),("forest_bandits",-0.05),("kingdom_18",-40.00),("crusade",-0.50),("deserters",-0.02),("kingdom_16",-40.00),("mountain_bandits",-0.05),("outlaws",-0.05)], [], 0x00abc904),
("papacy", "Papal States", 0, 0.9, [("peasant_rebels",-0.10),("forest_bandits",-0.05),("crusade",-0.50),("deserters",-0.02),("mountain_bandits",-0.05),("outlaws",-0.05)], [], 0x00fff17a),
("kingdom_22", "Byzantine Empire", 0, 0.9, [("peasant_rebels",-0.10),("forest_bandits",-0.05),("kingdom_26",-1.00),("deserters",-0.02),("mountain_bandits",-0.05),("outlaws",-0.05)], [], 0x00760d0d),
("kingdom_23", "Crusader States", 0, 0.9, [("kingdom_25",-1.00),("peasant_rebels",-0.10),("forest_bandits",-0.05),("deserters",-0.02),("mountain_bandits",-0.05),("outlaws",-0.05),("kingdom_27",0.10)], [], 0x00f3efb8),
("kingdom_24", "Kingdom of Sicily", 0, 0.9, [("peasant_rebels",-0.10),("forest_bandits",-0.05),("deserters",-0.02),("mountain_bandits",-0.05),("outlaws",-0.05)], [], 0x00799cb5),
("kingdom_25", "Mamluk Sultanate", 0, 0.9, [("kingdom_23",-1.00),("peasant_rebels",-0.10),("forest_bandits",-0.05),("crusade",-0.50),("deserters",-0.02),("mountain_bandits",-0.05),("outlaws",-0.05),("kingdom_27",-1.00)], [], 0x00ebe800),
("kingdom_26", "Latin Empire", 0, 0.9, [("peasant_rebels",-0.10),("kingdom_22",-1.00),("forest_bandits",-0.05),("deserters",-0.02),("mountain_bandits",-0.05),("outlaws",-0.05)], [], 0x00b26248),
("kingdom_27", "Ilkhanate", 0, 0.9, [("kingdom_23",0.10),("kingdom_25",-1.00),("peasant_rebels",-0.10),("forest_bandits",-0.05),("crusade",-0.50),("deserters",-0.02),("mountain_bandits",-0.05),("outlaws",-0.05)], [], 0x00e19004),
("kingdom_28", "Hafsid Dynasty", 0, 0.9, [("peasant_rebels",-0.10),("forest_bandits",-0.05),("crusade",-0.50),("deserters",-0.02),("mountain_bandits",-0.05),("outlaws",-0.05)], [], 0x00a48460),
("kingdom_29", "Kingdom of Serbia", 0, 0.9, [("peasant_rebels",-0.10),("forest_bandits",-0.05),("deserters",-0.02),("mountain_bandits",-0.05),("outlaws",-0.05)], [], 0x00b38263),
("kingdom_30", "Bulgarian Empire", 0, 0.9, [("peasant_rebels",-0.10),("forest_bandits",-0.05),("deserters",-0.02),("mountain_bandits",-0.05),("outlaws",-0.05)], [], 0x0076a296),
("kingdom_31", "Marinid Dynasty", 0, 0.9, [("peasant_rebels",-0.10),("forest_bandits",-0.05),("crusade",-0.50),("deserters",-0.02),("mountain_bandits",-0.05),("outlaws",-0.05)], [], 0x00c1272d),
("kingdom_32", "Republic of Venice", 0, 0.9, [("peasant_rebels",-0.10),("forest_bandits",-0.05),("deserters",-0.02),("mountain_bandits",-0.05),("outlaws",-0.05)], [], 0x00c1172d),
("kingdom_33", "Yotvingians", 0, 0.9, [("kingdom_2",0.50),("peasant_rebels",-0.10),("forest_bandits",-0.05),("crusade",-0.50),("deserters",-0.02),("mountain_bandits",-0.05),("outlaws",-0.05)], [], 0x003e7583),
("kingdom_34", "Prussians", 0, 0.9, [("kingdom_2",0.50),("peasant_rebels",-0.10),("forest_bandits",-0.05),("crusade",-0.50),("deserters",-0.02),("mountain_bandits",-0.05),("outlaws",-0.05)], [], 0x0065c0d7),
("kingdom_35", "Curonians", 0, 0.9, [("kingdom_2",0.50),("peasant_rebels",-0.10),("forest_bandits",-0.05),("crusade",-0.50),("deserters",-0.02),("mountain_bandits",-0.05),("outlaws",-0.05)], [], 0x003e7583),
("kingdom_36", "Samogitians", 0, 0.9, [("kingdom_2",0.50),("peasant_rebels",-0.10),("forest_bandits",-0.05),("crusade",-0.50),("deserters",-0.02),("mountain_bandits",-0.05),("outlaws",-0.05)], [], 0x00529cae),
("kingdom_37", "Principality of Wales", 0, 0.9, [("kingdom_9",-1.00),("peasant_rebels",-0.10),("forest_bandits",-0.05),("deserters",-0.02),("mountain_bandits",-0.05),("outlaws",-0.05)], [], 0x0000dc00),
("kingdom_38", "Republic of Genoa", 0, 0.9, [("peasant_rebels",-0.10),("forest_bandits",-0.05),("crusade",-0.50),("kingdom_39",-0.50),("deserters",-0.02),("kingdom_6",0.50),("mountain_bandits",-0.05),("outlaws",-0.05)], [], 0x00e1900a),
("kingdom_39", "Republic of Pisa", 0, 0.9, [("peasant_rebels",-0.10),("forest_bandits",-0.05),("kingdom_38",-0.50),("deserters",-0.02),("mountain_bandits",-0.05),("outlaws",-0.05)], [], 0x0007e233),
("kingdom_40", "Guelphs", 0, 0.9, [("peasant_rebels",-0.10),("forest_bandits",-0.05),("deserters",-0.02),("mountain_bandits",-0.05),("outlaws",-0.05),("kingdom_41",-0.80)], [], 0x003254e5),
("kingdom_41", "Ghibellines", 0, 0.9, [("peasant_rebels",-0.10),("forest_bandits",-0.05),("deserters",-0.02),("kingdom_6",1.00),("mountain_bandits",-0.05),("outlaws",-0.05),("kingdom_40",-0.80)], [], 0x009e026a),
("kingdom_42", "Kingdom of Bohemia", 0, 0.9, [("peasant_rebels",-0.10),("forest_bandits",-0.05),("deserters",-0.02),("kingdom_6",1.00),("mountain_bandits",-0.05),("outlaws",-0.05)], [], 0x00e8e8e8),
#####Kaos Safe Begin
#MANUAL EDITS REQUIRED
#####Kaos begin: add factions to the correct major faction, and fix the faction colors, especially for the start as king/prince as well as for the rebel/civil factions.
#In module_factions, search for the line ("kingdoms_end","{!}kingdoms_end", 0, 0,[], []), and place these lines above it
#KAOS (POLITICAL)
#Begin rebels (they are only named this until they win the war, if they win it at all); must be double the amount of regular factions to test for simple_trigger errors, empires too. *42-84
("kingdom_43", "Teutonics-Prussians", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05),("kingdom_1", -40.00)], [], 0x00888888), #0x00c9c9c9 #Change colors here, as well as relations 0XCC9900 is color, ("forest_bandits", -0.05) means -5 relations against forest bandits, must apply to both factions.
("kingdom_44", "Wendish Empire", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00698064),
("kingdom_45", "Pecheng Clan", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00813229),
("kingdom_46", "Kingdom of Scandinavia", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x008a372d),
("kingdom_47", "Pomeralia Principalities", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00872f2f),
("kingdom_48", "Empire of Germania", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x008c7001),
("kingdom_49", "Empire of Carpathia", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00278c25),
("kingdom_50", "Kievan Rus", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x006555d6), #0x00E714A3
("kingdom_51", "Kingdom of Brittania", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x006a1c12),
("kingdom_52", "New Francia", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00243777),
("kingdom_53", "Kingdom of Norge", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x000f1d67),
("kingdom_54", "Chiefdom of Kemi", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00105a46),
("kingdom_55", "Kingdom of Ireland", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x0048720e),
("kingdom_56", "Chiefdom of Sapmi", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00314375),
("kingdom_57", "Kingdom of Galicia-Volhynia", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00737013),
("kingdom_58", "Hispania", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00002b80),
("kingdom_59", "Principality of Catalonia", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00035218),
("kingdom_60", "Crown of Leon", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00572850),
("kingdom_61", "Kingdom of Vasconia", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x006d6b44),
("kingdom_62", "Ahlidid Dynasty", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00485500),
("kingdom_63", "Union of Papacy", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00817408),
("kingdom_64", "Eastern Roman Empire", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00440000),
("kingdom_65", "Kingdom of Armenia", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00858369),
("kingdom_66", "Kingdom of Apulia", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x002c4557),
("kingdom_67", "Hashimid Emirate", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x006f6d00),
("kingdom_68", "Empire of Aegean", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x008c5341),
("kingdom_69", "Kingdom of Georgia", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00ed370b), #Red
("kingdom_70", "Maghrawavid Dynasty", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x006F6457),
("kingdom_71", "Kingdom of Epirius", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00338681),
("kingdom_72", "Kingdom of Karvuna-Moesia", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00394f49),
("kingdom_73", "Zayanid Dynasty", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00803739),
("kingdom_74", "Kingdom of Croatia", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00881184),
("kingdom_75", "Middle Rurikid Dynasty", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("kingdom_1", -0.50),("kingdom_78", 0.85),("kingdom_76", 0.85),("kingdom_77", 0.85),("forest_bandits", -0.05)], [], 0x00566f75),
("kingdom_76", "West Rurikid Dynasty", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("kingdom_1", -0.50),("kingdom_75", 0.85),("kingdom_78", 0.85),("kingdom_77", 0.85),("forest_bandits", -0.05)], [], 0x0048DAFF),
("kingdom_77", "East Rurikid Dynasty", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("kingdom_1", -0.50),("kingdom_75", 0.85),("kingdom_76", 0.85),("kingdom_78", 0.85),("forest_bandits", -0.05)], [], 0x00B1EFFF),
("kingdom_78", "North Rurikid Dynasty", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("kingdom_1", -0.50),("kingdom_75", 0.85),("kingdom_76", 0.85),("kingdom_77", 0.85),("forest_bandits", -0.05)], [], 0x0028464f),
("kingdom_79", "Kingdom of Gwynedd", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("kingdom_9", -0.80),("forest_bandits", -0.05)], [], 0x00526652),
("kingdom_80", "Most Serene House Doria", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00FFAD26),
("kingdom_81", "Most Serene House D'Appiano", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00F19E0E),
("kingdom_82", "Kingdom of Italy", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00a19c7d),
("kingdom_83", "Kingdom of Florence", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x005d1144),
("kingdom_84", "Kingdom of House Premyslid", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x0075728a),
#End rebels
##Begin rebels old colors
# ("kingdom_43", "Teutonics-Prussians", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05),("kingdom_1", -40.00)], [], 0x00888888), #0x00c9c9c9 #Change colors here, as well as relations 0XCC9900 is color, ("forest_bandits", -0.05) means -5 relations against forest bandits, must apply to both factions.
# ("kingdom_44", "Wendish Empire", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x009DC196),
# ("kingdom_45", "Pecheng Clan", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00a33e32),
# ("kingdom_46", "Kingdom of Scandinavia", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00EC5D4C),
# ("kingdom_47", "Pomeralia Principalities", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00FF5A5A),
# ("kingdom_48", "Empire of Germania", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00BD9700),
# ("kingdom_49", "Empire of Carpathia", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x003DD93B),
# ("kingdom_50", "Kievan Rus", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00700c50), #0x00E714A3
# ("kingdom_51", "Kingdom of Brittania", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00DE3622),
# ("kingdom_52", "New Francia", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x003D69FA),
# ("kingdom_53", "Kingdom of Norge", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00FF3737),
# ("kingdom_54", "Chiefdom of Kemi", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00157B5F),
# ("kingdom_55", "Kingdom of Ireland", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x0090E715),
# ("kingdom_56", "Chiefdom of Sapmi", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00668EFF),
# ("kingdom_57", "Kingdom of Galicia-Volhynia", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00FFF826),
# ("kingdom_58", "Hispania", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x000055FF),
# ("kingdom_59", "Principality of Catalonia", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x0004681E),
# ("kingdom_60", "Crown of Leon", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x0078366E),
# ("kingdom_61", "Kingdom of Vasconia", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00ADAA6A),
# ("kingdom_62", "Ahlidid Dynasty", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00586800),
# ("kingdom_63", "Union of Papacy", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00F0D80B),
# ("kingdom_64", "Eastern Roman Empire", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x004E0000),
# ("kingdom_65", "Kingdom of Armenia", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00FFFFFF),
# ("kingdom_66", "Kingdom of Apulia", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x003B5D75),
# ("kingdom_67", "Hashimid Emirate", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x007A7800),
# ("kingdom_68", "Empire of Aegean", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00FF8C66),
# ("kingdom_69", "Kingdom of Georgia", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00855400),
# ("kingdom_70", "Maghrawavid Dynasty", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x006F6457),
# ("kingdom_71", "Kingdom of Epirius", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00B86938),
# ("kingdom_72", "Kingdom of Karvuna-Moesia", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x004C6961),
# ("kingdom_73", "Zayanid Dynasty", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00BB4C50),
# ("kingdom_74", "Kingdom of Croatia", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00B916B4),
# ("kingdom_75", "Middle Rurikid Dynasty", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("kingdom_1", -0.50),("kingdom_78", 0.85),("kingdom_76", 0.85),("kingdom_77", 0.85),("forest_bandits", -0.05)], [], 0x006C8B93),
# ("kingdom_76", "West Rurikid Dynasty", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("kingdom_1", -0.50),("kingdom_75", 0.85),("kingdom_78", 0.85),("kingdom_77", 0.85),("forest_bandits", -0.05)], [], 0x0048DAFF),
# ("kingdom_77", "East Rurikid Dynasty", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("kingdom_1", -0.50),("kingdom_75", 0.85),("kingdom_76", 0.85),("kingdom_78", 0.85),("forest_bandits", -0.05)], [], 0x00B1EFFF),
# ("kingdom_78", "North Rurikid Dynasty", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("kingdom_1", -0.50),("kingdom_75", 0.85),("kingdom_76", 0.85),("kingdom_77", 0.85),("forest_bandits", -0.05)], [], 0x00376571),
# ("kingdom_79", "Kingdom of Gwynedd", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("kingdom_9", -0.80),("forest_bandits", -0.05)], [], 0x00A0D1A0),
# ("kingdom_80", "Most Serene House Doria", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00FFAD26),
# ("kingdom_81", "Most Serene House D'Appiano", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00F19E0E),
# ("kingdom_82", "Kingdom of Italy", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00A1ADE1),
# ("kingdom_83", "Kingdom of Florence", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00D8239C),
# ("kingdom_84", "Kingdom of House Premyslid", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0x00aca6ce),
# #End old colors
#Begin Civil (If rebels win the war, they change to a civil faction below). #84-126 for testing
# ("kingdom_85", "Empire Of Swadia", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0XCC9900),
# ("kingdom_86", "Empire Of Vaegir", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0X669999),
# ("kingdom_87", "Khergit Empire", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0XCC66FF),
# ("kingdom_88", "Empire Of Nord", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0X3333FF),
# ("kingdom_89", "Empire Of Rhodok", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0X99FF66),
# ("kingdom_90", "Sarranid Empire", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0XCCFF66),
# ("kingdom_91", "Empire Of Swadia", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0XCC9900),
# ("kingdom_92", "Empire Of Vaegir", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0X669999),
# ("kingdom_93", "Khergit Empire", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0XCC66FF),
# ("kingdom_94", "Empire Of Nord", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0X3333FF),
# ("kingdom_95", "Empire Of Rhodok", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0X99FF66),
# ("kingdom_96", "Sarranid Empire", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0XCCFF66),
# ("kingdom_97", "Empire Of Swadia", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0XCC9900),
# ("kingdom_98", "Empire Of Vaegir", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0X669999),
# ("kingdom_99", "Khergit Empire", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0XCC66FF),
# ("kingdom_100", "Empire Of Nord", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0X3333FF),
# ("kingdom_101", "Empire Of Rhodok", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0X99FF66),
# ("kingdom_102", "Sarranid Empire", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0XCCFF66),
# ("kingdom_103", "Empire Of Swadia", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0XCC9900),
# ("kingdom_104", "Empire Of Vaegir", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0X669999),
# ("kingdom_105", "Khergit Empire", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0XCC66FF),
# ("kingdom_106", "Empire Of Nord", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0X3333FF),
# ("kingdom_107", "Empire Of Rhodok", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0X99FF66),
# ("kingdom_108", "Sarranid Empire", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0XCCFF66),
# ("kingdom_109", "Empire Of Swadia", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0XCC9900),
# ("kingdom_110", "Empire Of Vaegir", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0X669999),
# ("kingdom_111", "Khergit Empire", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0XCC66FF),
# ("kingdom_112", "Empire Of Nord", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0X3333FF),
# ("kingdom_113", "Empire Of Rhodok", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0X99FF66),
# ("kingdom_114", "Sarranid Empire", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0XCCFF66),
# ("kingdom_115", "Empire Of Swadia", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0XCC9900),
# ("kingdom_116", "Empire Of Vaegir", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0X669999),
# ("kingdom_117", "Khergit Empire", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0XCC66FF),
# ("kingdom_118", "Empire Of Nord", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0X3333FF),
# ("kingdom_119", "Empire Of Rhodok", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0X99FF66),
# ("kingdom_120", "Sarranid Empire", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0XCCFF66),
# ("kingdom_121", "Empire Of Swadia", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0XCC9900),
# ("kingdom_122", "Empire Of Vaegir", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0X669999),
# ("kingdom_123", "Khergit Empire", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0XCC66FF),
# ("kingdom_124", "Empire Of Nord", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0X3333FF),
# ("kingdom_125", "Empire Of Rhodok", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0X99FF66),
# ("kingdom_126", "Sarranid Empire", 0, 0.9, [("outlaws",-0.05),("peasant_rebels", -0.1),("deserters", -0.02),("mountain_bandits", -0.05),("forest_bandits", -0.05)], [], 0XCCFF66),
#End civil
#KAOS (POLITICAL)
#####Kaos Safe End
#####Kaos end add faction
("kingdoms_end", "{!}kingdoms end", 0, 0.0, []),
("robber_knights", "{!}robber knights", 0, 0.1, []),
("khergits", "{!}Khergits", 0, 0.5, []),
("black_khergits", "{!}Black Khergits", 0, 0.5, [("kingdom_1",-0.02),("kingdom_2",-0.02),("player_faction",-0.30)]),
("manhunters", "Manhunters", 0, 0.5, [("forest_bandits",-0.60),("player_faction",0.10),("deserters",-0.60),("mountain_bandits",-0.60),("outlaws",-0.60)]),
("deserters", "Deserters", 0, 0.5, [("kingdom_9",-0.02),("kingdom_15",-0.02),("kingdom_23",-0.02),("kingdom_3",-0.02),("kingdom_24",-0.02),("kingdom_10",-0.02),("kingdom_1",-0.02),("kingdom_20",-0.02),("kingdom_13",-0.02),("kingdom_19",-0.02),("kingdom_36",-0.02),("kingdom_11",-0.02),("player_supporters_faction",-0.02),("kingdom_34",-0.02),("kingdom_37",-0.02),("kingdom_25",-0.02),("kingdom_8",-0.02),("kingdom_5",-0.02),("kingdom_42",-0.02),("kingdom_2",-0.02),("kingdom_31",-0.02),("merchants",-0.50),("kingdom_22",-0.02),("kingdom_32",-0.02),("kingdom_35",-0.02),("player_faction",-0.10),("kingdom_18",-0.02),("kingdom_26",-0.02),("papacy",-0.02),("kingdom_38",-0.02),("kingdom_28",-0.02),("crusade",-0.02),("kingdom_39",-0.02),("kingdom_30",-0.02),("manhunters",-0.60),("kingdom_7",-0.02),("kingdom_16",-0.02),("kingdom_6",-0.02),("kingdom_12",-0.02),("kingdom_33",-0.02),("kingdom_40",-0.02),("kingdom_4",-0.02),("kingdom_29",-0.02),("kingdom_41",-0.02),("kingdom_14",-0.02),("kingdom_17",-0.02),("kingdom_27",-0.02)], [], 0x00888888),
("mountain_bandits", "Mountain Bandits", 0, 0.5, [("kingdom_9",-0.05),("kingdom_15",-0.05),("kingdom_23",-0.05),("kingdom_3",-0.05),("kingdom_24",-0.05),("kingdom_10",-0.05),("kingdom_1",-0.05),("kingdom_20",-0.05),("kingdom_13",-0.05),("kingdom_19",-0.05),("kingdom_36",-0.05),("kingdom_11",-0.05),("player_supporters_faction",-0.05),("kingdom_34",-0.05),("kingdom_37",-0.05),("kingdom_25",-0.05),("kingdom_8",-0.05),("kingdom_5",-0.05),("kingdom_42",-0.05),("kingdom_2",-0.05),("kingdom_31",-0.05),("merchants",-0.50),("kingdom_22",-0.05),("kingdom_32",-0.05),("kingdom_35",-0.05),("player_faction",-0.15),("kingdom_18",-0.05),("kingdom_26",-0.05),("papacy",-0.05),("kingdom_38",-0.05),("kingdom_28",-0.05),("crusade",-0.05),("kingdom_39",-0.05),("kingdom_30",-0.05),("manhunters",-0.60),("kingdom_7",-0.05),("kingdom_16",-0.05),("kingdom_6",-0.05),("kingdom_12",-0.05),("kingdom_33",-0.05),("kingdom_40",-0.05),("kingdom_4",-0.05),("kingdom_29",-0.05),("commoners",-0.20),("kingdom_41",-0.05),("kingdom_14",-0.05),("kingdom_17",-0.05),("kingdom_27",-0.05)], [], 0x00888888),
("forest_bandits", "Forest Bandits", 0, 0.5, [("kingdom_9",-0.05),("kingdom_15",-0.05),("kingdom_23",-0.05),("kingdom_3",-0.05),("kingdom_24",-0.05),("kingdom_10",-0.05),("kingdom_1",-0.05),("kingdom_20",-0.05),("kingdom_13",-0.05),("kingdom_19",-0.05),("kingdom_36",-0.05),("kingdom_11",-0.05),("player_supporters_faction",-0.05),("kingdom_34",-0.05),("kingdom_37",-0.05),("kingdom_25",-0.05),("kingdom_8",-0.05),("kingdom_5",-0.05),("kingdom_42",-0.05),("kingdom_2",-0.05),("kingdom_31",-0.05),("merchants",-0.50),("kingdom_22",-0.05),("kingdom_32",-0.05),("kingdom_35",-0.05),("player_faction",-0.15),("kingdom_18",-0.05),("kingdom_26",-0.05),("papacy",-0.05),("kingdom_38",-0.05),("kingdom_28",-0.05),("crusade",-0.05),("kingdom_39",-0.05),("kingdom_30",-0.05),("manhunters",-0.60),("kingdom_7",-0.05),("kingdom_16",-0.05),("kingdom_6",-0.05),("kingdom_12",-0.05),("kingdom_33",-0.05),("kingdom_40",-0.05),("kingdom_4",-0.05),("kingdom_29",-0.05),("commoners",-0.20),("kingdom_41",-0.05),("kingdom_14",-0.05),("kingdom_17",-0.05),("kingdom_27",-0.05)], [], 0x00888888),
("undeads", "{!}Undeads", 0, 0.5, [("player_faction",-0.50),("commoners",-0.70)]),
("slavers", "{!}Slavers", 0, 0.1, []),
("peasant_rebels", "{!}Peasant Rebels", 0, 1.0, [("kingdom_9",-0.10),("kingdom_15",-0.10),("kingdom_23",-0.10),("kingdom_3",-0.10),("kingdom_24",-0.10),("kingdom_10",-0.10),("noble_refugees",-1.00),("kingdom_1",-0.10),("kingdom_20",-0.10),("kingdom_13",-0.10),("kingdom_19",-0.10),("kingdom_36",-0.10),("kingdom_11",-0.10),("player_supporters_faction",-0.10),("kingdom_34",-0.10),("kingdom_37",-0.10),("kingdom_25",-0.10),("kingdom_8",-0.10),("kingdom_5",-0.10),("kingdom_42",-0.10),("kingdom_2",-0.10),("kingdom_31",-0.10),("kingdom_22",-0.10),("kingdom_32",-0.10),("kingdom_35",-0.10),("player_faction",-0.40),("kingdom_18",-0.10),("kingdom_26",-0.10),("papacy",-0.10),("kingdom_38",-0.10),("kingdom_28",-0.10),("crusade",-0.10),("kingdom_39",-0.10),("kingdom_30",-0.10),("kingdom_7",-0.10),("kingdom_16",-0.10),("kingdom_6",-0.10),("kingdom_12",-0.10),("kingdom_33",-0.10),("kingdom_40",-0.10),("kingdom_4",-0.10),("kingdom_29",-0.10),("kingdom_41",-0.10),("kingdom_14",-0.10),("kingdom_17",-0.10),("kingdom_27",-0.10)]),
("noble_refugees", "{!}Noble Refugees", 0, 0.5, [("peasant_rebels",-1.00)]),
("crusade", "Crusaders", 0, 0.9, [("kingdom_3",-0.50),("kingdom_20",-0.50),("kingdom_36",-0.50),("kingdom_34",-0.50),("kingdom_25",-0.50),("kingdom_2",-0.50),("kingdom_31",-0.50),("peasant_rebels",-0.10),("forest_bandits",-0.05),("kingdom_35",-0.50),("papacy",-0.50),("kingdom_38",-0.50),("kingdom_28",-0.50),("deserters",-0.02),("mountain_bandits",-0.05),("outlaws",-0.05),("kingdom_33",-0.50),("kingdom_27",-0.50)], [], 0x00fff17a),
("end_minor_faction", "Village Idiots", 0, 0.9, [], [], 0x00fff17a),
]
| 131.091716 | 1,080 | 0.608296 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 32,636 | 0.736555 |
3492885ac8a900a114a775185286f143d7123ed9
| 236 |
py
|
Python
|
data/python/pattern_12/code.py
|
MKAbuMattar/grammind-api
|
ccf6e9898f50f9e4c7671abecf65029198e2dc72
|
[
"MIT"
] | 3 |
2021-12-29T13:03:27.000Z
|
2021-12-31T20:27:17.000Z
|
data/python/pattern_12/code.py
|
MKAbuMattar/grammind-api
|
ccf6e9898f50f9e4c7671abecf65029198e2dc72
|
[
"MIT"
] | 2 |
2022-01-15T13:08:13.000Z
|
2022-01-18T19:41:07.000Z
|
data/python/pattern_12/code.py
|
MKAbuMattar/grammind-api
|
ccf6e9898f50f9e4c7671abecf65029198e2dc72
|
[
"MIT"
] | null | null | null |
# MAIN PROGRAM STARTS HERE:
# Read the square's size n, then print an n x n grid filled column-major:
# row r (1-based) contains r, r+n, r+2n, ... — e.g. n=3 prints
#   1 4 7
#   2 5 8
#   3 6 9
num = int(input('Enter the number of rows and columns for the square: '))
for row in range(num):
    # Each cell on this row is the row start (row+1) plus a multiple of num.
    cells = ('{} '.format(row + 1 + col * num) for col in range(num))
    print(''.join(cells))
| 26.222222 | 73 | 0.555085 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 88 | 0.372881 |
34940ee04b10ec17bfb59f0b5abe7be0ed5efa38
| 4,602 |
py
|
Python
|
models/bert_with_conversation_context.py
|
RobinRojowiec/intent-recognition-in-doctor-patient-interviews
|
b91c7a9f3ad70edd0f39b56e3219f48d1fcf2078
|
[
"Apache-2.0"
] | null | null | null |
models/bert_with_conversation_context.py
|
RobinRojowiec/intent-recognition-in-doctor-patient-interviews
|
b91c7a9f3ad70edd0f39b56e3219f48d1fcf2078
|
[
"Apache-2.0"
] | null | null | null |
models/bert_with_conversation_context.py
|
RobinRojowiec/intent-recognition-in-doctor-patient-interviews
|
b91c7a9f3ad70edd0f39b56e3219f48d1fcf2078
|
[
"Apache-2.0"
] | 1 |
2021-11-24T18:48:47.000Z
|
2021-11-24T18:48:47.000Z
|
import pickle
import torch
import torch.nn as nn
from torchtext.data import Field
from common.paths import ROOT_RELATIVE_DIR, MODEL_PATH
from models.bert_layer import BERTLayer
from probability.tables import TransitionTable
from utility.model_parameter import Configuration, ModelParameter
class BERTWithConversationContext(nn.Module):
    """BERT-based utterance classifier augmented with conversation context.

    The forward pass concatenates the BERT encoding of the current sample
    with optional context features (previous-class embedding, position
    embedding, previous-utterance encoding, transition probabilities),
    then applies dropout and a linear classification layer.
    """

    def __init__(self, config: Configuration, class_count=100,
                 hidden_dim=100,
                 class_field=None, device=torch.device('cpu')):
        """
        Build the model layers and load the precomputed transition table.

        :param config: model configuration (provides MAX_LENGTH and
            EMBEDDING_SIZE)
        :param class_count: number of output classes
        :param hidden_dim: stored but not used by any visible layer here
        :param class_field: torchtext Field mapping class labels to indices
        :param device: torch device the transition matrix is built on
        """
        super(BERTWithConversationContext, self).__init__()
        # set parameters
        self.hidden_dim = hidden_dim
        self.class_field: Field = class_field
        self.class_count = class_count
        self.max_length = config.get_int(ModelParameter.MAX_LENGTH)
        self.embedding_size = config.get_int(ModelParameter.EMBEDDING_SIZE)
        self.device = device
        # Feature toggles: hard-coded here, so only the class embedding of
        # the previous utterance is active in this configuration.
        self.with_position_embedding = False
        self.with_class_embedding = True
        self.with_utterance_classifier = False
        self.with_transition_probs = False
        # create and initialize layers
        # learns embedding vector for class labels
        self.class_embedding = nn.Embedding(self.class_count, self.embedding_size)
        # learns class label for positions (up to 100 positions)
        self.position_embedding = nn.Embedding(100, self.embedding_size)
        # Load the pickled class-transition table and freeze it as a fixed
        # (non-trainable) embedding of per-class transition probabilities.
        with open(ROOT_RELATIVE_DIR + MODEL_PATH + "transition_table.pckl", "rb") as file_prt:
            transition_table: TransitionTable = pickle.load(file_prt)
            transition_table.lambda_value = 1
            transition_table.class_field = class_field
        fixed_embedding = transition_table.create_probability_matrix(device=self.device)
        self.transition_embedding = nn.Embedding(self.class_count, self.class_count)
        self.transition_embedding.weight.data.copy_(fixed_embedding)
        self.transition_embedding.weight.requires_grad = False
        # BERT encoder used both for the current sample and (optionally)
        # for the previous utterance
        self.utterance_classifier = BERTLayer(device=self.device)
        # output layer
        self.dropout = nn.Dropout(p=0.5)
        # NOTE(review): get_additional_length() already includes the BERT
        # output length when with_utterance_classifier is True, and the
        # sample encoding is added again here — verify there is no
        # double-count when that flag is enabled.
        self.linear_input_size = self.get_embeddings_length() + self.get_additional_length() + self.utterance_classifier.get_output_length()
        self.linear_layer = nn.Linear(self.linear_input_size, class_count)
        # NOTE(review): Softmax over dim=0 normalizes across the batch,
        # not across classes (dim=1) — confirm this is intentional.
        self.softmax = nn.Softmax(dim=0)

    def get_embeddings_length(self):
        """Total width of the enabled learned context embeddings."""
        multiplier = 0
        if self.with_position_embedding:
            multiplier += 1
        if self.with_class_embedding:
            multiplier += 1
        return self.embedding_size * multiplier

    def get_additional_length(self):
        """Width of the optional non-embedding context features."""
        add_length = 0
        if self.with_utterance_classifier:
            add_length += self.utterance_classifier.get_output_length()
        if self.with_transition_probs:
            add_length += self.class_count
        return add_length

    def forward(self, sample, previous_classes, positions, previous_sample, *args, **kwargs):
        """
        Classify *sample* using the enabled conversation-context features.

        :param sample: current utterance input for the BERT layer
        :param previous_classes: class indices of the previous utterances
        :param positions: position indices of the utterances
        :param previous_sample: previous utterance input (used only when
            with_utterance_classifier is enabled)
        :return: (raw logits, softmax probabilities)
        """
        representations = []
        # BERT encoding of the current sample is always included
        sample_embed = self.utterance_classifier(sample)
        representations.append(sample_embed)
        # optionally encode the previous utterance (frozen, eval mode)
        if self.with_utterance_classifier:
            with torch.no_grad():
                self.utterance_classifier.eval()
                representations.append(self.utterance_classifier(previous_sample))
        # learned embeddings for previous class and position
        if self.with_class_embedding:
            embed_previous_classes = self.class_embedding(previous_classes)
            representations.append(embed_previous_classes)
        if self.with_position_embedding:
            embed_position = self.position_embedding(positions)
            representations.append(embed_position)
        # fixed transition-probability row for the previous class
        if self.with_transition_probs:
            representations.append(self.transition_embedding(previous_classes))
        # concatenate all features along the feature dimension and regularize
        concat_class_and_position = torch.cat(representations, 1)
        concat_class_and_position = self.dropout(concat_class_and_position)
        # linear transformation to class scores
        output = self.linear_layer(concat_class_and_position)
        # calculate probabilities (see dim=0 note in __init__)
        probs = self.softmax(output)
        return output, probs
| 38.672269 | 140 | 0.69535 | 4,307 | 0.935897 | 0 | 0 | 0 | 0 | 0 | 0 | 653 | 0.141895 |
3495b6bc67d0421d48767015a4e9b3c968d3cfd4
| 3,468 |
py
|
Python
|
2016/qualification/sol.py
|
victorWeiFreelancer/Hashcode
|
e9cac8531f07962fedbfba6605acfa6e6067747d
|
[
"MIT"
] | null | null | null |
2016/qualification/sol.py
|
victorWeiFreelancer/Hashcode
|
e9cac8531f07962fedbfba6605acfa6e6067747d
|
[
"MIT"
] | null | null | null |
2016/qualification/sol.py
|
victorWeiFreelancer/Hashcode
|
e9cac8531f07962fedbfba6605acfa6e6067747d
|
[
"MIT"
] | null | null | null |
import sys
import math
from time import gmtime, strftime
sys.dont_write_bytecode = True
class Position():
    """A 2D grid coordinate."""

    def __init__(self, x, y):
        super().__init__()
        self.x, self.y = x, y


# Bug fix: distance() was indented inside Position (a method without self),
# but utility() calls it as a bare module-level name, which raised
# NameError. It is now a module-level function.
def distance(pos1, pos2):
    """Return the Euclidean distance between two Position instances."""
    return ((pos1.x - pos2.x) ** 2 + (pos1.y - pos2.y) ** 2) ** 0.5
class Warehouse(object):
    """A warehouse on the grid holding per-product stock counts."""

    def __init__(self, x, y):
        super().__init__()
        self.pos = Position(x, y)  # grid location
        self.storage = []          # stock per product type, filled by readInput
class CustomOrder(object):
    """A customer order: delivery location plus the requested items."""

    def __init__(self, id, x, y):
        super().__init__()
        self.id = id               # order index
        self.pos = Position(x, y)  # delivery location
        self.numProd = 0           # number of requested items
        self.itemsList = []        # product types requested
        self.utility = 0           # scheduling score, filled later
class Drone(object):
    """A delivery drone with a position and remaining carry capacity."""

    def __init__(self, id, x, y, remainCap):
        super().__init__()
        self.id = id
        self.pos = Position(x, y)
        self.actions = []
        # Bug fix: this assignment was left unfinished in the original
        # ("self.remainCap ="), which is a SyntaxError that prevented the
        # whole module from being imported.
        self.remainCap = remainCap
def readInput(Plookup, warehouses, orders, drones):
    """
    Parse the Hashcode problem input and fill the given lists in place.

    Input comes from stdin when no command-line argument is given,
    otherwise from the file named by sys.argv[1].

    :param Plookup: out-list, receives the product weights
    :param warehouses: out-list, receives Warehouse objects
    :param orders: out-list, receives CustomOrder objects
    :param drones: out-list, receives Drone objects (all start at warehouse 0)
    :return: (rows, cols, drone count, deadline, max payload,
        product count, warehouse count)
    """
    if len(sys.argv) == 1:
        nRow, nCol, nD, T, L = list(map(int, input().split()))
        P = int(input())
        Plookup.extend(list(map(int, input().split())))
        nW = int(input())
        for i in range(nW):
            pos = list(map(int, input().split()))
            w = Warehouse(pos[0], pos[1])
            w.storage.extend(list(map(int, input().split())))
            warehouses.append(w)
        C = int(input())
        for i in range(C):
            pos = list(map(int, input().split()))
            co = CustomOrder(i, pos[0], pos[1])
            co.numProd = int(input())
            co.itemsList = list(map(int, input().split()))
            orders.append(co)
    else:
        with open(sys.argv[1], 'r') as fo:
            nRow, nCol, nD, T, L = list(map(int, fo.readline().split()))
            P = int(fo.readline())
            Plookup.extend(list(map(int, fo.readline().split())))
            nW = int(fo.readline())
            for i in range(nW):
                pos = list(map(int, fo.readline().split()))
                w = Warehouse(pos[0], pos[1])
                w.storage.extend(list(map(int, fo.readline().split())))
                warehouses.append(w)
            C = int(fo.readline())
            for i in range(C):
                pos = list(map(int, fo.readline().split()))
                co = CustomOrder(i, pos[0], pos[1])
                co.numProd = int(fo.readline())
                co.itemsList = list(map(int, fo.readline().split()))
                orders.append(co)
    for i in range(nD):
        d = Drone(i, warehouses[0].pos.x, warehouses[0].pos.y, L)
        # Bug fix: drones were constructed but never stored, so the
        # caller's drones list always stayed empty.
        drones.append(d)
    print(nRow, nCol, nD, T, L, P, nW)
    return nRow, nCol, nD, T, L, P, nW
def utility(warehouse, order):
    """Score *order* by proximity to *warehouse* (closer = higher score)."""
    return 1.0 / distance(warehouse.pos, order.pos)
def utility_cal(L, Plookup, warehouses, orders):
    """Sort *orders* in place by utility relative to the first warehouse."""
    base = warehouses[0]
    orders.sort(key=lambda order: utility(base, order))
def schedule(L, Plookup, warehouses, orders, drones):
    """
    Assign deliveries to drones.

    Bug fix: the original body was an empty ``for`` loop (a SyntaxError
    that made the module unimportable); a placeholder body keeps the
    module importable until the assignment logic is written.
    """
    for order in orders:
        # TODO: assign this order's items to a drone and record actions
        pass
def main():
    """Entry point: read the problem input and rank orders by utility."""
    product_weights = []
    warehouse_list = []
    order_list = []
    drone_list = []
    start_time = strftime("%H:%M:%S")
    n_rows, n_cols, n_drones, deadline, payload, n_products, n_warehouses = readInput(
        product_weights, warehouse_list, order_list, drone_list)
    utility_cal(payload, product_weights, warehouse_list, order_list)
    # TODO: run schedule() and write the resulting action plan to a file
    # named after the input file and the start/finish timestamps.


if __name__ == '__main__':
    main()
| 30.421053 | 83 | 0.531142 | 663 | 0.191176 | 0 | 0 | 0 | 0 | 0 | 0 | 234 | 0.067474 |
3496f98bc54d566d0b2f81898c8f900cd96ce375
| 2,313 |
py
|
Python
|
app/__init__.py
|
doshmajhan/python-idp
|
55be99afd02de4e8b0840c0a2236906d4b9a1827
|
[
"MIT"
] | 1 |
2021-06-13T18:29:20.000Z
|
2021-06-13T18:29:20.000Z
|
app/__init__.py
|
doshmajhan/python-idp
|
55be99afd02de4e8b0840c0a2236906d4b9a1827
|
[
"MIT"
] | 1 |
2022-03-30T05:37:09.000Z
|
2022-03-30T05:37:09.000Z
|
app/__init__.py
|
doshmajhan/python-idp
|
55be99afd02de4e8b0840c0a2236906d4b9a1827
|
[
"MIT"
] | null | null | null |
from flask import Flask
from flask_restful import Api
from saml2 import saml, samlp
from saml2.config import IdPConfig
from saml2.mdstore import MetadataStore
from saml2.server import Server
from app.config import config
from app.database import db
from app.resources.idp_config import IdpConfigResource
from app.resources.index import Index
from app.resources.login import Login
from app.resources.metadata import (
IdpMetadataResource,
SpMetadataListResource,
SpMetadataResource,
)
from app.resources.sso import SsoResource
from app.resources.users import UsersListResource, UsersResource
from app.schemas import ma
# TODO error handling for if config file doesn't exist
def create_app(config_name="default") -> Flask:
    """Application factory: build and wire up the Flask app and SAML IdP."""
    flask_app: Flask = Flask(__name__)
    flask_app.config.from_object(config[config_name])

    # Bind the SQLAlchemy and Marshmallow extensions to this app instance.
    db.init_app(flask_app)
    ma.init_app(flask_app)

    # Flask-RESTful API wrapper around the app.
    rest_api: Api = Api(flask_app)

    # Build the SAML identity-provider server from its configuration file.
    saml_config = IdPConfig()
    saml_config.load_file(flask_app.config["IDP_CONFIG"])
    store: MetadataStore = MetadataStore([saml, samlp], None, saml_config)
    saml_config.metadata = store
    idp: Server = Server(config=saml_config)

    # Register all API resources.
    rest_api.add_resource(Index, "/")
    rest_api.add_resource(Login, "/login")
    rest_api.add_resource(IdpConfigResource, "/config", resource_class_args=[idp])
    rest_api.add_resource(SsoResource, "/sso", resource_class_args=[idp])
    rest_api.add_resource(IdpMetadataResource, "/metadata", resource_class_args=[idp])
    rest_api.add_resource(SpMetadataListResource, "/metadata/sp", resource_class_args=[idp])
    rest_api.add_resource(
        SpMetadataResource,
        "/metadata/sp/<string:sp_entity_id>",
        resource_class_args=[idp],
    )
    rest_api.add_resource(UsersListResource, "/users")
    rest_api.add_resource(UsersResource, "/users/<string:username>")

    # Create any missing database tables.
    with flask_app.app_context():
        db.create_all()

    # Attach a permissive CORS header to every response.
    @flask_app.after_request
    def add_header(response):
        response.headers["Access-Control-Allow-Origin"] = "*"
        return response

    return flask_app
| 31.684932 | 87 | 0.736273 | 0 | 0 | 0 | 0 | 134 | 0.057933 | 0 | 0 | 515 | 0.222655 |
3499539f373f9ce023ee2fc68de3748959a132f3
| 5,190 |
py
|
Python
|
install.py
|
mrobraven/majestic-pi
|
8ebe001a0e2f2eca475c2a390228e7810630f62e
|
[
"MIT"
] | null | null | null |
install.py
|
mrobraven/majestic-pi
|
8ebe001a0e2f2eca475c2a390228e7810630f62e
|
[
"MIT"
] | null | null | null |
install.py
|
mrobraven/majestic-pi
|
8ebe001a0e2f2eca475c2a390228e7810630f62e
|
[
"MIT"
] | null | null | null |
import os

# Root URL all media files are fetched from.
BASE_URL = "https://raw.githubusercontent.com/mrobraven/majestic-pi/master/majestic-pi"

# (cinema name, number of mimic clips) pairs, fetched in this order.
MIMICS = [("Odeon", 4), ("Vue", 3), ("Cineworld", 4)]

# Advert base names; each is downloaded as <name>.mp4 and renamed
# to <name>.dca.mp4.
ADVERTS = ["Digital_Cinema_Media", "Pearl_And_Dean", "Pearl_And_Dean_Old"]


def fetch(relative_path):
    """Download one file from the repository into the current directory."""
    os.system("wget {}/{}".format(BASE_URL, relative_path))


def _fetch_mimics():
    """Download the cinema mimic clips into ./mimics/<cinema>/."""
    os.mkdir("mimics")
    os.chdir("mimics")
    for cinema, clip_count in MIMICS:
        print("Fetching {} Mimics...".format(cinema))
        os.mkdir(cinema)
        os.chdir(cinema)
        for clip in range(1, clip_count + 1):
            fetch("mimics/{}/{}.mp4".format(cinema, clip))
        os.chdir("..")
    os.chdir("..")


def _fetch_adverts():
    """Download the adverts into ./DCP/adverts/ and tag them .dca.mp4."""
    print("Fetching Adverts...")
    os.mkdir("DCP")
    os.chdir("DCP")
    os.mkdir("adverts")
    os.chdir("adverts")
    for name in ADVERTS:
        fetch("DCP/adverts/{}.mp4".format(name))
    for name in ADVERTS:
        os.rename("{}.mp4".format(name), "{}.dca.mp4".format(name))
    os.chdir("..")
    os.chdir("..")


def setup_screen(dirname):
    """
    Create <dirname>/ and populate it with the intro, the 3D feature,
    the cinema mimics and the adverts. Refactored from two near-identical
    copy-pasted blocks; the downloaded files and directory layout are the
    same as before. (Unlike the original, the working directory is always
    restored afterwards — the original 2D pass ended inside DCP/adverts.)
    """
    os.mkdir(dirname)
    os.chdir(dirname)
    print("Configuring {}...".format(dirname))
    print("Fetching Intro...")
    fetch("intro.mp4")
    print("Fetching 3D Feature...")
    fetch("3D/glasses.mp4")
    _fetch_mimics()
    _fetch_adverts()
    os.chdir("..")


setup_screen("3D")
setup_screen("2D")
print("Done!")
| 55.212766 | 129 | 0.773796 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4,163 | 0.802119 |
349ac7bf295420ef2ab524ccb2bae107924bef1f
| 9,274 |
py
|
Python
|
docs/conf.py
|
myii/saltenv
|
2309e6759504f5326a444270c8e8bb3edf14b760
|
[
"Apache-2.0"
] | 5 |
2022-03-25T17:15:04.000Z
|
2022-03-28T23:24:26.000Z
|
docs/conf.py
|
myii/saltenv
|
2309e6759504f5326a444270c8e8bb3edf14b760
|
[
"Apache-2.0"
] | null | null | null |
docs/conf.py
|
myii/saltenv
|
2309e6759504f5326a444270c8e8bb3edf14b760
|
[
"Apache-2.0"
] | 2 |
2022-03-26T06:33:30.000Z
|
2022-03-29T19:43:50.000Z
|
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/master/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
import datetime
from pathlib import Path

# -- Project information -----------------------------------------------------

this_year = datetime.datetime.today().year
if this_year == 2022:
    copyright_year = 2022
else:
    copyright_year = f"2022 - {this_year}"
project = "saltenv"
copyright = f"{copyright_year}, nicholasmhughes"
author = "nicholasmhughes"

# Strip version info from ../../saltenv/version.py
with open(Path(Path(__file__).parent.parent, "saltenv", "version.py")) as version_file:
    for file_line in version_file:
        if "version =" in file_line:
            # Bug fix: .strip() removes the trailing newline that the old
            # split/replace left attached to the version string.
            version = file_line.split(" ")[2].replace('"', "").strip()
            break

# Variables to pass into the docs from sitevars.rst for rst substitution
with open("sitevars.rst") as site_vars_file:
    site_vars = site_vars_file.read().splitlines()

rst_prolog = """
{}
""".format(
    "\n".join(site_vars[:])
)
# -- General configuration ---------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#
needs_sphinx = "3.5.3"

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    "sphinx_copybutton",
    "sphinx.ext.autodoc",
    "sphinx.ext.autosummary",
    "sphinx.ext.napoleon",
    "sphinx.ext.intersphinx",
    "sphinx.ext.viewcode",
    "sphinx.ext.todo",
    "sphinx.ext.coverage",
    # "sphinxcontrib.spelling",
]

# Render TODO directives, set to FALSE before publishing
# This is incredibly helpful, when set to True, to know what is yet to be
# completed in documentation.
todo_include_todos = True

# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]

# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = ".rst"

# The master toctree document.
master_doc = "index"

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = [
    "_build",
    "Thumbs.db",
    ".DS_Store",
    ".vscode",
    ".venv",
    ".git",
    ".gitlab-ci",
    ".gitignore",
    "sitevars.rst",
]
# NOTE: autosummary_generate is set once in the Autosummary Config section
# below; a duplicate assignment that used to live here was removed.
# ----- Napoleon Config ------------------------------------------------------
# For using Google-style docstrings in Python code as a standard, which is
# highly recommended. This improves tooling by expecting a standard way of
# using docstrings in your project.
# https://www.sphinx-doc.org/en/master/usage/extensions/napoleon.html
napoleon_google_docstring = True
napoleon_numpy_docstring = False
napoleon_include_init_with_doc = True
napoleon_include_private_with_doc = False
napoleon_include_special_with_doc = True
napoleon_use_admonition_for_examples = False
napoleon_use_admonition_for_notes = False
napoleon_use_admonition_for_references = False
napoleon_use_ivar = False
napoleon_use_param = True
napoleon_use_rtype = True

# ----- Intersphinx Config ---------------------------------------------------
# This extension can generate automatic links to the documentation of objects
# in other projects, such as the official Python or POP docs.
# https://www.sphinx-doc.org/en/master/usage/extensions/intersphinx.html
intersphinx_mapping = {
    "python": ("https://docs.python.org/3", None),
    "pytest": ("https://pytest.readthedocs.io/en/stable", None),
    "pop": ("https://pop.readthedocs.io/en/latest/", None),
}

# ----- Autodoc Config -------------------------------------------------------
# This extension can import the modules you are documenting, and pull in
# documentation from docstrings in a semi-automatic way. This is powerful!
# https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html
autodoc_default_options = {"member-order": "bysource"}

# ----- Autosummary Config ---------------------------------------------------
# This extension generates function/method/attribute summary lists, similar to
# those output e.g. by Epydoc and other API doc generation tools. This is
# especially useful when your docstrings are long and detailed, and putting
# each one of them on a separate page makes them easier to read.
# https://www.sphinx-doc.org/en/master/usage/extensions/autosummary.html
autosummary_generate = True

# -- Options for HTML output -------------------------------------------------

# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
#
html_theme = "furo"
html_title = f"{project} Documentation"
html_show_sourcelink = True  # False on private repos; True on public repos

# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ["_static"]

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# For example, official Salt Project docs use images from the salt-branding-guide
# https://gitlab.com/saltstack/open/salt-branding-guide/
#
# Example for >=4.0.0 of Sphinx (support for favicon via URL)
# html_logo = "https://gitlab.com/saltstack/open/salt-branding-guide/-/raw/master/logos/SaltProject_altlogo_teal.png?inline=true"

# Example for <4.0.0 of Sphinx, if added into _static/img/ and html_static_path is valid
# html_logo = "_static/img/SaltProject_altlogo_teal.png"

# The name of an image file (within the static path) to use as favicon of the
# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large. Favicons can be up to at least 228x228. PNG
# format is supported as well, not just .ico'
# For example, official Salt Project docs use images from the salt-branding-guide
# https://gitlab.com/saltstack/open/salt-branding-guide/
#
# Example for >=4.0.0 of Sphinx (support for favicon via URL)
# html_favicon = "https://gitlab.com/saltstack/open/salt-branding-guide/-/raw/master/logos/SaltProject_Logomark_teal.png?inline=true"

# Example for <4.0.0 of Sphinx, if added into _static/img/ and html_static_path is valid
# html_favicon = "_static/img/SaltProject_Logomark_teal.png"

# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself.  Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}

# -- Options for HTMLHelp output ---------------------------------------------

# Output file base name for HTML help builder.
htmlhelp_basename = "saltenvdoc"

# -- Options for LaTeX output ------------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',
    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (
        master_doc,
        "saltenv.tex",
        "saltenv Documentation",
        "nicholasmhughes",
        "manual",
    ),
]

# -- Options for manual page output ------------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (
        master_doc,
        "saltenv",
        "saltenv Documentation",
        [author],
        1,
    )
]

# -- Options for Texinfo output ----------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (
        master_doc,
        "saltenv",
        "saltenv Documentation",
        author,
        "saltenv",
        "One line description of project.",
        "Miscellaneous",
    ),
]

# -- Extension configuration -------------------------------------------------
349b663f81956d7e9257fe5e42ee283abd8a9e68
| 2,094 |
py
|
Python
|
utils.py
|
momskidvaava/firefoxbot
|
1ed85e4f6594b144ceabdecb19e6e022180e639e
|
[
"MIT"
] | null | null | null |
utils.py
|
momskidvaava/firefoxbot
|
1ed85e4f6594b144ceabdecb19e6e022180e639e
|
[
"MIT"
] | null | null | null |
utils.py
|
momskidvaava/firefoxbot
|
1ed85e4f6594b144ceabdecb19e6e022180e639e
|
[
"MIT"
] | 1 |
2021-11-25T14:05:25.000Z
|
2021-11-25T14:05:25.000Z
|
import datetime
import typing
import localization
from configurator import Config
def get_restriction_time(string: str) -> typing.Optional[int]:
    """
    Parse a user restriction duration like "30m", "2h" or "7d" into seconds.

    :param string: a number followed by a unit suffix: "m" for minutes,
        "h" for hours, "d" for days
    :return: number of seconds to restrict, or None if the string cannot
        be parsed
    """
    if len(string) < 2:
        return None
    # seconds per unit, keyed by the trailing suffix letter
    multipliers = {"m": 60, "h": 3600, "d": 86400}
    seconds_per_unit = multipliers.get(string[-1])
    if seconds_per_unit is None:
        return None
    try:
        number = int(string[:-1])
    except ValueError:
        # Bug fix: int() raises ValueError (not TypeError) on a
        # non-numeric string, so malformed input like "xm" used to
        # propagate an exception instead of returning None.
        return None
    return seconds_per_unit * number
def get_report_comment(message_date: datetime.datetime, message_id: int, report_message: typing.Optional[str]) -> str:
    """
    Build the report text shown to admins in the report chat.

    :param message_date: when the reported message was sent
    :param message_id: ID of the reported message
    :param report_message: optional note explaining what is wrong
    :return: formatted report message
    """
    formatted_date = message_date.strftime(localization.get_string("report_date_format"))
    text = localization.get_string("report_message").format(
        date=formatted_date,
        chat_id=get_url_chat_id(int(Config.GROUP_MAIN)),
        msg_id=message_id,
    )
    if report_message:
        text += localization.get_string("report_note").format(note=report_message)
    return text
def get_url_chat_id(chat_id: int) -> int:
    """
    Convert a supergroup chat_id into the numeric ID usable in
    https://t.me/c/{chat_id}/{msg_id} links.

    Supergroup IDs are the link ID offset by -1_000_000_000_000, so adding
    the offset back and taking the absolute value recovers the link ID
    without relying on a hardcoded chat username.

    :param chat_id: chat_id to apply the offset to
    :return: chat_id for t.me links
    """
    link_id = chat_id + 1_000_000_000_000
    return abs(link_id)
| 33.238095 | 118 | 0.668577 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,054 | 0.503343 |
349bc17ddf478dd5dde43f9722c96d91cc6f8502
| 548 |
py
|
Python
|
test/torch/differential_privacy/test_pate.py
|
amitkarn3/PythnSyft
|
8eaa637e1ca54c963281e847556cb14b4a76b46b
|
[
"Apache-1.1"
] | null | null | null |
test/torch/differential_privacy/test_pate.py
|
amitkarn3/PythnSyft
|
8eaa637e1ca54c963281e847556cb14b4a76b46b
|
[
"Apache-1.1"
] | null | null | null |
test/torch/differential_privacy/test_pate.py
|
amitkarn3/PythnSyft
|
8eaa637e1ca54c963281e847556cb14b4a76b46b
|
[
"Apache-1.1"
] | null | null | null |
import numpy as np
from syft.frameworks.torch.differential_privacy import pate
def test_base_dataset():
    """Data-dependent epsilon should be tighter than data-independent."""
    teacher_count, example_count, label_count = 100, 50, 10
    # random teacher votes and random "true" labels
    teacher_preds = (np.random.rand(teacher_count, example_count) * label_count).astype(int)
    true_labels = (np.random.rand(example_count) * label_count).astype(int)
    # force unanimous agreement (label 0) on the first ten examples
    teacher_preds[:, :10] *= 0
    dep_eps, ind_eps = pate.perform_analysis(
        teacher_preds=teacher_preds, indices=true_labels, noise_eps=0.1, delta=1e-5
    )
    assert dep_eps < ind_eps
| 30.444444 | 95 | 0.717153 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 26 | 0.047445 |
349c048a588296bb67dea9d1d337e93b39772ac1
| 381 |
py
|
Python
|
03_Avanzado/09_CursoBasico_Python/codigo/funciones.py
|
LeoSan/CarreraFundamentosProgramacion_Platzi_2021
|
9db6ac33a755f855fbb9c41a9bd0e02712f37cb3
|
[
"MIT"
] | null | null | null |
03_Avanzado/09_CursoBasico_Python/codigo/funciones.py
|
LeoSan/CarreraFundamentosProgramacion_Platzi_2021
|
9db6ac33a755f855fbb9c41a9bd0e02712f37cb3
|
[
"MIT"
] | null | null | null |
03_Avanzado/09_CursoBasico_Python/codigo/funciones.py
|
LeoSan/CarreraFundamentosProgramacion_Platzi_2021
|
9db6ac33a755f855fbb9c41a9bd0e02712f37cb3
|
[
"MIT"
] | null | null | null |
# Example program for using functions
# Function without parameters
def imprimir_mensaje():
    """Print a fixed two-line message."""
    print("Mensaje especial:")
    print("Estoy aprendiendo:")


imprimir_mensaje()
# Function with parameters
valorA = "Hola mundo"
valorB = "Función con parametros"


def imprimir_mensaje_param(mensaje1, mensaje2):
    """Print the two given messages, one per line."""
    print(mensaje1)
    print(mensaje2)


imprimir_mensaje_param(valorA, valorB)
34a032f572f524fc63d35b7eac84530ba6ee0e35
| 7,130 |
py
|
Python
|
orchestrator/cots/gdal/gdal_rasterize.py
|
spacebel/MAJA
|
3e5d20bc9c744c610e608cfcf1f4c5c738d4de9e
|
[
"Apache-2.0"
] | 57 |
2020-09-30T08:51:22.000Z
|
2021-12-19T20:28:30.000Z
|
orchestrator/cots/gdal/gdal_rasterize.py
|
spacebel/MAJA
|
3e5d20bc9c744c610e608cfcf1f4c5c738d4de9e
|
[
"Apache-2.0"
] | 34 |
2020-09-29T21:27:22.000Z
|
2022-02-03T09:56:45.000Z
|
orchestrator/cots/gdal/gdal_rasterize.py
|
spacebel/MAJA
|
3e5d20bc9c744c610e608cfcf1f4c5c738d4de9e
|
[
"Apache-2.0"
] | 14 |
2020-10-11T13:17:59.000Z
|
2022-03-09T15:58:19.000Z
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2020 Centre National d'Etudes Spatiales (CNES)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
###################################################################################################
o o
oo oo oo o oo ,-.
o o o o o o o o o \_/
o o o o o o o o {|||D
o o oooooo o oooooo / \
o o o o o o o o `-^
o o o o oooo o o
###################################################################################################
orchestrator.cots.gdal.gdal_rasterize -- shortdesc
orchestrator.cots.gdal.gdal_rasterize is a description
It defines classes_and_methods
###################################################################################################
"""
import os
import tempfile
import time
from ..maja_cots import MajaCots
from orchestrator.common.logger.maja_logging import configure_logger
from orchestrator.common.maja_utils import get_test_mode
LOGGER = configure_logger(__name__)
def rasterize_det_foo(input_file, working_directory, option_user=None, output_image=None):
    """
    Rasterize a detector-footprint GML mask to a Byte image.

    :param input_file: path of the input vector mask
    :param working_directory: directory used for temporary outputs
    :param option_user: optional dict of extra/overriding gdal_rasterize
        options (fix: default is now None instead of a shared mutable {})
    :param output_image: optional explicit output path; a temporary one
        is created when omitted
    :return: path of the rasterized image
    """
    options = {"-ot": "Byte",
               "-init": 0,
               "-a_nodata": 0,
               "-at": None,
               "-burn": 1
               }
    if option_user:
        options.update(option_user)
    output_image = rasterize(input_file, options, working_directory, output_image)
    return output_image
def rasterize_no_data(input_file, working_directory, option_user=None, output_image=None):
    """
    Rasterize the QT_NODATA_PIXELS features of a mask to a Byte image.

    :param input_file: path of the input vector mask
    :param working_directory: directory used for temporary outputs
    :param option_user: optional dict of extra/overriding gdal_rasterize
        options (fix: default is now None instead of a shared mutable {})
    :param output_image: optional explicit output path; a temporary one
        is created when omitted
    :return: path of the rasterized image
    """
    options = {"-ot": "Byte",
               "-init": 0,
               "-a_nodata": 0,
               "-at": None,
               "-burn": 1,
               "-sql": 'select fid, maskType, * from MaskFeature',
               "-where": 'maskType="QT_NODATA_PIXELS"'}
    if option_user:
        options.update(option_user)
    output_image = rasterize(input_file, options, working_directory, output_image)
    return output_image
def rasterize(input_file, options, working_directory, output_image=None):
    """
    Run the gdal_rasterize cots on an input vector file.

    :param input_file: input vector (e.g. GML) file
    :param options: dict of gdal_rasterize command line options
    :param working_directory: working directory for (temporary) outputs
    :param output_image: optional output image file name
    :return: path of the produced image
    """
    if output_image is not None:
        sub_working_directory = working_directory
    else:
        # No explicit output requested: isolate the run in a dedicated
        # temporary sub directory and use the default output name.
        sub_working_directory = tempfile.mkdtemp(prefix="GDALRasterize_",
                                                 dir=working_directory)
        output_image = "gdal_rasterize.tif"
    rasterizer = GdalRasterize(sub_working_directory, output_image)
    rasterizer.pre(input_file, options)
    rasterizer.run()
    rasterizer.post()
    return rasterizer.output_image
class GdalRasterize(MajaCots):
    """
    Cots wrapper around the external ``gdal_rasterize`` command line tool.

    ``pre`` builds the command line, the inherited ``run`` executes it and
    the produced image path is exposed as ``output_image``.
    """

    def __init__(self, working_directory=None, output_image="gdal_rasterize.tif"):
        """
        :param working_directory: directory where the output image is written
        :param output_image: output image file name, joined to working_directory
        """
        super(GdalRasterize, self).__init__(working_directory)
        # TODO: TBC
        if working_directory is not None:
            self.output_image = os.path.join(self.working_directory,
                                             output_image)

    def pre(self, input_file, options=None):
        """
        Prepares the command line to be launched in run().

        :param input_file: input vector (e.g. GML) file to rasterize
        :param options: dict of gdal_rasterize options; a None value means
                        the key is a flag without argument (e.g. "-at": None)
        """
        # None default instead of a mutable {} default argument (shared
        # across calls).
        if options is None:
            options = {}
        list_option = []
        for key, value in options.items():
            if key != "":
                list_option.append(key)
                if value is not None:
                    # Coerce to str so numeric option values (e.g. -burn 1)
                    # are valid command line tokens when joined/executed.
                    list_option.append(str(value))
        self.command_line = ["gdal_rasterize"] + list_option + [input_file, self.output_image]

    def post(self):
        """Nothing to clean up after a rasterization."""
        pass

    def run_rasterize(self, inputfilename, xmin, ymin, xmax, ymax, size_x, size_y, projection,
                      outputfilename, gdaladditionalcommandlineparameters):
        """
        Run gdal_rasterize on the given extent and output size.

        :param inputfilename: string
        :param xmin: int
        :param ymin: int
        :param xmax: int
        :param ymax: int
        :param size_x: int
        :param size_y: int
        :param projection: string
        :param outputfilename: string
        :param gdaladditionalcommandlineparameters: string
        :return: the cots execution status (self.status)
        """
        # Call the gdal_rasterize command system with the specific parameters.
        # NOTE(review): the trailing space inside the quoted -a_srs value is
        # kept from the historical command line -- confirm it is intentional.
        self.command_line = (
            "gdal_rasterize {} -te {} {} {} {} -ts {} {} -a_srs '{} ' {} {}".format(
                gdaladditionalcommandlineparameters,
                xmin, ymin, xmax, ymax,
                size_x, size_y,
                projection, inputfilename, outputfilename))
        LOGGER.debug(self.command_line)
        if not get_test_mode():
            self.run()
        return self.status

    # TODO: split ?
    def internal_rasterize_gml_macro(self, pinputfilename, pxmin, pymin, pxmax, pymax, psize_x, psize_y,
                                     pprojection, image_filename, pgdaladditionalcommandlineparameters):
        """
        Rasterize a GML file to image_filename, logging the elapsed time.

        :param pinputfilename: string
        :param pxmin: int
        :param pymin: int
        :param pxmax: int
        :param pymax: int
        :param psize_x: int
        :param psize_y: int
        :param pprojection: string
        :param image_filename: string
        :param pgdaladditionalcommandlineparameters: string
        :return: the output image filename
        """
        # Read the additionnal parameters
        # TODO: add -of tif in command eline paramters
        l_shortfilename = os.path.basename(image_filename)
        LOGGER.debug("vnsCachingRasterizeGmlMacro the gml file '%s' to the image file name '%s' with the "
                     "following parameters: %s -te %s %s %s %s -ts %s %s.",
                     pinputfilename, l_shortfilename, pgdaladditionalcommandlineparameters, pxmin, pymin, pxmax, pymax,
                     psize_x, psize_y)
        start_time = time.time()
        # Call program (run for its side effects; the status is not used here).
        self.run_rasterize(pinputfilename, pxmin, pymin, pxmax, pymax, psize_x, psize_y, pprojection,
                           image_filename, pgdaladditionalcommandlineparameters)
        stop_time = time.time()
        duration = stop_time - start_time
        LOGGER.debug(" => Rasterize the file name %s run in %s.", l_shortfilename, duration)
        return image_filename
| 33.317757 | 119 | 0.55021 | 3,750 | 0.525947 | 0 | 0 | 0 | 0 | 0 | 0 | 3,269 | 0.458485 |
34a1337c8f6d2a9a081a7f61b09a68afa8480561
| 7,374 |
py
|
Python
|
theseus/test/test_tracer.py
|
pexip/os-python-theseus
|
3093edd7bc4af5556bce42c8602685010c695183
|
[
"0BSD"
] | 1 |
2016-04-27T07:58:20.000Z
|
2016-04-27T07:58:20.000Z
|
theseus/test/test_tracer.py
|
pexip/os-python-theseus
|
3093edd7bc4af5556bce42c8602685010c695183
|
[
"0BSD"
] | null | null | null |
theseus/test/test_tracer.py
|
pexip/os-python-theseus
|
3093edd7bc4af5556bce42c8602685010c695183
|
[
"0BSD"
] | null | null | null |
from cStringIO import StringIO
import inspect
import textwrap
import pytest
from twisted.internet import defer, task
from theseus._tracer import Function, Tracer
class FakeCode(object):
    """Stand-in for a code object exposing only the attributes Tracer reads."""

    def __init__(self, filename='', name='', flags=0):
        self.co_flags = flags
        self.co_name = name
        self.co_filename = filename
class FakeFrame(object):
    """Stand-in for a frame object exposing only the attributes Tracer reads."""

    # NOTE: 'globals'/'locals' shadow builtins but are kept: callers pass
    # them as keyword arguments, so renaming would break the interface.
    def __init__(self, code=None, back=None, globals=None, locals=None):
        # None defaults avoid the mutable-default pitfall: previously a
        # single dict (and a single FakeCode instance) was shared by every
        # FakeFrame built with defaults.
        self.f_code = FakeCode() if code is None else code
        self.f_back = back
        self.f_globals = {} if globals is None else globals
        self.f_locals = {} if locals is None else locals
class FakeFunction(object):
    """Stand-in for a function object exposing only func_code."""

    def __init__(self, code=None):
        # Lazy default: build a fresh FakeCode per instance instead of
        # sharing one def-time instance across every default construction.
        self.func_code = FakeCode() if code is None else code
def test_function_of_frame():
    """
    Function.of_frame examines a frame's code for its filename and code name.
    """
    code = FakeCode('spam', 'eggs')
    expected = ('spam', 'eggs')
    assert Function.of_frame(FakeFrame(code)) == expected
def test_do_not_trace_non_deferred_returns():
    """
    If a function returns a non-Deferred value, nothing happens. More
    specifically, no function trace information is stored.
    """
    tracer = Tracer()
    tracer._trace(FakeFrame(), 'return', None)
    assert not tracer._function_data
def test_do_not_trace_generators():
    """
    If a generator function returns a Deferred, nothing happens. More
    specifically, no function trace information is stored.
    """
    generator_code = FakeCode(flags=inspect.CO_GENERATOR)
    tracer = Tracer()
    tracer._trace(FakeFrame(generator_code), 'return', defer.Deferred())
    assert not tracer._function_data
def test_do_not_trace_defer_module():
    """
    If a function in twisted.internet.defer returns a Deferred, nothing
    happens. More specifically, no function trace information is stored.
    """
    frame = FakeFrame(globals={'__name__': 'twisted.internet.defer'})
    tracer = Tracer()
    tracer._trace(frame, 'return', defer.Deferred())
    assert not tracer._function_data
# Shared fake call stacks used by the tracing tests below.
# Bottom frame: function 'spam' in spam.py, no caller.
_frame_spam = FakeFrame(FakeCode('spam.py', 'spam'))
# 'eggs' in eggs.py, with _frame_spam as its caller (f_back).
_frame_eggs = FakeFrame(FakeCode('eggs.py', 'eggs'), _frame_spam)
# Mimics a frame of twisted.internet.defer.unwindGenerator (the
# inlineCallbacks machinery) called from eggs; its local 'f' is the
# wrapped generator function ('sausage' in sausage.py), which the
# Tracer is expected to surface in place of the defer-internal frame.
_frame_unwindGenerator = FakeFrame(
    FakeCode('defer.py', 'unwindGenerator'),
    _frame_eggs,
    {'__name__': 'twisted.internet.defer'},
    {'f': FakeFunction(FakeCode('sausage.py', 'sausage'))})
def test_trace_deferred_return_initial_setup():
    """
    If a function returns a Deferred, nothing happens until the Deferred
    fires. More specifically, no function trace information is stored.
    """
    tracer = Tracer()
    unfired = defer.Deferred()
    tracer._trace(_frame_spam, 'return', unfired)
    assert not tracer._function_data
def _trace_deferred_firing_after(clock, tracer, frame, seconds):
    """
    Helper function to advance a clock and fire a Deferred.
    """
    deferred = defer.Deferred()
    # Simulate a call that returns a Deferred, then let 'seconds' of fake
    # time elapse before the Deferred fires.
    tracer._trace(frame, 'call', None)
    tracer._trace(frame, 'return', deferred)
    clock.advance(seconds)
    deferred.callback(None)
def test_trace_deferred_return():
    """
    If a function returns a Deferred, after that Deferred fires, function trace
    information is stored regarding the amount of time it took for that
    Deferred to fire.
    """
    clock = task.Clock()
    tracer = Tracer(reactor=clock)
    _trace_deferred_firing_after(clock, tracer, _frame_spam, 1.5)
    # 1.5 s expressed in microseconds, no callers recorded.
    expected = {('spam.py', 'spam'): ({}, 1500000)}
    assert tracer._function_data == expected
def test_trace_deferred_return_with_caller():
    """
    If the function returning the Deferred has a frame above it, that
    information is stored as well.
    """
    clock = task.Clock()
    tracer = Tracer(reactor=clock)
    _trace_deferred_firing_after(clock, tracer, _frame_eggs, 1.5)
    # spam is charged with one call into eggs; eggs holds the self time.
    expected = {
        ('spam.py', 'spam'): ({
            ('eggs.py', 'eggs'): (1, 1500000),
        }, 0),
        ('eggs.py', 'eggs'): ({}, 1500000),
    }
    assert tracer._function_data == expected
def test_trace_deferred_return_with_multiple_calls():
    """
    If the function(s) returning the Deferred(s) are called multiple times, the
    timing data is summed.
    """
    clock = task.Clock()
    tracer = Tracer(reactor=clock)
    for frame, seconds in [(_frame_spam, 0.5),
                           (_frame_spam, 0.25),
                           (_frame_eggs, 0.125)]:
        _trace_deferred_firing_after(clock, tracer, frame, seconds)
    # spam accumulates 0.5 + 0.25 s of self time plus one call into eggs.
    expected = {
        ('spam.py', 'spam'): ({
            ('eggs.py', 'eggs'): (1, 125000),
        }, 750000),
        ('eggs.py', 'eggs'): ({}, 125000),
    }
    assert tracer._function_data == expected
def test_trace_inlineCallbacks_detection():
    """
    Tracer will detect the use of inlineCallbacks and rewrite the call stacks
    to look better and contain more information.
    """
    clock = task.Clock()
    tracer = Tracer(reactor=clock)
    _trace_deferred_firing_after(clock, tracer, _frame_unwindGenerator, 0.5)
    # The defer-internal unwindGenerator frame is replaced by the wrapped
    # generator function (sausage), keeping eggs as its caller.
    expected = {
        ('spam.py', 'spam'): ({
            ('eggs.py', 'eggs'): (1, 500000),
        }, 0),
        ('eggs.py', 'eggs'): ({
            ('sausage.py', 'sausage'): (1, 500000),
        }, 0),
        ('sausage.py', 'sausage'): ({}, 500000),
    }
    assert tracer._function_data == expected
def test_tracer_calltree_output():
    """
    Tracer's write_data method writes out calltree-formatted information.
    """
    clock = task.Clock()
    tracer = Tracer(reactor=clock)
    for frame, seconds in [(_frame_spam, 0.5),
                           (_frame_spam, 0.25),
                           (_frame_eggs, 0.125)]:
        _trace_deferred_firing_after(clock, tracer, frame, seconds)
    output = StringIO()
    tracer.write_data(output)
    expected = textwrap.dedent("""\
        events: Nanoseconds
        fn=eggs eggs.py
        0 125000
        fn=spam spam.py
        0 750000
        cfn=eggs eggs.py
        calls=1 0
        0 125000
    """)
    assert output.getvalue() == expected
class FakeSys(object):
    """Minimal sys module stand-in recording the installed profile hook."""

    # Currently installed profile hook; None when nothing is installed.
    tracer = None

    def setprofile(self, trace):
        self.tracer = trace

    def getprofile(self):
        return self.tracer
@pytest.fixture
def fakesys(monkeypatch):
    """Replace theseus._tracer.sys with a FakeSys and hand it to the test."""
    fake = FakeSys()
    monkeypatch.setattr('theseus._tracer.sys', fake)
    return fake
def test_tracer_install(fakesys):
    """
    Tracer's install method will install itself globally using sys.setprofile.
    """
    tracer = Tracer()
    tracer.install()
    assert fakesys.tracer == tracer._trace
def test_tracer_wrapped_hook(fakesys):
    """
    If a profile hook was set prior to calling Tracer's install method, it will
    continue to be called by Tracer.
    """
    calls = []

    def previous_hook(frame, event, arg):
        calls.append((frame, event, arg))

    fakesys.tracer = previous_hook
    tracer = Tracer()
    tracer.install()
    sentinel = object()
    tracer._trace(sentinel, 'call', sentinel)
    assert calls == [(sentinel, 'call', sentinel)]
def test_tracer_uninstall(fakesys):
    """
    Tracer's install method will uninstall itself as well.
    """
    tracer = Tracer()
    tracer.install()
    tracer.uninstall()
    assert fakesys.tracer is None
def test_tracer_uninstall_with_other_hook(fakesys):
    """
    If another profile hook was installed after the Tracer was installed, then
    the profile hook will remain unchanged.
    """
    tracer = Tracer()
    tracer.install()
    # Somebody else installs their own hook after us...
    sentinel = object()
    fakesys.tracer = sentinel
    tracer.uninstall()
    # ...and uninstalling must not clobber it.
    assert fakesys.tracer is sentinel
def test_tracer_uninstall_with_other_hook_previously_installed(fakesys):
    """
    If another profile hook was installed before the Tracer was installed, then
    the profile hook will be restored to that profile hook.
    """
    # A pre-existing hook is in place before the Tracer arrives.
    sentinel = object()
    fakesys.tracer = sentinel
    tracer = Tracer()
    tracer.install()
    tracer.uninstall()
    assert fakesys.tracer is sentinel
| 27.210332 | 79 | 0.649173 | 643 | 0.087198 | 0 | 0 | 140 | 0.018986 | 0 | 0 | 2,598 | 0.352319 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.