Dataset schema (one row per source file; ⌀ marks nullable columns):

  blob_id               string (length 40)
  directory_id          string (length 40)
  path                  string (length 3 to 616)
  content_id            string (length 40)
  detected_licenses     list (length 0 to 112)
  license_type          string (2 classes)
  repo_name             string (length 5 to 115)
  snapshot_id           string (length 40)
  revision_id           string (length 40)
  branch_name           string (777 classes)
  visit_date            timestamp[us] (2015-08-06 10:31:46 to 2023-09-06 10:44:38)
  revision_date         timestamp[us] (1970-01-01 02:38:32 to 2037-05-03 13:00:00)
  committer_date        timestamp[us] (1970-01-01 02:38:32 to 2023-09-06 01:08:06)
  github_id             int64 (4.92k to 681M) ⌀
  star_events_count     int64 (0 to 209k)
  fork_events_count     int64 (0 to 110k)
  gha_license_id        string (22 classes)
  gha_event_created_at  timestamp[us] (2012-06-04 01:52:49 to 2023-09-14 21:59:50) ⌀
  gha_created_at        timestamp[us] (2008-05-22 07:58:19 to 2023-08-21 12:35:19) ⌀
  gha_language          string (149 classes)
  src_encoding          string (26 classes)
  language              string (1 class)
  is_vendor             bool (2 classes)
  is_generated          bool (2 classes)
  length_bytes          int64 (3 to 10.2M)
  extension             string (188 classes)
  content               string (length 3 to 10.2M)
  authors               list (length 1)
  author_id             string (length 1 to 132)
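A minimal sketch of how a preview with this schema could be loaded with the
Hugging Face `datasets` library; the dataset repo id below is a placeholder
assumption, not something stated in this preview:

    from datasets import load_dataset

    # Stream the split so the (potentially large) content column is not
    # downloaded all at once; "<dataset-repo-id>" is hypothetical.
    ds = load_dataset("<dataset-repo-id>", split="train", streaming=True)
    for row in ds.take(3):
        print(row["repo_name"], row["path"], row["length_bytes"])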
----------------------------------------
path: /4 Django/solutions/mysite/polls/views.py
repo_name: PdxCodeGuild/class_mouse | branch_name: refs/heads/main
blob_id: 80e8c98591b0afb1451043bb84bd90a39ef8b326
directory_id: 71012df2815a4666203a2d574f1c1745d5a9c6dd
content_id: ee7ecc4a4c55eaec3ae5baa8abb4803fe3c76ed1
snapshot_id: 6c3b85ccf5ed4d0c867aee70c46af1b22d20a9e8
revision_id: 40c229947260134a1f9da6fe3d7073bee3ebb3f7
detected_licenses: [] | license_type: no_license
visit_date: 2023-03-23T14:54:39.288754 | revision_date: 2021-03-20T01:48:21 | committer_date: 2021-03-20T01:48:21
github_id: 321,429,925 | star_events_count: 1 | fork_events_count: 7
gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 1,007 | extension: py
content:
from django.shortcuts import render, get_object_or_404
from django.http import HttpResponse, HttpResponseRedirect
from django.urls import reverse
from .models import Question, Choice
# Create your views here.
def index(request):
questions = Question.objects.order_by('-pub_date')
context = {
'questions': questions
}
return render(request, 'polls/index.html', context)
def detail(request, id):
question = get_object_or_404(Question, id=id)
context = {
'question': question
}
return render(request, 'polls/detail.html', context)
def results(request, id):
question = get_object_or_404(Question, id=id)
context = {
'question': question
}
return render(request, 'polls/results.html', context)
def vote(request, id):
    # Note: assumes a 'choice' key is present in the POST data; a missing key
    # raises KeyError here.
    selected_choice = request.POST['choice']
choice = get_object_or_404(Choice, id=selected_choice)
choice.votes += 1
choice.save()
return HttpResponseRedirect(reverse('polls:detail', args=(id,)))
authors: ["[email protected]"] | author_id: (not shown)
----------------------------------------
path: /ex_03_02.py
repo_name: epicarts/Python-for-Everybody | branch_name: refs/heads/master
blob_id: 52e8847e9de82e204fdf0c8cd2b6aae8698321bd
directory_id: 534b315921a7ad091aaef3ad9dd33691a570adad
content_id: 92e6efbfca059085bac104c573028c8f2f8773d7
snapshot_id: 42d91b66f6c5fbae47caabee98f64269ac4b2437
revision_id: edbe916b0beb9087e2a4a57516ccb3a315ac95d7
detected_licenses: [] | license_type: no_license
visit_date: 2020-03-21T21:00:07.393582 | revision_date: 2018-07-17T10:58:11 | committer_date: 2018-07-17T10:58:11
github_id: 139,041,394 | star_events_count: 0 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 322 | extension: py
content:
sh = input("Enter Hours: ")
sr = input("Enter Rate: ")
try:
fh = float(sh)
fr = float(sr)
except:
print("Error, please enter numeric input")
quit()
if fh > 40 :
reg = fr * fh
otp = (fh - 40.0) * (fr * 0.5)
xp = reg + otp
else:
xp = fh * fr
print("Pay:", xp)
authors: ["[email protected]"] | author_id: (not shown)
----------------------------------------
path: /third_party/blink/web_tests/external/wpt/tools/wptrunner/wptrunner/executors/executorwebdriver.py
repo_name: billti/chromium | branch_name: refs/heads/master
blob_id: b737adc0f2b8afa966e318f561e92474527a3b0c
directory_id: 10d57ce051ca936f6822724a4e996d35f7cd269c
content_id: ad79600aeb8f943f5c5621102df8b6e701eed303
snapshot_id: aea73afa192266460538df692e80dd3f749d2751
revision_id: 94fde1ddc4a9db7488fd646443688a88c178c158
detected_licenses: ["LGPL-2.0-or-later", "LicenseRef-scancode-warranty-disclaimer", "LGPL-2.1-only", "GPL-1.0-or-later", "GPL-2.0-only", "LGPL-2.0-only", "BSD-2-Clause", "LicenseRef-scancode-other-copyleft", "BSD-3-Clause", "MIT", "Apache-2.0"] | license_type: permissive
visit_date: 2023-02-02T05:00:23.474800 | revision_date: 2020-09-24T16:57:28 | committer_date: 2020-09-24T16:57:28
github_id: 298,350,654 | star_events_count: 0 | fork_events_count: 0
gha_license_id: BSD-3-Clause | gha_event_created_at: 2020-09-24T17:37:58 | gha_created_at: 2020-09-24T17:37:57 | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 24,135 | extension: py
content:
from __future__ import absolute_import
import json
import os
import socket
import threading
import time
import traceback
import uuid
from six.moves.urllib.parse import urljoin
from .base import (CallbackHandler,
CrashtestExecutor,
RefTestExecutor,
RefTestImplementation,
TestharnessExecutor,
TimedRunner,
strip_server)
from .protocol import (BaseProtocolPart,
TestharnessProtocolPart,
Protocol,
SelectorProtocolPart,
ClickProtocolPart,
SendKeysProtocolPart,
ActionSequenceProtocolPart,
TestDriverProtocolPart,
GenerateTestReportProtocolPart,
SetPermissionProtocolPart,
VirtualAuthenticatorProtocolPart)
from ..testrunner import Stop
import webdriver as client
from webdriver import error
here = os.path.dirname(__file__)
class WebDriverCallbackHandler(CallbackHandler):
unimplemented_exc = (NotImplementedError, client.UnknownCommandException)
class WebDriverBaseProtocolPart(BaseProtocolPart):
def setup(self):
self.webdriver = self.parent.webdriver
def execute_script(self, script, asynchronous=False):
method = self.webdriver.execute_async_script if asynchronous else self.webdriver.execute_script
return method(script)
def set_timeout(self, timeout):
try:
self.webdriver.timeouts.script = timeout
except client.WebDriverException:
# workaround https://bugs.chromium.org/p/chromedriver/issues/detail?id=2057
body = {"type": "script", "ms": timeout * 1000}
self.webdriver.send_session_command("POST", "timeouts", body)
@property
def current_window(self):
return self.webdriver.window_handle
def set_window(self, handle):
self.webdriver.window_handle = handle
def load(self, url):
self.webdriver.url = url
def wait(self):
while True:
try:
self.webdriver.execute_async_script("")
except (client.TimeoutException,
client.ScriptTimeoutException,
client.JavascriptErrorException):
# A JavascriptErrorException will happen when we navigate;
# by ignoring it it's possible to reload the test whilst the
# harness remains paused
pass
except (socket.timeout,
client.NoSuchWindowException,
client.UnknownErrorException,
IOError):
break
except Exception:
self.logger.error(traceback.format_exc())
break
class WebDriverTestharnessProtocolPart(TestharnessProtocolPart):
def setup(self):
self.webdriver = self.parent.webdriver
self.runner_handle = None
with open(os.path.join(here, "runner.js")) as f:
self.runner_script = f.read()
with open(os.path.join(here, "window-loaded.js")) as f:
self.window_loaded_script = f.read()
def load_runner(self, url_protocol):
if self.runner_handle:
self.webdriver.window_handle = self.runner_handle
url = urljoin(self.parent.executor.server_url(url_protocol),
"/testharness_runner.html")
self.logger.debug("Loading %s" % url)
self.webdriver.url = url
self.runner_handle = self.webdriver.window_handle
format_map = {"title": threading.current_thread().name.replace("'", '"')}
self.parent.base.execute_script(self.runner_script % format_map)
def close_old_windows(self):
self.webdriver.actions.release()
handles = [item for item in self.webdriver.handles if item != self.runner_handle]
for handle in handles:
try:
self.webdriver.window_handle = handle
self.webdriver.window.close()
except client.NoSuchWindowException:
pass
self.webdriver.window_handle = self.runner_handle
return self.runner_handle
def get_test_window(self, window_id, parent, timeout=5):
"""Find the test window amongst all the open windows.
This is assumed to be either the named window or the one after the parent in the list of
window handles
:param window_id: The DOM name of the Window
:param parent: The handle of the runner window
:param timeout: The time in seconds to wait for the window to appear. This is because in
some implementations there's a race between calling window.open and the
window being added to the list of WebDriver accessible windows."""
test_window = None
end_time = time.time() + timeout
while time.time() < end_time:
try:
# Try using the JSON serialization of the WindowProxy object,
# it's in Level 1 but nothing supports it yet
win_s = self.webdriver.execute_script("return window['%s'];" % window_id)
win_obj = json.loads(win_s)
test_window = win_obj["window-fcc6-11e5-b4f8-330a88ab9d7f"]
except Exception:
pass
if test_window is None:
after = self.webdriver.handles
if len(after) == 2:
test_window = next(iter(set(after) - {parent}))
elif after[0] == parent and len(after) > 2:
# Hope the first one here is the test window
test_window = after[1]
if test_window is not None:
assert test_window != parent
return test_window
time.sleep(0.1)
raise Exception("unable to find test window")
def test_window_loaded(self):
"""Wait until the page in the new window has been loaded.
        Here we ignore JavaScript exceptions that are thrown when
the document has been unloaded due to a process change.
"""
while True:
try:
self.webdriver.execute_script(self.window_loaded_script, asynchronous=True)
break
except error.JavascriptErrorException:
pass
class WebDriverSelectorProtocolPart(SelectorProtocolPart):
def setup(self):
self.webdriver = self.parent.webdriver
def elements_by_selector(self, selector):
return self.webdriver.find.css(selector)
def elements_by_selector_and_frame(self, element_selector, frame):
return self.webdriver.find.css(element_selector, frame=frame)
class WebDriverClickProtocolPart(ClickProtocolPart):
def setup(self):
self.webdriver = self.parent.webdriver
def element(self, element):
self.logger.info("click " + repr(element))
return element.click()
class WebDriverSendKeysProtocolPart(SendKeysProtocolPart):
def setup(self):
self.webdriver = self.parent.webdriver
def send_keys(self, element, keys):
try:
return element.send_keys(keys)
except client.UnknownErrorException as e:
# workaround https://bugs.chromium.org/p/chromedriver/issues/detail?id=1999
if (e.http_status != 500 or
e.status_code != "unknown error"):
raise
return element.send_element_command("POST", "value", {"value": list(keys)})
class WebDriverActionSequenceProtocolPart(ActionSequenceProtocolPart):
def setup(self):
self.webdriver = self.parent.webdriver
def send_actions(self, actions):
self.webdriver.actions.perform(actions['actions'])
class WebDriverTestDriverProtocolPart(TestDriverProtocolPart):
def setup(self):
self.webdriver = self.parent.webdriver
def send_message(self, message_type, status, message=None):
obj = {
"type": "testdriver-%s" % str(message_type),
"status": str(status)
}
if message:
obj["message"] = str(message)
self.webdriver.execute_script("window.postMessage(%s, '*')" % json.dumps(obj))
class WebDriverGenerateTestReportProtocolPart(GenerateTestReportProtocolPart):
def setup(self):
self.webdriver = self.parent.webdriver
def generate_test_report(self, message):
json_message = {"message": message}
self.webdriver.send_session_command("POST", "reporting/generate_test_report", json_message)
class WebDriverSetPermissionProtocolPart(SetPermissionProtocolPart):
def setup(self):
self.webdriver = self.parent.webdriver
def set_permission(self, descriptor, state, one_realm):
permission_params_dict = {
"descriptor": descriptor,
"state": state,
}
if one_realm is not None:
permission_params_dict["oneRealm"] = one_realm
self.webdriver.send_session_command("POST", "permissions", permission_params_dict)
class WebDriverVirtualAuthenticatorProtocolPart(VirtualAuthenticatorProtocolPart):
def setup(self):
self.webdriver = self.parent.webdriver
def add_virtual_authenticator(self, config):
return self.webdriver.send_session_command("POST", "webauthn/authenticator", config)
def remove_virtual_authenticator(self, authenticator_id):
return self.webdriver.send_session_command("DELETE", "webauthn/authenticator/%s" % authenticator_id)
def add_credential(self, authenticator_id, credential):
return self.webdriver.send_session_command("POST", "webauthn/authenticator/%s/credential" % authenticator_id, credential)
def get_credentials(self, authenticator_id):
return self.webdriver.send_session_command("GET", "webauthn/authenticator/%s/credentials" % authenticator_id)
def remove_credential(self, authenticator_id, credential_id):
return self.webdriver.send_session_command("DELETE", "webauthn/authenticator/%s/credentials/%s" % (authenticator_id, credential_id))
def remove_all_credentials(self, authenticator_id):
return self.webdriver.send_session_command("DELETE", "webauthn/authenticator/%s/credentials" % authenticator_id)
def set_user_verified(self, authenticator_id, uv):
return self.webdriver.send_session_command("POST", "webauthn/authenticator/%s/uv" % authenticator_id, uv)
class WebDriverProtocol(Protocol):
implements = [WebDriverBaseProtocolPart,
WebDriverTestharnessProtocolPart,
WebDriverSelectorProtocolPart,
WebDriverClickProtocolPart,
WebDriverSendKeysProtocolPart,
WebDriverActionSequenceProtocolPart,
WebDriverTestDriverProtocolPart,
WebDriverGenerateTestReportProtocolPart,
WebDriverSetPermissionProtocolPart,
WebDriverVirtualAuthenticatorProtocolPart]
def __init__(self, executor, browser, capabilities, **kwargs):
super(WebDriverProtocol, self).__init__(executor, browser)
self.capabilities = capabilities
self.url = browser.webdriver_url
self.webdriver = None
def connect(self):
"""Connect to browser via WebDriver."""
self.logger.debug("Connecting to WebDriver on URL: %s" % self.url)
        # self.url looks like "http://host:port/"; split out host and port
        host, port = self.url.split(":")[1].strip("/"), self.url.split(':')[-1].strip("/")
capabilities = {"alwaysMatch": self.capabilities}
self.webdriver = client.Session(host, port, capabilities=capabilities)
self.webdriver.start()
def teardown(self):
self.logger.debug("Hanging up on WebDriver session")
try:
self.webdriver.end()
except Exception as e:
message = str(getattr(e, "message", ""))
if message:
message += "\n"
message += traceback.format_exc()
self.logger.debug(message)
self.webdriver = None
def is_alive(self):
try:
            # Get a simple property over the connection, with a 2-second
            # timeout; that should be more than enough to check whether the
            # WebDriver is still alive, and it allows the check to complete
            # within the 5 seconds of testrunner extra_timeout we have at most
            # to end the test before the external timeout from the testrunner
            # triggers.
self.webdriver.send_session_command("GET", "window", timeout=2)
except (socket.timeout, client.UnknownErrorException, client.InvalidSessionIdException):
return False
return True
def after_connect(self):
self.testharness.load_runner(self.executor.last_environment["protocol"])
class WebDriverRun(TimedRunner):
def set_timeout(self):
try:
self.protocol.base.set_timeout(self.timeout + self.extra_timeout)
except client.UnknownErrorException:
self.logger.error("Lost WebDriver connection")
return Stop
def run_func(self):
try:
self.result = True, self.func(self.protocol, self.url, self.timeout)
except (client.TimeoutException, client.ScriptTimeoutException):
self.result = False, ("EXTERNAL-TIMEOUT", None)
except (socket.timeout, client.UnknownErrorException):
self.result = False, ("CRASH", None)
except Exception as e:
if (isinstance(e, client.WebDriverException) and
e.http_status == 408 and
e.status_code == "asynchronous script timeout"):
# workaround for https://bugs.chromium.org/p/chromedriver/issues/detail?id=2001
self.result = False, ("EXTERNAL-TIMEOUT", None)
else:
message = str(getattr(e, "message", ""))
if message:
message += "\n"
message += traceback.format_exc()
self.result = False, ("INTERNAL-ERROR", message)
finally:
self.result_flag.set()
class WebDriverTestharnessExecutor(TestharnessExecutor):
supports_testdriver = True
protocol_cls = WebDriverProtocol
def __init__(self, logger, browser, server_config, timeout_multiplier=1,
close_after_done=True, capabilities=None, debug_info=None,
supports_eager_pageload=True, cleanup_after_test=True,
**kwargs):
"""WebDriver-based executor for testharness.js tests"""
TestharnessExecutor.__init__(self, logger, browser, server_config,
timeout_multiplier=timeout_multiplier,
debug_info=debug_info)
self.protocol = self.protocol_cls(self, browser, capabilities)
with open(os.path.join(here, "testharness_webdriver_resume.js")) as f:
self.script_resume = f.read()
with open(os.path.join(here, "window-loaded.js")) as f:
self.window_loaded_script = f.read()
self.close_after_done = close_after_done
self.window_id = str(uuid.uuid4())
self.supports_eager_pageload = supports_eager_pageload
self.cleanup_after_test = cleanup_after_test
def is_alive(self):
return self.protocol.is_alive()
def on_environment_change(self, new_environment):
if new_environment["protocol"] != self.last_environment["protocol"]:
self.protocol.testharness.load_runner(new_environment["protocol"])
def do_test(self, test):
url = self.test_url(test)
success, data = WebDriverRun(self.logger,
self.do_testharness,
self.protocol,
url,
test.timeout * self.timeout_multiplier,
self.extra_timeout).run()
if success:
return self.convert_result(test, data)
return (test.result_cls(*data), [])
def do_testharness(self, protocol, url, timeout):
format_map = {"url": strip_server(url)}
# The previous test may not have closed its old windows (if something
# went wrong or if cleanup_after_test was False), so clean up here.
parent_window = protocol.testharness.close_old_windows()
# Now start the test harness
protocol.base.execute_script("window.open('about:blank', '%s', 'noopener')" % self.window_id)
test_window = protocol.testharness.get_test_window(self.window_id,
parent_window,
timeout=5*self.timeout_multiplier)
self.protocol.base.set_window(test_window)
# Wait until about:blank has been loaded
protocol.base.execute_script(self.window_loaded_script, asynchronous=True)
handler = WebDriverCallbackHandler(self.logger, protocol, test_window)
protocol.webdriver.url = url
if not self.supports_eager_pageload:
self.wait_for_load(protocol)
while True:
result = protocol.base.execute_script(
self.script_resume % format_map, asynchronous=True)
# As of 2019-03-29, WebDriver does not define expected behavior for
# cases where the browser crashes during script execution:
#
# https://github.com/w3c/webdriver/issues/1308
if not isinstance(result, list) or len(result) != 2:
try:
is_alive = self.is_alive()
except client.WebDriverException:
is_alive = False
if not is_alive:
raise Exception("Browser crashed during script execution.")
done, rv = handler(result)
if done:
break
# Attempt to cleanup any leftover windows, if allowed. This is
# preferable as it will blame the correct test if something goes wrong
# closing windows, but if the user wants to see the test results we
# have to leave the window(s) open.
if self.cleanup_after_test:
protocol.testharness.close_old_windows()
return rv
def wait_for_load(self, protocol):
# pageLoadStrategy=eager doesn't work in Chrome so try to emulate in user script
loaded = False
seen_error = False
while not loaded:
try:
loaded = protocol.base.execute_script("""
var callback = arguments[arguments.length - 1];
if (location.href === "about:blank") {
callback(false);
} else if (document.readyState !== "loading") {
callback(true);
} else {
document.addEventListener("readystatechange", () => {if (document.readyState !== "loading") {callback(true)}});
}""", asynchronous=True)
except client.JavascriptErrorException:
# We can get an error here if the script runs in the initial about:blank
# document before it has navigated, with the driver returning an error
# indicating that the document was unloaded
if seen_error:
raise
seen_error = True
class WebDriverRefTestExecutor(RefTestExecutor):
protocol_cls = WebDriverProtocol
def __init__(self, logger, browser, server_config, timeout_multiplier=1,
screenshot_cache=None, close_after_done=True,
debug_info=None, capabilities=None, **kwargs):
"""WebDriver-based executor for reftests"""
RefTestExecutor.__init__(self,
logger,
browser,
server_config,
screenshot_cache=screenshot_cache,
timeout_multiplier=timeout_multiplier,
debug_info=debug_info)
self.protocol = self.protocol_cls(self,
browser,
capabilities=capabilities)
self.implementation = RefTestImplementation(self)
self.close_after_done = close_after_done
self.has_window = False
with open(os.path.join(here, "test-wait.js")) as f:
self.wait_script = f.read() % {"classname": "reftest-wait"}
def reset(self):
self.implementation.reset()
def is_alive(self):
return self.protocol.is_alive()
def do_test(self, test):
width_offset, height_offset = self.protocol.webdriver.execute_script(
"""return [window.outerWidth - window.innerWidth,
window.outerHeight - window.innerHeight];"""
)
try:
self.protocol.webdriver.window.position = (0, 0)
except client.InvalidArgumentException:
# Safari 12 throws with 0 or 1, treating them as bools; fixed in STP
self.protocol.webdriver.window.position = (2, 2)
self.protocol.webdriver.window.size = (800 + width_offset, 600 + height_offset)
result = self.implementation.run_test(test)
return self.convert_result(test, result)
def screenshot(self, test, viewport_size, dpi, page_ranges):
# https://github.com/web-platform-tests/wpt/issues/7135
assert viewport_size is None
assert dpi is None
return WebDriverRun(self.logger,
self._screenshot,
self.protocol,
self.test_url(test),
test.timeout,
self.extra_timeout).run()
def _screenshot(self, protocol, url, timeout):
self.protocol.base.load(url)
self.protocol.base.execute_script(self.wait_script, True)
screenshot = self.protocol.webdriver.screenshot()
# strip off the data:img/png, part of the url
if screenshot.startswith("data:image/png;base64,"):
screenshot = screenshot.split(",", 1)[1]
return screenshot
class WebDriverCrashtestExecutor(CrashtestExecutor):
protocol_cls = WebDriverProtocol
def __init__(self, logger, browser, server_config, timeout_multiplier=1,
screenshot_cache=None, close_after_done=True,
debug_info=None, capabilities=None, **kwargs):
"""WebDriver-based executor for reftests"""
CrashtestExecutor.__init__(self,
logger,
browser,
server_config,
screenshot_cache=screenshot_cache,
timeout_multiplier=timeout_multiplier,
debug_info=debug_info)
self.protocol = self.protocol_cls(self,
browser,
capabilities=capabilities)
with open(os.path.join(here, "test-wait.js")) as f:
self.wait_script = f.read() % {"classname": "test-wait"}
def do_test(self, test):
timeout = (test.timeout * self.timeout_multiplier if self.debug_info is None
else None)
success, data = WebDriverRun(self.logger,
self.do_crashtest,
self.protocol,
self.test_url(test),
timeout,
self.extra_timeout).run()
if success:
return self.convert_result(test, data)
return (test.result_cls(*data), [])
def do_crashtest(self, protocol, url, timeout):
protocol.base.load(url)
protocol.base.execute_script(self.wait_script, asynchronous=True)
return {"status": "PASS",
"message": None}
authors: ["[email protected]"] | author_id: (not shown)
----------------------------------------
path: /i03Python-API-Development-Fundamentals/ch8pgination_search_order/resources/user.py
repo_name: greatabel/FlaskRepository | branch_name: refs/heads/master
blob_id: 390ca1273fd423bf42edd83f7b629397e189fd4b
directory_id: bca124bc2cecb5d3dec17c9666ec00d29fadf517
content_id: 438080c8c6bf226f01b9b4c26477bcc73ceb2276
snapshot_id: 1d1fdb734dd25d7273136206727c76b2742a915f
revision_id: 85d402bc7b4218d3ae33d90f4a51dbac474f70ee
detected_licenses: [] | license_type: no_license
visit_date: 2023-08-19T18:30:33.585509 | revision_date: 2023-08-07T14:12:25 | committer_date: 2023-08-07T14:12:25
github_id: 60,396,096 | star_events_count: 5 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: 2023-02-15T18:18:42 | gha_created_at: 2016-06-04T06:11:32 | gha_language: JavaScript
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 5,782 | extension: py
content:
import os
from flask import request, url_for, render_template
from flask_restful import Resource
from flask_jwt_extended import jwt_optional, get_jwt_identity, jwt_required
from http import HTTPStatus
from webargs import fields
from webargs.flaskparser import use_kwargs
from extensions import image_set
from mailgun import MailgunApi
from models.recipe import Recipe
from models.user import User
from schemas.user import UserSchema
from schemas.recipe import RecipeSchema, RecipePaginationSchema
from marshmallow import ValidationError
from utils import generate_token, verify_token, save_image
user_schema = UserSchema()
user_public_schema = UserSchema(exclude=('email', ))
user_avatar_schema = UserSchema(only=('avatar_url', ))
recipe_list_schema = RecipeSchema(many=True)
recipe_pagination_schema = RecipePaginationSchema()
domain = os.environ.get('YOUR_DOMAIN_NAME', '')
api_key = os.environ.get('YOUR_API_KEY', '')
mailgun = MailgunApi(domain=domain, api_key=api_key)
class UserListResource(Resource):
def post(self):
json_data = request.get_json()
# data, errors = user_schema.load(data=json_data)
try:
data = user_schema.load(data=json_data)
username = data.get('username')
email = data.get('email')
if User.get_by_username(username):
return {'message': 'username already used'}, HTTPStatus.BAD_REQUEST
if User.get_by_email(email):
return {'message': 'email already used'}, HTTPStatus.BAD_REQUEST
user = User(**data)
user.save()
token = generate_token(user.email, salt='activate')
            subject = '请确认你的注册'  # "Please confirm your registration"
link = url_for('useractivateresource',
token=token,
_external=True)
            # text = 'Thanks for using SmileCook! Please click the confirmation link: {}'.format(link)
            # `text` is the plain-text body of the message; `html` is the HTML body.
            text = None
mailgun.send_email(to=user.email,
subject=subject,
text=text,
html=render_template('email/confirmation.html', link=link))
data = user_schema.dump(user)
return data, HTTPStatus.CREATED
except ValidationError as err:
return {'message': err.messages}, HTTPStatus.BAD_REQUEST
# if errors:
# return {'message': 'Validation errors', 'errors': errors}, HTTPStatus.BAD_REQUEST
class UserResource(Resource):
@jwt_optional
def get(self, username):
user = User.get_by_username(username=username)
if user is None:
return {'message': 'user not found'}, HTTPStatus.NOT_FOUND
current_user = get_jwt_identity()
# print('current_user=', current_user, user.id)
if current_user == user.id:
data = user_schema.dump(user)
else:
data = user_public_schema.dump(user)
return data, HTTPStatus.OK
class MeResource(Resource):
@jwt_required
def get(self):
user = User.get_by_id(id=get_jwt_identity())
data = user_schema.dump(user)
return data, HTTPStatus.OK
example_args = {
'page': fields.Int(missing=1),
'per_page': fields.Int(missing=10),
'visibility': fields.String(missing='public')
}
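# A usage sketch (not part of the original file): with the defaults above, a
# request such as
#     GET /users/<username>/recipes?page=2&per_page=5&visibility=all
# reaches the view below as page=2, per_page=5, visibility='all'; omitted
# query parameters fall back to the `missing` defaults declared in example_args.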
class UserRecipeListResource(Resource):
    # The order of the `visibility` and `username` arguments below matters; swapping them breaks the view.
@jwt_optional
@use_kwargs(example_args, location="query")
def get(self, page, per_page, visibility, username):
print('visibility=', visibility, page, per_page)
user = User.get_by_username(username=username)
if user is None:
return {'message': 'User not found'}, HTTPStatus.NOT_FOUND
current_user = get_jwt_identity()
print(current_user, user.id, visibility)
if current_user == user.id and visibility in ['all', 'private']:
pass
else:
visibility = 'public'
paginated_recipes = Recipe.get_all_by_user(user_id=user.id, page=page, per_page=per_page, visibility=visibility)
# print('recipes=', recipes)
data = recipe_pagination_schema.dump(paginated_recipes)
return data, HTTPStatus.OK
class UserActivateResource(Resource):
def get(self, token):
email = verify_token(token, salt='activate')
if email is False:
return {'message': 'Invalid token or token expired'}, HTTPStatus.BAD_REQUEST
user = User.get_by_email(email=email)
if not user:
return {'message': 'User not found'}, HTTPStatus.NOT_FOUND
if user.is_active is True:
return {'message': 'The user account is already activated'}, HTTPStatus.BAD_REQUEST
user.is_active = True
user.save()
return {}, HTTPStatus.NO_CONTENT
class UserAvatarUploadResource(Resource):
@jwt_required
def put(self):
file = request.files.get('avatar')
if not file:
return {'message': 'Not a valid image'}, HTTPStatus.BAD_REQUEST
if not image_set.file_allowed(file, file.filename):
return {'message': 'File type not allowed'}, HTTPStatus.BAD_REQUEST
user = User.get_by_id(id=get_jwt_identity())
if user.avatar_image:
avatar_path = image_set.path(folder='avatars', filename=user.avatar_image)
if os.path.exists(avatar_path):
os.remove(avatar_path)
filename = save_image(image=file, folder='avatars')
user.avatar_image = filename
user.save()
return user_avatar_schema.dump(user), HTTPStatus.OK
authors: ["[email protected]"] | author_id: (not shown)
----------------------------------------
path: /aliyun-python-sdk-sgw/aliyunsdksgw/request/v20180511/EnableGatewayLoggingRequest.py
repo_name: aliyun/aliyun-openapi-python-sdk | branch_name: refs/heads/master
blob_id: fcd7cbc53993e850cc4ab7e8288031e8e71969e8
directory_id: 83de24182a7af33c43ee340b57755e73275149ae
content_id: 20c5195c01a28b516aeeb9a542f39aea43c1267c
snapshot_id: 4436ca6c57190ceadbc80f0b1c35b1ab13c00c7f
revision_id: 83fd547946fd6772cf26f338d9653f4316c81d3c
detected_licenses: ["Apache-2.0"] | license_type: permissive
visit_date: 2023-08-04T12:32:57.028821 | revision_date: 2023-08-04T06:00:29 | committer_date: 2023-08-04T06:00:29
github_id: 39,558,861 | star_events_count: 1,080 | fork_events_count: 721
gha_license_id: NOASSERTION | gha_event_created_at: 2023-09-14T08:51:06 | gha_created_at: 2015-07-23T09:39:45 | gha_language: Python
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 1,638 | extension: py
content:
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdksgw.endpoint import endpoint_data
class EnableGatewayLoggingRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'sgw', '2018-05-11', 'EnableGatewayLogging','hcs_sgw')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_SecurityToken(self):
return self.get_query_params().get('SecurityToken')
def set_SecurityToken(self,SecurityToken):
self.add_query_param('SecurityToken',SecurityToken)
def get_GatewayId(self):
return self.get_query_params().get('GatewayId')
def set_GatewayId(self,GatewayId):
self.add_query_param('GatewayId',GatewayId)
authors: ["[email protected]"] | author_id: (not shown)
----------------------------------------
path: /Python_codes/p03475/s980181713.py
repo_name: Aasthaengg/IBMdataset | branch_name: refs/heads/main
blob_id: ae84bb1551d919d5a78e142d3902aadd859be877
directory_id: ca7aa979e7059467e158830b76673f5b77a0f5a3
content_id: a507864b19aaa3ae22e296c2df9129769450df61
snapshot_id: 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
revision_id: f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
detected_licenses: [] | license_type: no_license
visit_date: 2023-04-22T10:22:44.763102 | revision_date: 2021-05-13T17:27:22 | committer_date: 2021-05-13T17:27:22
github_id: 367,112,348 | star_events_count: 0 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 506 | extension: py
content:
#!/usr/bin/env python3
# For each starting station i, print the earliest arrival time at the last
# station: input line i gives (c_i, s_i, f_i) = travel time to the next
# station, first departure time, and the interval between departures.
N = int(input())
data = []
for _ in range(N - 1):
data.append(tuple(map(int, input().split())))
for i in range(N - 1):
c_i, s_i, f_i = data[i]
ans = c_i + s_i
if i == N - 2:
print(ans)
continue
for j in range(i + 1, N - 1):
c_j, s_j, f_j = data[j]
if ans >= s_j:
tmp = (ans - s_j) % f_j
ans += c_j if tmp == 0 else f_j - tmp + c_j
else:
ans += s_j - ans + c_j
print(ans)
print(0)
authors: ["[email protected]"] | author_id: (not shown)
----------------------------------------
path: /src/snakemake/rules/bcftools_stats.smk
repo_name: guillaumecharbonnier/mw-lib | branch_name: refs/heads/master
blob_id: eb46806d343bb2682eec400c2d940d7baf75857f
directory_id: 819ea4165220ecaed5b168e37773282195501a38
content_id: d5aa48244c3c2de15c3a78a09a1ed77da8eda257
snapshot_id: 662fe4f1ca28ed48554971d5fbf47bb11bb210d9
revision_id: 870f082431fb92d0aeb0a28f9f1e88c448aebd8a
detected_licenses: ["MIT"] | license_type: permissive
visit_date: 2023-07-06T05:36:42.436637 | revision_date: 2023-06-26T10:34:38 | committer_date: 2023-06-26T10:34:38
github_id: 198,626,514 | star_events_count: 0 | fork_events_count: 1
gha_license_id: MIT | gha_event_created_at: 2023-04-14T07:06:10 | gha_created_at: 2019-07-24T11:57:07 | gha_language: Python
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 1,285 | extension: smk
content:
rule bcftools_stats_dev:
input:
"out/scalpel/discovery_--single_--bed_chr1:10000000-60000000_fa-genome-hg19-main-chr/samtools/index/samtools/sort/samtools/view_sam_to_bam/bwa/mem_se_fa-genome-hg19-main-chr/gunzip/to-stdout/ln/alias/sst/all_samples/fastq/Jurkat_SRR1057274_H3K27ac/variants.indel.vcf",
"out/scalpel/discovery_--single_--bed_chr1:10000000-60000000_fa-genome-hg19-main-chr/samtools/index/samtools/sort/samtools/view_sam_to_bam/bwa/mem_se_fa-genome-hg19-main-chr/gunzip/to-stdout/ln/alias/sst/all_samples/fastq/T11C_H3K27ac/variants.indel.vcf",
"out/scalpel/discovery_--single_--bed_chr1:10000000-60000000_fa-genome-hg19-main-chr/samtools/index/samtools/sort/samtools/view_sam_to_bam/bwa/mem_se_fa-genome-hg19-main-chr/gunzip/to-stdout/ln/alias/sst/all_samples/fastq/T11C_all_DNA_samples/variants.indel.vcf"
output:
"out/bcftools/stats_dev/stats.out"
conda:
"../envs/bcftools.yaml"
shell:
"bcftools stats {input} > {output}"
rule bcftools_plot_vcfstats:
input:
"out/bcftools/stats_dev/stats.out"
output:
touch("out/bcftools/plot-vcfstats/done")
conda:
"../envs/bcftools.yaml"
shell:
"""
plot-vcfstats -p out/bcftools/plot-vcfstats {input}
"""
authors: ["[email protected]"] | author_id: (not shown)
----------------------------------------
path: /fll/backend/abstract.py
repo_name: matech96/federated-learning-library | branch_name: refs/heads/main
blob_id: 124696fb951a7fffc9df0dab4298e430e899cbba
directory_id: 8e46d95e040f498a9bae3d3bb6c645728cf51bda
content_id: 6b28ee5b0a139bf4e03a39c590722251df5c378a
snapshot_id: 815a0fd52274d7eb05a6707d03eea1d313b8d5dd
revision_id: 7ce769bab8cc62aaee6054aaef992e845ec9362f
detected_licenses: ["Apache-2.0"] | license_type: permissive
visit_date: 2023-03-19T03:09:29.773009 | revision_date: 2021-02-28T14:48:03 | committer_date: 2021-02-28T14:48:03
github_id: 301,724,428 | star_events_count: 0 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 10,288 | extension: py
content:
"""Our code supports multiple deep learning frameworks. This module describes the interfaces, that we have to create
for a framework."""
from __future__ import annotations
from abc import ABC, abstractmethod
from pathlib import Path
from typing import List, Dict, Optional, Callable, Tuple
class AbstractBackendFactory(ABC):
"""An abstract factory, that puts the deep learning framework specific object into a container, that can be
freely passed around in the rest of the code. Needs to be inherited for a specific framework.
.. seealso:: :class:`~AbstractBackendOperations`
"""
@classmethod
@abstractmethod
def create_data_loader(cls, data_loader) -> AbstractDataLoader:
"""Stores the data loader in an :class:`~AbstractDataLoader`.
:param data_loader: Deep learning framework specific data loader.
:return: Wrapped data loader.
"""
raise NotImplementedError()
@classmethod
@abstractmethod
def create_loss(cls, loss, name: str) -> AbstractLoss:
"""Stores the loss function in an :class:`~AbstractLoss`.
:param loss: Deep learning framework specific loss function.
:param name: Name of the loss function.
:return: Wrapped loss function.
"""
raise NotImplementedError()
@classmethod
@abstractmethod
def create_metric(cls, metric, name: str) -> AbstractMetric:
"""Stores the metric in an :class:`~AbstractMetric`.
:param metric: Deep learning framework specific metric.
:param name: Name of the metric.
:return: Wrapped metric.
"""
raise NotImplementedError()
@classmethod
@abstractmethod
def create_model(cls, model) -> AbstractModel:
"""Stores the model in an :class:`~AbstractModel`.
:param model: Deep learning framework specific model.
:return: Wrapped model.
"""
raise NotImplementedError()
@classmethod
@abstractmethod
def create_model_state(cls, model_state) -> AbstractModelState:
"""Stores the model state in an :class:`~AbstractModelState`.
:param model_state: Deep learning framework specific model state.
:return: Wrapped model state.
"""
raise NotImplementedError()
@classmethod
@abstractmethod
def create_opt(cls, opt) -> AbstractOpt:
"""Stores the optimizer in an :class:`~AbstractOpt`.
:param opt: Deep learning framework specific optimizer.
:return: Wrapped optimizer.
"""
raise NotImplementedError()
@classmethod
@abstractmethod
def create_opt_state(cls, opt_state) -> AbstractOptState:
"""Stores the optimizer state in an :class:`~AbstractOptState`.
:param opt_state: Deep learning framework specific optimizer state.
:return: Wrapped optimizer state.
"""
raise NotImplementedError()
class AbstractBackendOperations(ABC):
"""The deep learning framework specific calculations - that can't be linked to a specific class - are collected in
this class. Needs to be inherited for a specific framework.
.. seealso:: :class:`~AbstractBackendFactory`
"""
@classmethod
@abstractmethod
def train_epoch(cls, model: AbstractModel, opt: AbstractOpt, loss: AbstractLoss, # noqa: R0913
data_loader: AbstractDataLoader, metrics: List[AbstractMetric]) -> Dict[str, float]: # noqa: R0913
"""Trains the model for 1 round.
:param model: The model to be trained.
:param opt: The optimization algorithm.
:param loss: The loss function.
:param data_loader: The training data.
:param metrics: The metrics to be measured.
:return: Dictionary, where the key is the name of the metric and the value is a float or int.
"""
raise NotImplementedError()
@classmethod
@abstractmethod
def eval(cls, model: AbstractModel, data_loader: AbstractDataLoader, metrics: List[AbstractMetric]) \
-> Dict[str, float]:
"""Evaluates the model on the provided data.
:param model: Model to be evaluated.
:param data_loader: Data to evaluate on. (Should include preprocessing.)
:param metrics: Metrics of the evaluation.
:return: Dictionary, where the key is the name of the metric and the value is a float or int.
"""
raise NotImplementedError()
@classmethod
@abstractmethod
def cumulative_avg_model_state(cls, state_0: Optional[AbstractModelState], state_1: AbstractModelState,
n_states_0: int) -> AbstractModelState:
"""This function is useful to calculate the average of many model states, but without needing to keep all of
them in memory. With this function you only need two model states simultaneously to calculate the average.
        :param state_0: State from previous cumulative steps. Can be None; if None, state_1 is returned.
:param state_1: New state.
:param n_states_0: Number of states averaged in state_0.
:return: The uniform average of all the states in state_0 and state_1.
"""
raise NotImplementedError()
@classmethod
@abstractmethod
def cumulative_avg_opt_state(cls, state_0: Optional[AbstractOptState], state_1: AbstractOptState,
n_states_0: int) -> AbstractOptState:
"""This function is useful to calculate the average of many optimizer states, but without needing to keep all of
them in memory. With this function you only need two optimizer states simultaneously to calculate the average.
        :param state_0: State from previous cumulative steps. Can be None; if None, state_1 is returned.
:param state_1: New state.
:param n_states_0: Number of states averaged in state_0.
:return: The uniform average of all the states in state_0 and state_1.
"""
raise NotImplementedError()
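# A minimal illustrative sketch (not part of this module) of the running
# average the cumulative_avg_* hooks above are expected to compute, assuming
# the state objects support scalar multiplication and addition:
#
#     def cumulative_avg(state_0, state_1, n_states_0):
#         if state_0 is None:
#             return state_1
#         # state_0 already averages n_states_0 states; fold in one more
#         return (state_0 * n_states_0 + state_1) / (n_states_0 + 1)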
class AbstractModelOptFactory(ABC):
"""Abstract class for creating a framework specific model and optimizer."""
def __init__(self, model_cls: Callable, opt_cls: Callable):
"""
:param model_cls: Callable, that returns the framework specific model.
:param opt_cls: Callable, that returns the framework specific optimizer.
"""
self.model_cls = model_cls
self.opt_cls = opt_cls
@abstractmethod
def make_objects(self) -> Tuple[AbstractModel, AbstractOpt]:
"""
:return: The created model and optimizer in a tuple.
"""
raise NotImplementedError()
class AbstractDataLoader(ABC):
"""Abstract class for containing a framework specific data loader. A data loader can yield a batch of data for
    training or inference. In federated learning, each client has its own data loader. Needs to be inherited for a
specific framework.
.. seealso:: :class:`~AbstractBackendFactory`
"""
class AbstractLoss(ABC):
"""Abstract class for containing a framework specific, callable loss function and its name. Needs to be inherited
for a specific framework.
.. seealso:: :class:`~AbstractBackendFactory`
"""
def __init__(self, name: str):
"""
:param name: Name of the loss function.
"""
self.name = name
class AbstractMetric(ABC):
"""Abstract class for containing a framework specific, callable metric function. Needs to be inherited for a
specific framework.
.. seealso:: :class:`~AbstractBackendFactory`
"""
def __init__(self, name: str):
"""
:param name: Name of the metric.
"""
self.name = name
class AbstractModel(ABC):
"""Abstract class for containing a framework specific a model. Needs to be inherited for a specific framework.
.. seealso:: :class:`~AbstractBackendFactory`
"""
@abstractmethod
def get_state(self) -> AbstractModelState:
"""Returns the state of the model.
:return: State of the model.
"""
@abstractmethod
def load_state(self, state: AbstractModelState):
"""Loads the state of the model.
:param state: State of the model.
"""
class AbstractModelState(ABC):
"""Abstract class for containing a framework specific model state. The model state is a snapshot of the model
    taken during its training. The model state doesn't include the optimizer. Needs to be inherited for a specific
framework.
.. seealso:: :class:`~AbstractBackendFactory` :class:`~AbstractOptState`
"""
@classmethod
@abstractmethod
def load(cls, path: Path) -> AbstractModelState:
"""Loads the model state from a file.
:param path: Path to the file.
:return: A new object with the loaded model state.
"""
@abstractmethod
def save(self, path: Path):
"""Save to a file.
:param path: Path to the file.
"""
class AbstractOpt(ABC):
"""Abstract class for containing a framework specific optimizer. Needs to be inherited for a specific framework.
.. seealso:: :class:`~AbstractBackendFactory`
"""
@abstractmethod
def get_state(self) -> AbstractOptState:
"""Returns the state of the optimizer.
:return: State of the optimizer.
"""
@abstractmethod
def load_state(self, state: AbstractOptState):
"""Loads the state of the optimizer.
:return: State of the optimizer.
"""
class AbstractOptState(ABC):
"""Abstract class for containing a framework specific optimizer state. The optimizer state is a snapshot of the
optimizer taken during training. In the case of momentum optimizer, the optimizer state is the momentum value.
Needs to be inherited for a specific framework.
.. seealso:: :class:`~AbstractBackendFactory` :class:`~AbstractModelState`
"""
@classmethod
@abstractmethod
def load(cls, path: Path) -> AbstractOptState:
"""Loads the state from the disk.
:param path: Path to the file.
:return: the loaded state
"""
@abstractmethod
def save(self, path: Path):
"""Saves the state to the disk.
:param path: Path to the file.
"""
authors: ["[email protected]"] | author_id: (not shown)
----------------------------------------
path: /backend/home/migrations/0002_load_initial_data.py
repo_name: lorence-crowdbotics/shiny-block-1 | branch_name: refs/heads/master
blob_id: 8936e6c360a6169bb413fbc6ec891dedb2385f3a
directory_id: c317312696645e061d955148058267dea10c9743
content_id: 147470d221ed4d67a3edd73ef542a9b807f60286
snapshot_id: 96dc1fb4af1bae7eb535d1db25430a8114c124eb
revision_id: 64452c85dd1ebb6437437a637f4dff0402f57a9c
detected_licenses: [] | license_type: no_license
visit_date: 2023-02-08T10:32:14.106980 | revision_date: 2021-01-04T11:51:18 | committer_date: 2021-01-04T11:51:18
github_id: 326,667,249 | star_events_count: 0 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 1,298 | extension: py
content:
from django.db import migrations
def create_customtext(apps, schema_editor):
CustomText = apps.get_model("home", "CustomText")
customtext_title = "Shiny Block"
CustomText.objects.create(title=customtext_title)
def create_homepage(apps, schema_editor):
HomePage = apps.get_model("home", "HomePage")
homepage_body = """
<h1 class="display-4 text-center">Shiny Block</h1>
<p class="lead">
This is the sample application created and deployed from the Crowdbotics app.
You can view list of packages selected for this application below.
</p>"""
HomePage.objects.create(body=homepage_body)
def create_site(apps, schema_editor):
Site = apps.get_model("sites", "Site")
custom_domain = "shiny-block-1.herokuapp.com"
site_params = {
"name": "Shiny Block",
}
if custom_domain:
site_params["domain"] = custom_domain
Site.objects.update_or_create(defaults=site_params, id=1)
class Migration(migrations.Migration):
dependencies = [
("home", "0001_initial"),
("sites", "0002_alter_domain_unique"),
]
operations = [
migrations.RunPython(create_customtext),
migrations.RunPython(create_homepage),
migrations.RunPython(create_site),
]
authors: ["[email protected]"] | author_id: (not shown)
----------------------------------------
path: /infertility/male_attr.py
repo_name: zhusui/ihme-modeling | branch_name: refs/heads/master
blob_id: 99f471c3fbb40c19903e3f7b38741578d0324710
directory_id: 2e74c7339c63385172629eaa84680a85a4731ee9
content_id: 59775495fac01fe5138790d83275bb10332c138e
snapshot_id: 04545182d0359adacd22984cb11c584c86e889c2
revision_id: dfd2fe2a23bd4a0799b49881cb9785f5c0512db3
detected_licenses: [] | license_type: no_license
visit_date: 2021-01-20T12:30:52.254363 | revision_date: 2016-10-11T00:33:36 | committer_date: 2016-10-11T00:33:36
github_id: null | star_events_count: 0 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 3,587 | extension: py
content:
import os
import argparse
os.chdir(os.path.dirname(os.path.realpath(__file__)))
from job_utils import draws, parsers
##############################################################################
# class to calculate attribution
##############################################################################
class AttributeMale(draws.SquareImport):
def __init__(self, me_map, **kwargs):
# super init
super(AttributeMale, self).__init__(**kwargs)
# store data by me in this dict key=me_id, val=dataframe
self.me_map = me_map
self.me_dict = {}
# import every input
for v in me_map.values():
inputs = v.get("srcs", {})
for me_id in inputs.values():
self.me_dict[me_id] = self.import_square(
gopher_what={"modelable_entity_ids": [me_id]},
source="dismod")
def calc_residual(self):
# compile keys
env_prim_key = self.me_map["env"]["srcs"]["prim"]
env_sec_key = self.me_map["env"]["srcs"]["sec"]
kline_bord_key = self.me_map["kline"]["srcs"]["bord"]
kline_mild_key = self.me_map["kline"]["srcs"]["mild"]
kline_asym_key = self.me_map["kline"]["srcs"]["asym"]
idio_prim_key = self.me_map["idio"]["trgs"]["prim"]
idio_sec_key = self.me_map["idio"]["trgs"]["sec"]
# sum up klinefelter
sigma_kline = (
self.me_dict[kline_bord_key] + self.me_dict[kline_mild_key] +
self.me_dict[kline_asym_key])
# subtract klinefleter from total primary
self.me_dict[idio_prim_key] = self.me_dict[env_prim_key] - sigma_kline
self.me_dict[idio_sec_key] = self.me_dict[env_sec_key]
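# Illustrative shape of the me_map argument, inferred from the key lookups in
# calc_residual above (the me_id values are placeholders):
#
#     {"env":   {"srcs": {"prim": <me_id>, "sec": <me_id>}},
#      "kline": {"srcs": {"bord": <me_id>, "mild": <me_id>, "asym": <me_id>}},
#      "idio":  {"trgs": {"prim": <me_id>, "sec": <me_id>}}}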
##############################################################################
# function to run attribution
##############################################################################
def male_attribution(me_map, year_id, out_dir):
# declare calculation dimensions
dim = AttributeMale.default_idx_dmnsns
dim["year_id"] = year_id
dim["measure_id"] = [5]
dim["sex_id"] = [1]
# run attribution
attributer = AttributeMale(me_map=me_map, idx_dmnsns=dim)
attributer.calc_residual()
# save results to disk
for mapper in me_map.values():
outputs = mapper.get("trgs", {})
for me_id in outputs.values():
fname = str(year_id[0]) + ".h5"
out_df = attributer.me_dict[me_id].reset_index()
out_df.to_hdf(os.path.join(out_dir, str(me_id), fname), key="data",
format="table", data_columns=dim.keys())
##############################################################################
# when called as a script
##############################################################################
if __name__ == "__main__":
# parse command line args
parser = argparse.ArgumentParser()
parser.add_argument("--me_map", help="json style string map of ops",
required=True, type=parsers.json_parser)
parser.add_argument("--out_dir", help="root directory to save stuff",
required=True)
parser.add_argument("--year_id", help="which location to use",
type=parsers.int_parser, nargs="*", required=True)
args = vars(parser.parse_args())
# call function
male_attribution(me_map=args["me_map"], out_dir=args["out_dir"],
year_id=args["year_id"])
authors: ["[email protected]"] | author_id: (not shown)
----------------------------------------
path: /reader/migrations/0008_chapter_views.py
repo_name: mangadventure/MangAdventure | branch_name: refs/heads/master
blob_id: d592e11371f738e326662c96b83751e6fe5f369f
directory_id: f6bc15034ee1809473279c87e13cc3131bc3675c
content_id: cb0aeea0af86abf7285e39283b5f7ac21f6a41cf
snapshot_id: d92e4c184d1ad91983cf650aa7fa584ba9b977ce
revision_id: e9da91d0309eacca9fbac8ef72356fe35407b795
detected_licenses: ["MIT"] | license_type: permissive
visit_date: 2023-07-20T04:54:49.215457 | revision_date: 2023-07-14T15:34:20 | committer_date: 2023-07-14T15:34:20
github_id: 144,012,269 | star_events_count: 70 | fork_events_count: 16
gha_license_id: MIT | gha_event_created_at: 2022-08-13T12:22:39 | gha_created_at: 2018-08-08T12:43:19 | gha_language: Python
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 444 | extension: py
content:
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [('reader', '0007_series_licensed')]
operations = [
migrations.AddField(
model_name='chapter',
name='views',
field=models.PositiveIntegerField(
db_index=True, default=0, editable=False,
help_text='The total views of the chapter.'
),
),
]
authors: ["[email protected]"] | author_id: (not shown)
----------------------------------------
path: /instagram/urls.py
repo_name: AnumAsif/instagram | branch_name: refs/heads/master
blob_id: 7f7ae6f458cc8b68f26e89c645ab071ca90deb65
directory_id: 0199004d124f05c820a39af7914d57c3b53a44ff
content_id: 96937019d58b89e8d2fa9c66381ddbb822b51d97
snapshot_id: ae6e4ffb7ce7aa2df5025f19fd25eef73fb62702
revision_id: 619731f799109b216e6ae0f75a4edd8057aa340c
detected_licenses: ["MIT"] | license_type: permissive
visit_date: 2022-12-10T13:03:39.219406 | revision_date: 2019-03-14T09:30:33 | committer_date: 2019-03-14T09:30:33
github_id: 174,493,800 | star_events_count: 0 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: 2022-12-08T04:51:45 | gha_created_at: 2019-03-08T07:58:40 | gha_language: Python
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 1,046 | extension: py
content:
from django.conf.urls import url, include
from django.contrib.auth import views as auth_views
from django.conf import settings
from django.conf.urls.static import static
from . import views
urlpatterns = [
url(r'^home/$', views.home, name = 'home'),
url(r'^$',views.signup, name='signup'),
url(r'^activate/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',
views.activate, name='activate'),
url(r'profile/(?P<username>\w+)', views.profile, name='profile'),
url(r'post/$', views.addpost, name='add_post'),
url(r'^likes/(\d+)/$', views.like, name='like'),
url(r'^comments/(\d+)/$', views.comment, name='add_comment'),
url(r'^search/', views.search, name="search"),
url(r'^accounts/edit/',views.edit_profile, name='edit_profile'),
url(r'^image/(?P<image_id>\d+)', views.image, name='image'),
url(r'^profile/(?P<user>\w+)/$', views.follow, name='follow')
]
if settings.DEBUG:
urlpatterns+= static(settings.MEDIA_URL, document_root = settings.MEDIA_ROOT)
authors: ["[email protected]"] | author_id: (not shown)
----------------------------------------
path: /python/testData/blockEvaluator/some_package/module_in_package.py
repo_name: JetBrains/intellij-community | branch_name: refs/heads/master
blob_id: 2733e509636cffd4ec0fd9d7f66812a89293095d
directory_id: eb9f655206c43c12b497c667ba56a0d358b6bc3a
content_id: 1ed3bcb7cff0505bc6deccb4d4d97bfc2799be8b
snapshot_id: 2ed226e200ecc17c037dcddd4a006de56cd43941
revision_id: 05dbd4575d01a213f3f4d69aa4968473f2536142
detected_licenses: ["Apache-2.0"] | license_type: permissive
visit_date: 2023-09-03T17:06:37.560889 | revision_date: 2023-09-03T11:51:00 | committer_date: 2023-09-03T12:12:27
github_id: 2,489,216 | star_events_count: 16,288 | fork_events_count: 6,635
gha_license_id: Apache-2.0 | gha_event_created_at: 2023-09-12T07:41:58 | gha_created_at: 2011-09-30T13:33:05 | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 66 | extension: py
content:
__author__ = 'Ilya.Kazakevich'
VARIABLE_IN_PACKAGE_MODULE = "foo"
authors: ["[email protected]"] | author_id: (not shown)
----------------------------------------
path: /contours.py
repo_name: MuAuan/read_keiba_paper | branch_name: refs/heads/master
blob_id: 77f2499232cafc4fe32a56e198bb85a9b79b4c31
directory_id: 3fbbd07f588aaeca78f18a4567b2173ce0154a85
content_id: 6a57fb5fac998614e8a1a363cd6813b7e554a908
snapshot_id: d0af6e7454da04e89ad3080649df0436f09e26a6
revision_id: b7c523f5880d1b4d1d397450baaefddbbdd6cfff
detected_licenses: ["MIT"] | license_type: permissive
visit_date: 2020-06-10T23:51:11.406246 | revision_date: 2019-07-30T07:20:57 | committer_date: 2019-07-30T07:20:57
github_id: 193,795,441 | star_events_count: 0 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 2,974 | extension: py
content:
import numpy as np
import cv2
import matplotlib.pyplot as plt
import os, shutil, time, cv2, math
import csv
from operator import attrgetter
from operator import itemgetter
output_root_path = './text_detection/data'
if not os.path.exists(output_root_path):os.mkdir(output_root_path)
output_root_path += '/takaraduka'
if not os.path.exists(output_root_path):os.mkdir(output_root_path)
output_root_path += '/20190622_9'
if not os.path.exists(output_root_path):os.mkdir(output_root_path)
output_root_path += '/'
input_original_data = './text_detection/data/raw/kiseki.jpg' #kiseki_oosaka.jpg'
img = cv2.imread(input_original_data)
cv2.imshow("img",img)
h, s, gray = cv2.split(cv2.cvtColor(img, cv2.COLOR_BGR2HSV))
size = (3, 3)
blur = cv2.GaussianBlur(gray, size, 0)
cv2.imwrite(output_root_path + '1_blur.jpg', blur)
lap = cv2.Laplacian(blur, cv2.CV_8UC1)
cv2.imwrite(output_root_path + '2_laplacian.jpg', lap)
# Otsu's thresholding
ret2, th2 = cv2.threshold(lap, 0, 255, cv2.THRESH_BINARY+cv2.THRESH_OTSU) #lap
cv2.imwrite(output_root_path + '3_th2.jpg', th2)
kernel = np.ones((3, 8), np.uint8) #(3, 20)
closing = cv2.morphologyEx(th2, cv2.MORPH_CLOSE, kernel)
cv2.imwrite(output_root_path + '4_closing.jpg', closing)
kernel = np.ones((3, 3), np.uint8)
dilation = cv2.dilate(closing, kernel, iterations = 1) #closing
cv2.imwrite(output_root_path + '5_dilation.jpg', dilation)
erosion = cv2.erode(dilation, kernel, iterations = 1)
cv2.imwrite(output_root_path + '6_erosion.jpg', erosion)
lap2 = cv2.Laplacian(erosion, cv2.CV_8UC1)
cv2.imwrite(output_root_path + '7_laplacian_2.jpg', lap2)
contours, hierarchy = cv2.findContours(lap2, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE) #CV_RETR_TREE
min_area = 30 #img.shape[0] * img.shape[1] * 1e-3 #-4
max_area = 2500 #img.shape[0] * img.shape[1] * 1e-3 #-4
tmp = img.copy()
tmp2 = img.copy()
rect=[]
for i, contour in enumerate(contours):
re = cv2.boundingRect(contour)
print(re)
rect.append(re)
rect=sorted(rect, key=itemgetter(0))
with open(output_root_path +'./rect.csv', 'w', newline='') as f:
writer = csv.writer(f)
#if len(contours) > 0:
for i in range(len(rect)):
print(rect[i][0],rect[i][1],rect[i][2],rect[i][3])
if rect[i][2] < 5 or rect[i][3] < 5:
continue
area = (rect[i][3])*(rect[i][2])
if area < min_area or area > max_area:
continue
roi = tmp[rect[i][1]:rect[i][1]+rect[i][3], rect[i][0]:rect[i][0]+rect[i][2]]
roi=cv2.resize(roi,(5*rect[i][2],5*rect[i][3]),interpolation=cv2.INTER_CUBIC)
cv2.imshow("roi",roi)
img_dst=cv2.rectangle(tmp, (rect[i][0], rect[i][1]), (rect[i][0]+rect[i][2], rect[i][1]+rect[i][3]), (0, 255, 0), 2)
cv2.imshow("IMAGE",img_dst)
writer.writerow(map(lambda x: x, rect[i]))
plt.imshow(tmp)
plt.pause(1)
cv2.imwrite(output_root_path + '8_1_findContours.jpg', tmp)
cv2.imwrite(output_root_path + '8_2_findContours.jpg', tmp2)
authors: ["[email protected]"] | author_id: (not shown)
----------------------------------------
path: /configs/example/gem5_library/x86-gapbs-benchmarks.py
repo_name: H2020-COSSIM/cgem5 | branch_name: refs/heads/main
blob_id: 1709b78294401d0770a729fe519e3f8fa7e610d5
directory_id: f66a33f8cdd8286320da730be67c89ee00d83d8d
content_id: 6ab37479f92b9a71af448ca6425938b461df8701
snapshot_id: 0d5812632757e6146f7852c9bf4abe4e9628296a
revision_id: 1222cc0c5618875e048f288e998187c236508a64
detected_licenses: ["BSD-3-Clause", "LicenseRef-scancode-proprietary-license", "LGPL-2.0-or-later", "MIT"] | license_type: permissive
visit_date: 2023-05-13T14:08:01.665322 | revision_date: 2023-05-08T08:39:50 | committer_date: 2023-05-08T08:39:50
github_id: 468,039,890 | star_events_count: 3 | fork_events_count: 2
gha_license_id: BSD-3-Clause | gha_event_created_at: 2022-10-12T14:29:33 | gha_created_at: 2022-03-09T18:05:40 | gha_language: C++
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 8,411 | extension: py
content:
# Copyright (c) 2021 The Regents of the University of California.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
Script to run GAPBS benchmarks with gem5. The script expects the
benchmark program and the simulation size to run. The input is in the format
<benchmark_prog> <size> <synthetic>
The system is fixed with 2 CPU cores, MESI Two Level system cache and 3 GB
DDR4 memory. It uses the x86 board.
This script will count the total number of instructions executed
in the ROI. It also tracks how much wallclock and simulated time.
Usage:
------
```
scons build/X86/gem5.opt
./build/X86/gem5.opt \
    configs/example/gem5_library/x86-gapbs-benchmarks.py \
--benchmark <benchmark_name> \
--synthetic <synthetic> \
--size <simulation_size/graph_name>
```
"""
import argparse
import time
import sys
import m5
from m5.objects import Root
from gem5.utils.requires import requires
from gem5.components.boards.x86_board import X86Board
from gem5.components.memory import DualChannelDDR4_2400
from gem5.components.processors.simple_switchable_processor import (
SimpleSwitchableProcessor,
)
from gem5.components.processors.cpu_types import CPUTypes
from gem5.isas import ISA
from gem5.coherence_protocol import CoherenceProtocol
from gem5.resources.resource import Resource
from gem5.simulate.simulator import Simulator
from gem5.simulate.exit_event import ExitEvent
requires(
isa_required=ISA.X86,
coherence_protocol_required=CoherenceProtocol.MESI_TWO_LEVEL,
kvm_required=True,
)
# Following are the list of benchmark programs for gapbs
benchmark_choices = ["cc", "bc", "tc", "pr", "bfs"]
synthetic_choices = ["0", "1"]
size_choices = [
"1",
"2",
"3",
"4",
"5",
"6",
"7",
"8",
"9",
"10",
"11",
"12",
"13",
"14",
"15",
"16",
"USA-road-d.NY.gr",
]
parser = argparse.ArgumentParser(
description="An example configuration script to run the gapbs benchmarks."
)
# The only positional argument accepted is the benchmark name in this script.
parser.add_argument(
"--benchmark",
type=str,
required=True,
help="Input the benchmark program to execute.",
choices=benchmark_choices,
)
parser.add_argument(
"--synthetic",
type=str,
required=True,
help="Synthetic Graph:: 1: synthetic graph is True; 0: real graph",
choices=synthetic_choices,
)
parser.add_argument(
"--size",
type=str,
required=True,
help="Graph Size:: If synthetic is True, then specify a size [1 .. 15]. \
Otherwise, specify a graph name [USA-road-d.NY.gr]",
choices=size_choices,
)
args = parser.parse_args()
# Setting up all the fixed system parameters here
# Caches: MESI Two Level Cache Hierarchy
from gem5.components.cachehierarchies.ruby.mesi_two_level_cache_hierarchy import (
MESITwoLevelCacheHierarchy,
)
cache_hierarchy = MESITwoLevelCacheHierarchy(
l1d_size="32kB",
l1d_assoc=8,
l1i_size="32kB",
l1i_assoc=8,
l2_size="256kB",
l2_assoc=16,
num_l2_banks=2,
)
# Memory: Dual Channel DDR4 2400 DRAM device.
# The X86 board only supports 3 GB of main memory.
memory = DualChannelDDR4_2400(size="3GB")
# Here we setup the processor. This is a special switchable processor in which
# a starting core type and a switch core type must be specified. Once a
# configuration is instantiated a user may call `processor.switch()` to switch
# from the starting core types to the switch core types. In this simulation
# we start with KVM cores to simulate the OS boot, then switch to the Timing
# cores for the command we wish to run after boot.
processor = SimpleSwitchableProcessor(
starting_core_type=CPUTypes.KVM,
switch_core_type=CPUTypes.TIMING,
isa=ISA.X86,
num_cores=2,
)
# Here we setup the board. The X86Board allows for Full-System X86 simulations
board = X86Board(
clk_freq="3GHz",
processor=processor,
memory=memory,
cache_hierarchy=cache_hierarchy,
)
# Here we set the FS workload, i.e., gapbs benchmark program
# After the simulation has ended, you may inspect
# `m5out/system.pc.com_1.device` to see the guest's stdout, if any.
# After the system boots, we execute the benchmark program and wait till the
# ROI `workbegin` annotation is reached. We start collecting the number of
# committed instructions till ROI ends (marked by `workend`). We then finish
# executing the rest of the benchmark.
# GAPBS benchmarks can be run using a synthetic graph
if args.synthetic == "1":
if args.size == "USA-road-d.NY.gr":
print(
"fatal: cannot use a real graph with --synthetic 1",
file=sys.stderr,
)
exit(-1)
command = "./{} -g {}\n".format(args.benchmark, args.size)
else:
command = "./{} -sf ../{}".format(args.benchmark, args.size)
board.set_kernel_disk_workload(
# The x86 linux kernel will be automatically downloaded to the
# `~/.cache/gem5` directory if not already present.
    # The GAPBS benchmarks were tested with kernel version 4.19.83
kernel=Resource("x86-linux-kernel-4.19.83"),
# The x86-gapbs image will be automatically downloaded to the
# `~/.cache/gem5` directory if not already present.
disk_image=Resource("x86-gapbs"),
readfile_contents=command,
)
def handle_workbegin():
print("Done booting Linux")
print("Resetting stats at the start of ROI!")
m5.stats.reset()
global start_tick
start_tick = m5.curTick()
processor.switch()
yield False # E.g., continue the simulation.
def handle_workend():
print("Dump stats at the end of the ROI!")
m5.stats.dump()
yield True # Stop the simulation. We're done.
simulator = Simulator(
board=board,
on_exit_event={
ExitEvent.WORKBEGIN: handle_workbegin(),
ExitEvent.WORKEND: handle_workend(),
},
)
# We maintain the wall clock time.
globalStart = time.time()
print("Running the simulation")
print("Using KVM cpu")
# There are a few things to note regarding the GAPBS benchmarks. The first is
# that there are several ROI annotations in the code present in the disk image.
# These ROI begin and end calls are inside a loop. Therefore, we only simulate
# the first ROI annotation in detail. The X86Board currently does not support
# `work items started count reached`.
simulator.run()
end_tick = m5.curTick()
# Since we only simulated the first ROI in detail, the simulation is over at
# this point. We acknowledge that all the simulation
# events were successful.
print("All simulation events were successful.")
# We print the final simulation statistics.
print("Done with the simulation")
print()
print("Performance statistics:")
print("Simulated time in ROI: %.2fs" % ((end_tick - start_tick) / 1e12))
print(
"Ran a total of", simulator.get_current_tick() / 1e12, "simulated seconds"
)
print(
"Total wallclock time: %.2fs, %.2f min"
% (time.time() - globalStart, (time.time() - globalStart) / 60)
)
|
[
"[email protected]"
] | |
f9097b43bb6e05b4023b3c72edab16670ec82815
|
1d928c3f90d4a0a9a3919a804597aa0a4aab19a3
|
/python/mopidy/2015/12/test_status.py
|
25b8dd7242990ccd7bce9f3046630e7351b4a834
|
[] |
no_license
|
rosoareslv/SED99
|
d8b2ff5811e7f0ffc59be066a5a0349a92cbb845
|
a062c118f12b93172e31e8ca115ce3f871b64461
|
refs/heads/main
| 2023-02-22T21:59:02.703005 | 2021-01-28T19:40:51 | 2021-01-28T19:40:51 | 306,497,459 | 1 | 1 | null | 2020-11-24T20:56:18 | 2020-10-23T01:18:07 | null |
UTF-8
|
Python
| false | false | 8,198 |
py
|
from __future__ import absolute_import, unicode_literals
import unittest
import pykka
from mopidy import core
from mopidy.core import PlaybackState
from mopidy.internal import deprecation
from mopidy.models import Track
from mopidy.mpd import dispatcher
from mopidy.mpd.protocol import status
from tests import dummy_audio, dummy_backend, dummy_mixer
PAUSED = PlaybackState.PAUSED
PLAYING = PlaybackState.PLAYING
STOPPED = PlaybackState.STOPPED
# FIXME migrate to using protocol.BaseTestCase instead of status.stats
# directly?
class StatusHandlerTest(unittest.TestCase):
def setUp(self): # noqa: N802
config = {
'core': {
'max_tracklist_length': 10000,
}
}
self.audio = dummy_audio.create_proxy()
self.mixer = dummy_mixer.create_proxy()
self.backend = dummy_backend.create_proxy(audio=self.audio)
with deprecation.ignore():
self.core = core.Core.start(
config,
audio=self.audio,
mixer=self.mixer,
backends=[self.backend]).proxy()
self.dispatcher = dispatcher.MpdDispatcher(core=self.core)
self.context = self.dispatcher.context
def tearDown(self): # noqa: N802
pykka.ActorRegistry.stop_all()
def set_tracklist(self, track):
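        # Register the track with the dummy backend's library so the core can
        # resolve its URI when adding it to the tracklist.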
self.backend.library.dummy_library = [track]
self.core.tracklist.add(uris=[track.uri]).get()
def test_stats_method(self):
result = status.stats(self.context)
self.assertIn('artists', result)
self.assertGreaterEqual(int(result['artists']), 0)
self.assertIn('albums', result)
self.assertGreaterEqual(int(result['albums']), 0)
self.assertIn('songs', result)
self.assertGreaterEqual(int(result['songs']), 0)
self.assertIn('uptime', result)
self.assertGreaterEqual(int(result['uptime']), 0)
self.assertIn('db_playtime', result)
self.assertGreaterEqual(int(result['db_playtime']), 0)
self.assertIn('db_update', result)
self.assertGreaterEqual(int(result['db_update']), 0)
self.assertIn('playtime', result)
self.assertGreaterEqual(int(result['playtime']), 0)
def test_status_method_contains_volume_with_na_value(self):
result = dict(status.status(self.context))
self.assertIn('volume', result)
self.assertEqual(int(result['volume']), -1)
def test_status_method_contains_volume(self):
self.core.mixer.set_volume(17)
result = dict(status.status(self.context))
self.assertIn('volume', result)
self.assertEqual(int(result['volume']), 17)
def test_status_method_contains_repeat_is_0(self):
result = dict(status.status(self.context))
self.assertIn('repeat', result)
self.assertEqual(int(result['repeat']), 0)
def test_status_method_contains_repeat_is_1(self):
self.core.tracklist.set_repeat(True)
result = dict(status.status(self.context))
self.assertIn('repeat', result)
self.assertEqual(int(result['repeat']), 1)
def test_status_method_contains_random_is_0(self):
result = dict(status.status(self.context))
self.assertIn('random', result)
self.assertEqual(int(result['random']), 0)
def test_status_method_contains_random_is_1(self):
self.core.tracklist.set_random(True)
result = dict(status.status(self.context))
self.assertIn('random', result)
self.assertEqual(int(result['random']), 1)
def test_status_method_contains_single(self):
result = dict(status.status(self.context))
self.assertIn('single', result)
self.assertIn(int(result['single']), (0, 1))
def test_status_method_contains_consume_is_0(self):
result = dict(status.status(self.context))
self.assertIn('consume', result)
self.assertEqual(int(result['consume']), 0)
def test_status_method_contains_consume_is_1(self):
self.core.tracklist.set_consume(True)
result = dict(status.status(self.context))
self.assertIn('consume', result)
self.assertEqual(int(result['consume']), 1)
def test_status_method_contains_playlist(self):
result = dict(status.status(self.context))
self.assertIn('playlist', result)
self.assertGreaterEqual(int(result['playlist']), 0)
self.assertLessEqual(int(result['playlist']), 2 ** 31 - 1)
def test_status_method_contains_playlistlength(self):
result = dict(status.status(self.context))
self.assertIn('playlistlength', result)
self.assertGreaterEqual(int(result['playlistlength']), 0)
def test_status_method_contains_xfade(self):
result = dict(status.status(self.context))
self.assertIn('xfade', result)
self.assertGreaterEqual(int(result['xfade']), 0)
def test_status_method_contains_state_is_play(self):
self.core.playback.state = PLAYING
result = dict(status.status(self.context))
self.assertIn('state', result)
self.assertEqual(result['state'], 'play')
def test_status_method_contains_state_is_stop(self):
self.core.playback.state = STOPPED
result = dict(status.status(self.context))
self.assertIn('state', result)
self.assertEqual(result['state'], 'stop')
def test_status_method_contains_state_is_pause(self):
self.core.playback.state = PLAYING
self.core.playback.state = PAUSED
result = dict(status.status(self.context))
self.assertIn('state', result)
self.assertEqual(result['state'], 'pause')
def test_status_method_when_playlist_loaded_contains_song(self):
self.set_tracklist(Track(uri='dummy:/a'))
self.core.playback.play().get()
result = dict(status.status(self.context))
self.assertIn('song', result)
self.assertGreaterEqual(int(result['song']), 0)
def test_status_method_when_playlist_loaded_contains_tlid_as_songid(self):
self.set_tracklist(Track(uri='dummy:/a'))
self.core.playback.play().get()
result = dict(status.status(self.context))
self.assertIn('songid', result)
self.assertEqual(int(result['songid']), 1)
def test_status_method_when_playing_contains_time_with_no_length(self):
self.set_tracklist(Track(uri='dummy:/a', length=None))
self.core.playback.play().get()
result = dict(status.status(self.context))
self.assertIn('time', result)
(position, total) = result['time'].split(':')
position = int(position)
total = int(total)
self.assertLessEqual(position, total)
def test_status_method_when_playing_contains_time_with_length(self):
self.set_tracklist(Track(uri='dummy:/a', length=10000))
        self.core.playback.play().get()
result = dict(status.status(self.context))
self.assertIn('time', result)
(position, total) = result['time'].split(':')
position = int(position)
total = int(total)
self.assertLessEqual(position, total)
def test_status_method_when_playing_contains_elapsed(self):
self.set_tracklist(Track(uri='dummy:/a', length=60000))
self.core.playback.play().get()
self.core.playback.pause()
self.core.playback.seek(59123)
result = dict(status.status(self.context))
self.assertIn('elapsed', result)
self.assertEqual(result['elapsed'], '59.123')
def test_status_method_when_starting_playing_contains_elapsed_zero(self):
self.set_tracklist(Track(uri='dummy:/a', length=10000))
self.core.playback.play().get()
self.core.playback.pause()
result = dict(status.status(self.context))
self.assertIn('elapsed', result)
self.assertEqual(result['elapsed'], '0.000')
def test_status_method_when_playing_contains_bitrate(self):
self.set_tracklist(Track(uri='dummy:/a', bitrate=3200))
self.core.playback.play().get()
result = dict(status.status(self.context))
self.assertIn('bitrate', result)
self.assertEqual(int(result['bitrate']), 3200)
|
[
"[email protected]"
] | |
4b9e181c5fe7aac13ac618f98100dd7d6fa48825
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p02678/s166039907.py
|
c6ab12488d55986930af9ba0117c10dc1bcf8bd2
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 423 |
py
|
N, M = map(int, input().split())
route = [[] for _ in range(N)]
sign = [0]*N
#print(route)
for i in range(M):
a,b = map(int, input().split())
route[a-1].append(b-1)
route[b-1].append(a-1)
#print(route)
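# BFS from room 0; sign[j] records the 1-indexed node from which room j
# was first reached, i.e. its predecessor on a shortest path to room 0.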
marked = {0}
q = [0]
for i in q:
for j in route[i]:
if j in marked:
continue
q.append(j)
marked.add(j)
sign[j] = i+1
print('Yes')
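# The input graph is presumably guaranteed to be connected, which is why
# 'Yes' is printed unconditionally.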
for i in sign[1:]:
    print(i)
|
[
"[email protected]"
] | |
e2cad7806874150acb3e00e8b4608cc4353915a9
|
ee81f6a67eba2d01ca4d7211630deb621c78189d
|
/my_profile/manage.py
|
b4a7631a6dfab5e670a48068f1d1f80597c22456
|
[] |
no_license
|
zkan/saksiam-django-workshop
|
d452fa0ffec687a287965988a9afe256222a7920
|
ccef5359e04693681040c482865350720fa49189
|
refs/heads/main
| 2023-06-07T00:17:02.524926 | 2021-06-28T15:43:22 | 2021-06-28T15:43:22 | 374,397,011 | 1 | 1 | null | 2021-06-28T15:43:23 | 2021-06-06T15:33:50 |
Python
|
UTF-8
|
Python
| false | false | 1,062 |
py
|
#!/usr/bin/env python
import os
import sys
from pathlib import Path
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django # noqa
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
    # This allows easy placement of apps within the interior
    # my_profile directory.
current_path = Path(__file__).parent.resolve()
sys.path.append(str(current_path / "my_profile"))
execute_from_command_line(sys.argv)
|
[
"[email protected]"
] | |
00706a0ce78e5eee13d25f0e6fae2b55a8f50fe9
|
6ce826375d4ecc7b15cd843a0bf85438db7d1389
|
/cbmcfs3_runner/scenarios/demand_plus_minus.py
|
05177670f00eb079ad601a2bbc5d8ca5533566b6
|
[
"MIT"
] |
permissive
|
xapple/cbmcfs3_runner
|
b34aaeeed34739d2d94d4ee485f4973403aa6843
|
ec532819e0a086077475bfd479836a378f187f6f
|
refs/heads/master
| 2021-12-26T07:06:02.073775 | 2021-10-25T14:15:53 | 2021-10-25T14:15:53 | 172,949,685 | 2 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,759 |
py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Written by Lucas Sinclair and Paul Rougieux.
JRC biomass Project.
Unit D1 Bioeconomy.
"""
# Built-in modules #
# First party modules #
from plumbing.cache import property_cached
# Internal modules #
from cbmcfs3_runner.scenarios.base_scen import Scenario
from cbmcfs3_runner.core.runner import Runner
###############################################################################
class DemandPlusMinus(Scenario):
"""
    This scenario is similar to the `static_demand` scenario, except that it
    multiplies said demand by a variable ratio before running the model,
    either increasing the demand or reducing it.
"""
demand_ratio = 1.0
@property_cached
def runners(self):
"""A dictionary of country codes as keys with a list of runners as values."""
# Create all runners #
result = {c.iso2_code: [Runner(self, c, 0)] for c in self.continent}
# Modify these runners #
for country in self.continent:
# Get the maker #
runner = result[country.iso2_code][-1]
dist_maker = runner.pre_processor.disturbance_maker
# Adjust the artificial ratios #
dist_maker.irw_artificial_ratio = self.demand_ratio
dist_maker.fw_artificial_ratio = self.demand_ratio
# Don't modify these runners #
return result
###############################################################################
class DemandPlus20(DemandPlusMinus):
short_name = 'demand_plus_20'
demand_ratio = 1.2
###############################################################################
class DemandMinus20(DemandPlusMinus):
short_name = 'demand_minus_20'
demand_ratio = 0.8
|
[
"[email protected]"
] | |
e7d80b8d7f8bb438a4b24d5e18068fb5eefecd31
|
43acaf9718b0a62594ed8e42b6c01099acd2d075
|
/apps/asistencias/migrations/0003_auto_20200402_1450.py
|
0de223587557a923ebbda4dff048d594001e03b5
|
[] |
no_license
|
JmSubelza/Demo
|
2f357889975c183b4a0f627330a80e535823faea
|
affceeadb87f1f14fb4e481851a1ac107e512f48
|
refs/heads/master
| 2023-05-14T18:16:38.153963 | 2020-04-28T16:15:27 | 2020-04-28T16:15:27 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 574 |
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2020-04-02 19:50
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('asistencias', '0002_auto_20200305_1646'),
]
operations = [
migrations.AlterModelOptions(
name='asistencias',
options={'verbose_name': 'asistencia', 'verbose_name_plural': 'asistencias'},
),
migrations.AlterModelTable(
name='asistencias',
table='asistencia',
),
]
|
[
"[email protected]"
] | |
916bde1a5aadf100d701152de0aa85f421789c8d
|
531c47c15b97cbcb263ec86821d7f258c81c0aaf
|
/sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_04_01/operations/_virtual_network_gateway_connections_operations.py
|
fa59556ffd80b020af121e5b5ac1dbeb307daf2f
|
[
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later",
"MIT"
] |
permissive
|
YijunXieMS/azure-sdk-for-python
|
be364d3b88204fd3c7d223df23756386ff7a3361
|
f779de8e53dbec033f98f976284e6d9491fd60b3
|
refs/heads/master
| 2021-07-15T18:06:28.748507 | 2020-09-04T15:48:52 | 2020-09-04T15:48:52 | 205,457,088 | 1 | 2 |
MIT
| 2020-06-16T16:38:15 | 2019-08-30T21:08:55 |
Python
|
UTF-8
|
Python
| false | false | 42,994 |
py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class VirtualNetworkGatewayConnectionsOperations(object):
"""VirtualNetworkGatewayConnectionsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_04_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def _create_or_update_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_connection_name, # type: str
parameters, # type: "models.VirtualNetworkGatewayConnection"
**kwargs # type: Any
):
# type: (...) -> "models.VirtualNetworkGatewayConnection"
cls = kwargs.pop('cls', None) # type: ClsType["models.VirtualNetworkGatewayConnection"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-04-01"
content_type = kwargs.pop("content_type", "application/json")
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayConnectionName': self._serialize.url("virtual_network_gateway_connection_name", virtual_network_gateway_connection_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = 'application/json'
# Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'VirtualNetworkGatewayConnection')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('VirtualNetworkGatewayConnection', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('VirtualNetworkGatewayConnection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/connections/{virtualNetworkGatewayConnectionName}'} # type: ignore
def begin_create_or_update(
self,
resource_group_name, # type: str
virtual_network_gateway_connection_name, # type: str
parameters, # type: "models.VirtualNetworkGatewayConnection"
**kwargs # type: Any
):
# type: (...) -> LROPoller
"""Creates or updates a virtual network gateway connection in the specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_connection_name: The name of the virtual network gateway
connection.
:type virtual_network_gateway_connection_name: str
:param parameters: Parameters supplied to the create or update virtual network gateway
connection operation.
:type parameters: ~azure.mgmt.network.v2019_04_01.models.VirtualNetworkGatewayConnection
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either VirtualNetworkGatewayConnection or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_04_01.models.VirtualNetworkGatewayConnection]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.VirtualNetworkGatewayConnection"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_connection_name=virtual_network_gateway_connection_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VirtualNetworkGatewayConnection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/connections/{virtualNetworkGatewayConnectionName}'} # type: ignore
def get(
self,
resource_group_name, # type: str
virtual_network_gateway_connection_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "models.VirtualNetworkGatewayConnection"
"""Gets the specified virtual network gateway connection by resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_connection_name: The name of the virtual network gateway
connection.
:type virtual_network_gateway_connection_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VirtualNetworkGatewayConnection, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_04_01.models.VirtualNetworkGatewayConnection
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.VirtualNetworkGatewayConnection"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-04-01"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayConnectionName': self._serialize.url("virtual_network_gateway_connection_name", virtual_network_gateway_connection_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = 'application/json'
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VirtualNetworkGatewayConnection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/connections/{virtualNetworkGatewayConnectionName}'} # type: ignore
def _delete_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_connection_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-04-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayConnectionName': self._serialize.url("virtual_network_gateway_connection_name", virtual_network_gateway_connection_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
# Construct and send request
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/connections/{virtualNetworkGatewayConnectionName}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
virtual_network_gateway_connection_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller
"""Deletes the specified virtual network Gateway connection.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_connection_name: The name of the virtual network gateway
connection.
:type virtual_network_gateway_connection_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_connection_name=virtual_network_gateway_connection_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/connections/{virtualNetworkGatewayConnectionName}'} # type: ignore
def _update_tags_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_connection_name, # type: str
parameters, # type: "models.TagsObject"
**kwargs # type: Any
):
# type: (...) -> "models.VirtualNetworkGatewayConnection"
cls = kwargs.pop('cls', None) # type: ClsType["models.VirtualNetworkGatewayConnection"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-04-01"
content_type = kwargs.pop("content_type", "application/json")
# Construct URL
url = self._update_tags_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayConnectionName': self._serialize.url("virtual_network_gateway_connection_name", virtual_network_gateway_connection_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = 'application/json'
# Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VirtualNetworkGatewayConnection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_tags_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/connections/{virtualNetworkGatewayConnectionName}'} # type: ignore
def begin_update_tags(
self,
resource_group_name, # type: str
virtual_network_gateway_connection_name, # type: str
parameters, # type: "models.TagsObject"
**kwargs # type: Any
):
# type: (...) -> LROPoller
"""Updates a virtual network gateway connection tags.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_connection_name: The name of the virtual network gateway
connection.
:type virtual_network_gateway_connection_name: str
:param parameters: Parameters supplied to update virtual network gateway connection tags.
:type parameters: ~azure.mgmt.network.v2019_04_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either VirtualNetworkGatewayConnection or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_04_01.models.VirtualNetworkGatewayConnection]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.VirtualNetworkGatewayConnection"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._update_tags_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_connection_name=virtual_network_gateway_connection_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VirtualNetworkGatewayConnection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/connections/{virtualNetworkGatewayConnectionName}'} # type: ignore
def _set_shared_key_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_connection_name, # type: str
parameters, # type: "models.ConnectionSharedKey"
**kwargs # type: Any
):
# type: (...) -> "models.ConnectionSharedKey"
cls = kwargs.pop('cls', None) # type: ClsType["models.ConnectionSharedKey"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-04-01"
content_type = kwargs.pop("content_type", "application/json")
# Construct URL
url = self._set_shared_key_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayConnectionName': self._serialize.url("virtual_network_gateway_connection_name", virtual_network_gateway_connection_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = 'application/json'
# Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'ConnectionSharedKey')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ConnectionSharedKey', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('ConnectionSharedKey', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_set_shared_key_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/connections/{virtualNetworkGatewayConnectionName}/sharedkey'} # type: ignore
def begin_set_shared_key(
self,
resource_group_name, # type: str
virtual_network_gateway_connection_name, # type: str
parameters, # type: "models.ConnectionSharedKey"
**kwargs # type: Any
):
# type: (...) -> LROPoller
"""The Put VirtualNetworkGatewayConnectionSharedKey operation sets the virtual network gateway
        connection shared key for the passed virtual network gateway connection in the specified resource
group through Network resource provider.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_connection_name: The virtual network gateway connection name.
:type virtual_network_gateway_connection_name: str
:param parameters: Parameters supplied to the Begin Set Virtual Network Gateway connection
            shared key operation through Network resource provider.
:type parameters: ~azure.mgmt.network.v2019_04_01.models.ConnectionSharedKey
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either ConnectionSharedKey or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_04_01.models.ConnectionSharedKey]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.ConnectionSharedKey"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._set_shared_key_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_connection_name=virtual_network_gateway_connection_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ConnectionSharedKey', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_set_shared_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/connections/{virtualNetworkGatewayConnectionName}/sharedkey'} # type: ignore
def get_shared_key(
self,
resource_group_name, # type: str
virtual_network_gateway_connection_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "models.ConnectionSharedKey"
"""The Get VirtualNetworkGatewayConnectionSharedKey operation retrieves information about the
specified virtual network gateway connection shared key through Network resource provider.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_connection_name: The virtual network gateway connection shared
key name.
:type virtual_network_gateway_connection_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ConnectionSharedKey, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_04_01.models.ConnectionSharedKey
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.ConnectionSharedKey"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-04-01"
# Construct URL
url = self.get_shared_key.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayConnectionName': self._serialize.url("virtual_network_gateway_connection_name", virtual_network_gateway_connection_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = 'application/json'
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ConnectionSharedKey', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_shared_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/connections/{virtualNetworkGatewayConnectionName}/sharedkey'} # type: ignore
def list(
self,
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["models.VirtualNetworkGatewayConnectionListResult"]
"""The List VirtualNetworkGatewayConnections operation retrieves all the virtual network gateways
connections created.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either VirtualNetworkGatewayConnectionListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_04_01.models.VirtualNetworkGatewayConnectionListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.VirtualNetworkGatewayConnectionListResult"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-04-01"
def prepare_request(next_link=None):
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = 'application/json'
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('VirtualNetworkGatewayConnectionListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/connections'} # type: ignore
def _reset_shared_key_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_connection_name, # type: str
parameters, # type: "models.ConnectionResetSharedKey"
**kwargs # type: Any
):
# type: (...) -> "models.ConnectionResetSharedKey"
cls = kwargs.pop('cls', None) # type: ClsType["models.ConnectionResetSharedKey"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-04-01"
content_type = kwargs.pop("content_type", "application/json")
# Construct URL
url = self._reset_shared_key_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayConnectionName': self._serialize.url("virtual_network_gateway_connection_name", virtual_network_gateway_connection_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = 'application/json'
# Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'ConnectionResetSharedKey')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ConnectionResetSharedKey', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_reset_shared_key_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/connections/{virtualNetworkGatewayConnectionName}/sharedkey/reset'} # type: ignore
def begin_reset_shared_key(
self,
resource_group_name, # type: str
virtual_network_gateway_connection_name, # type: str
parameters, # type: "models.ConnectionResetSharedKey"
**kwargs # type: Any
):
# type: (...) -> LROPoller
"""The VirtualNetworkGatewayConnectionResetSharedKey operation resets the virtual network gateway
        connection shared key for the passed virtual network gateway connection in the specified resource
group through Network resource provider.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_connection_name: The virtual network gateway connection reset
            shared key name.
:type virtual_network_gateway_connection_name: str
:param parameters: Parameters supplied to the begin reset virtual network gateway connection
shared key operation through network resource provider.
:type parameters: ~azure.mgmt.network.v2019_04_01.models.ConnectionResetSharedKey
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either ConnectionResetSharedKey or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_04_01.models.ConnectionResetSharedKey]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.ConnectionResetSharedKey"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._reset_shared_key_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_connection_name=virtual_network_gateway_connection_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ConnectionResetSharedKey', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_reset_shared_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/connections/{virtualNetworkGatewayConnectionName}/sharedkey/reset'} # type: ignore
|
[
"[email protected]"
] | |
03d71db6e4e6655b3342ac509b967885f5061b4e
|
3b504a983f1807ae7c5af51078bfab8c187fc82d
|
/common/GameEventsCommon/__init__.py
|
27c16f961bd42cf2edcbba2c6b9597fb0bf8b1cc
|
[] |
no_license
|
SEA-group/wowp_scripts
|
7d35fd213db95ea6b3dbd1ec6d3e0f13de86ba58
|
2fe54a44df34f2dcaa6860a23b835dcd8dd21402
|
refs/heads/master
| 2021-09-07T23:10:13.706605 | 2018-03-02T17:23:48 | 2018-03-02T17:23:48 | 117,280,141 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 70 |
py
|
# Embedded file name: scripts/common/GameEventsCommon/__init__.py
pass
|
[
"[email protected]"
] | |
fb34240f1d836c1b23a16c911636ea02d8ba2b96
|
3c57fa50b0f922a75c29403f82f45b4ea850520c
|
/ml_models/__init__.py
|
b81caec1686bb9328ba6f5b4a421a9381a516193
|
[
"MIT"
] |
permissive
|
sampathweb/imagnet-predictor-api
|
46ed12d82dd9220c364e7316bc30f320451315ed
|
76a939809621d2e00b957bfeb44ea70d51bcbfc5
|
refs/heads/master
| 2021-01-20T04:08:35.002586 | 2017-05-05T13:03:18 | 2017-05-05T13:03:18 | 89,644,070 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 47 |
py
|
from .classifiers import ImagenetModel # noqa
|
[
"[email protected]"
] | |
5abfe9793871b012e49279286068e81223a85910
|
44d8042c77a8f18c03bec92b619425a0787e3ddb
|
/Classes/py3intro/EXAMPLES/creating_dicts.py
|
fc407b28becb13ff2907caaa355638516327a687
|
[] |
no_license
|
Jgoschke86/Jay
|
3015613770d85d9fa65620cc1d2514357569b9bb
|
9a3cd87a5cff35c1f2a4fd6a14949c6f3694e3e2
|
refs/heads/master
| 2023-05-26T15:49:08.681125 | 2023-04-28T22:16:09 | 2023-04-28T22:16:09 | 215,616,981 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 710 |
py
|
#!/usr/bin/python3
d1 = dict()
d2 = {}
d3 = dict(red=5, blue=10, yellow=1, brown=5, black=12)
airports = { 'IAD': 'Dulles', 'SEA': 'Seattle-Tacoma',
'RDU': 'Raleigh-Durham', 'LAX': 'Los Angeles' }
pairs = [('Washington', 'Olympia'),('Virginia','Richmond'),
('Oregon','Salem'), ('California', 'Sacramento')]
state_caps = dict(pairs)
print(state_caps['Virginia'])
print(d3['red'])
print(airports['LAX'])
airports['SLC'] = 'Salt Lake City'
airports['LAX'] = 'Lost Angels'
print(airports['SLC'])
key = 'PSP'
if key in airports:
print(airports[key])
print(airports.get(key))
print(airports.get(key, 'NO SUCH AIRPORT'))
print(airports.setdefault(key, 'Palm Springs'))
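# setdefault() above inserted 'PSP' with the default 'Palm Springs', so this now prints True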
print(key in airports)
|
[
"[email protected]"
] | |
275e7ee7138368d6558257df45f8773d5533b5f9
|
8e858eea97c8654040552d190574acfc738b66e0
|
/tests/test_util.py
|
70f815d479ff6bf91ee0eb438df8782123904ec6
|
[
"MIT"
] |
permissive
|
Typecraft/casetagger
|
73f0105caa7ab8a84e7ae3f84720797966addd31
|
b311f33449c8796e656600e8c9f255b40c4c2dce
|
refs/heads/develop
| 2023-01-14T00:28:06.339217 | 2017-05-10T10:44:09 | 2017-05-10T10:44:09 | 65,621,998 | 1 | 0 |
MIT
| 2022-12-26T20:26:26 | 2016-08-13T14:47:36 |
Python
|
UTF-8
|
Python
| false | false | 6,592 |
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from casetagger.util import *
from typecraft_python.models import Text, Morpheme, Phrase, Word
class TestUtil(object):
@classmethod
def setup_class(cls):
pass
def test_get_morphemes_concatenated(self):
morpheme = Morpheme()
morpheme.glosses.append("1SG")
morpheme.glosses.append("3SG")
morpheme.glosses.append("2SG")
concatted = get_glosses_concatenated(morpheme)
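        # glosses were appended as 1SG, 3SG, 2SG; the expectation below implies
        # the helper sorts them alphabetically before joining with '.'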
assert concatted == "1SG.2SG.3SG"
def test_separate_texts_by_language(self):
text_1 = Text()
text_2 = Text()
text_3 = Text()
text_4 = Text()
text_5 = Text()
text_1.language = "nno"
text_2.language = "eng"
text_3.language = "eng"
text_4.language = "kri"
text_5.language = "nno"
texts = [text_1, text_2, text_3, text_4, text_5]
separated = separate_texts_by_languages(texts)
assert isinstance(separated, dict)
assert "nno" in separated
assert "eng" in separated
assert "kri" in separated
assert isinstance(separated["nno"], list)
assert isinstance(separated["eng"], list)
assert isinstance(separated["kri"], list)
assert text_1 in separated["nno"]
assert text_2 in separated["eng"]
assert text_3 in separated["eng"]
assert text_4 in separated["kri"]
assert text_5 in separated["nno"]
def test_get_text_words(self):
text = Text()
phrase = Phrase()
phrase_1 = Phrase()
word_1 = Word()
word_2 = Word()
word_3 = Word()
word_4 = Word()
phrase.add_word(word_1)
phrase.add_word(word_2)
phrase.add_word(word_3)
phrase_1.add_word(word_4)
text.add_phrase(phrase)
text.add_phrase(phrase_1)
words = get_text_words(text)
assert word_1 in words
assert word_2 in words
assert word_3 in words
assert word_4 in words
def test_get_text_morphemes(self):
text = Text()
phrase_1 = Phrase()
phrase_2 = Phrase()
word_1 = Word()
word_2 = Word()
word_3 = Word()
morpheme_1 = Morpheme()
morpheme_2 = Morpheme()
morpheme_3 = Morpheme()
morpheme_4 = Morpheme()
morpheme_5 = Morpheme()
phrase_1.add_word(word_1)
phrase_2.add_word(word_2)
phrase_2.add_word(word_3)
word_1.add_morpheme(morpheme_1)
word_1.add_morpheme(morpheme_2)
word_2.add_morpheme(morpheme_3)
word_2.add_morpheme(morpheme_4)
word_3.add_morpheme(morpheme_5)
text.add_phrase(phrase_1)
text.add_phrase(phrase_2)
morphemes = get_text_morphemes(text)
assert morpheme_1 in morphemes
assert morpheme_2 in morphemes
assert morpheme_3 in morphemes
assert morpheme_4 in morphemes
assert morpheme_5 in morphemes
def test_get_consecutive_sublists(self):
a_list = list(range(6))
sublists = get_consecutive_sublists_of_length(a_list, 2)
assert sublists == [[0, 1], [1, 2], [2, 3], [3, 4], [4, 5]]
sublists = get_consecutive_sublists_of_length(a_list, 4)
assert sublists == [[0, 1, 2, 3], [1, 2, 3, 4], [2, 3, 4, 5]]
def test_get_consecutive_sublists_around(self):
a_list = list(range(6)) # [0,1,2,3,4,5]
sublists = get_consecutive_sublists_of_length_around_index(a_list, 3, 3)
assert sublists == [[0, 1, 2], [1, 2, 4], [2, 4, 5]]
sublists = get_consecutive_sublists_of_length_around_index(a_list, 0, 3)
assert sublists == [[1, 2, 3]]
sublists = get_consecutive_sublists_of_length_around_index(a_list, 5, 2)
assert sublists == [[3, 4]]
sublists = get_consecutive_sublists_of_length_around_index(a_list, 3, 2)
assert sublists == [[1, 2], [2, 4], [4, 5]]
a_list = a_list + [6] # [0,1,2,3,4,5,6]
sublists = get_consecutive_sublists_of_length_around_index(a_list, 3, 3)
assert sublists == [[0, 1, 2], [1, 2, 4], [2, 4, 5], [4, 5, 6]]
sublists = get_consecutive_sublists_of_length_around_index(a_list, 3, 1)
assert sublists == [[2], [4]]
def test_get_all_prefix_sublists_upto_length(self):
a_list = list(range(6)) # [0,1,2,3,4,5]
sublists = get_all_prefix_sublists_upto_length(a_list, 3, 2)
assert sublists == [[2], [1, 2]]
sublists = get_all_prefix_sublists_upto_length(a_list, 0, 3)
assert sublists == []
sublists = get_all_prefix_sublists_upto_length(a_list, 5, 4)
assert sublists == [[4], [3, 4], [2, 3, 4], [1, 2, 3, 4]]
def test_get_all_suffix_sublists_upto_length(self):
a_list = list(range(6)) # [0,1,2,3,4,5]
sublists = get_all_suffix_sublists_upto_length(a_list, 3, 2)
assert sublists == [[4], [4, 5]]
sublists = get_all_suffix_sublists_upto_length(a_list, 5, 3)
assert sublists == []
sublists = get_all_suffix_sublists_upto_length(a_list, 0, 4)
assert sublists == [[1], [1, 2], [1, 2, 3], [1, 2, 3, 4]]
def test_get_surrounding_sublists_upto_length(self):
a_list = list(range(6)) #[0, 1, 2, 3, 4, 5]
sublists = get_surrounding_sublists_upto_length(a_list, 3, 1)
assert sublists == [[2, 4]]
sublists = get_surrounding_sublists_upto_length(a_list, 3, 2)
assert sublists == [[2, 4], [1, 2, 4, 5]]
sublists = get_surrounding_sublists_upto_length(a_list, 1, 2)
assert sublists == [[0, 2], [0, 2, 3]]
sublists = get_surrounding_sublists_upto_length(a_list, 0, 3)
assert sublists == [[1], [1, 2], [1, 2, 3]]
sublists = get_surrounding_sublists_upto_length([1], 0, 2, filler=[2])
print(sublists)
def test_get_surrounding_sublists_upto_length_with_filler(self):
a_list = list(range(6))
sublists = get_surrounding_sublists_upto_length(a_list, 3, 1, filler=[13])
assert sublists == [[2, 13, 4]]
sublists = get_surrounding_sublists_upto_length(a_list, 1, 2, filler=[97])
assert sublists == [[0, 97, 2], [0, 97, 2, 3]]
def test_standard_0_to_1000_factor_scale(self):
assert standard_0_to_1000_factor_scale(0) == 0
assert standard_0_to_1000_factor_scale(1) == 1
assert standard_0_to_1000_factor_scale(20) > 2
assert standard_0_to_1000_factor_scale(200) > 3
assert standard_0_to_1000_factor_scale(1000) < 4
|
[
"[email protected]"
] | |
b193d042f11de77758ebb4740dee22fad21ba1f8
|
684658837ca81a9a906ff8156a28f67b0ed53e81
|
/venv/bin/jupyter-serverextension
|
5493e28add6cf5377f63b4abb7dde6311928fb15
|
[] |
no_license
|
sangramga/djangocon
|
30d6d47394daadfa162c5f96bf2e8476e580906d
|
d67203a7a7be2cefedbd75e080a6737e71a5bad3
|
refs/heads/master
| 2020-03-17T21:20:35.097025 | 2017-08-15T19:45:47 | 2017-08-15T19:45:47 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 277 |
#!/Users/lorenamesa/Desktop/bootstrap_ml_project/venv/bin/python3.5
# -*- coding: utf-8 -*-
import re
import sys
from notebook.serverextensions import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
|
[
"[email protected]"
] | ||
31f6af36af85cf21686384f18d934765ad86235b
|
e1add42d3095608e73717cddf39646a1eaa62729
|
/setup.py
|
67db060118ca11d8cd2841bba696b5410defe1a0
|
[
"MIT"
] |
permissive
|
PhoenixEra/mbcd
|
70870e7a774f649b22bb42810118640333d8c822
|
4be85b964bf02818f2fc83b21f2b339b3fc7a14f
|
refs/heads/main
| 2023-07-01T21:28:00.506563 | 2021-08-04T15:03:06 | 2021-08-04T15:03:06 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 522 |
py
|
from setuptools import setup, find_packages
REQUIRED = ['numpy', 'pandas', 'matplotlib', 'stable-baselines']
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name='mbcd',
version='0.1',
packages=['mbcd',],
install_requires=REQUIRED,
author='LucasAlegre',
author_email='[email protected]',
long_description=long_description,
url='https://github.com/LucasAlegre/mbcd',
license="MIT",
description='Model-Based Reinforcement Learning Context Detection.'
)
|
[
"[email protected]"
] | |
015308be165b92d7ae113955dc6228fb2a19068a
|
5c4e2a06779f3a5e9eee05d501eb710a684d9cf9
|
/networking_tn/db/migration/alembic_migrations/versions/38462bfd9dc_fortinet_plugin_database.py
|
555a7fcec5ffaeea96798d06a4cb8953e5602195
|
[
"Apache-2.0"
] |
permissive
|
xjforfuture/networking-tn
|
e2b8e2a77b997af65a735c7a45401102eb9f9c94
|
1a21765a1ec45624dd83aa015ef8022d635d96b2
|
refs/heads/master
| 2021-09-10T09:06:48.372663 | 2018-03-23T08:44:40 | 2018-03-23T08:44:40 | 113,955,545 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 8,255 |
py
|
# Copyright 2018 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""create tsinghuanet tables
Revision ID: 38462bfd9dc
Revises: None
Create Date: 2018-01-01 14:26:02.305436
"""
# revision identifiers, used by Alembic.
revision = '38462bfd9dc'
down_revision = None
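# down_revision is None, so this is the first migration in the chain.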
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_table('tn_routers')
op.drop_table('tn_interfaces')
op.drop_table('tn_static_routes')
    # NOTE: tn_vms is never created in upgrade(), so it is not dropped here
op.drop_table('tn_addresss')
op.drop_table('tn_services')
op.drop_table('tn_snat_rules')
op.drop_table('tn_rules')
op.drop_table('tn_policys')
op.drop_table('tn_firewalls')
### end Alembic commands ###
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('tn_routers',
sa.Column('id', mysql.VARCHAR(length=64), nullable=False),
sa.Column('priv_id', mysql.VARCHAR(length=32), nullable=False),
sa.Column('tenant_id', mysql.VARCHAR(length=64), nullable=True),
sa.Column('name', mysql.VARCHAR(length=64), nullable=True),
sa.Column('manage_ip', mysql.VARCHAR(length=32), nullable=False),
sa.Column('image_name', mysql.VARCHAR(length=128), nullable=False),
sa.Column('snat_inner_use', mysql.VARCHAR(length=1024), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table('tn_interfaces',
sa.Column('id', mysql.VARCHAR(length=64), nullable=False),
sa.Column('router_id', mysql.VARCHAR(length=64), nullable=False),
sa.Column('inner_id', mysql.INTEGER(display_width=32), autoincrement=False, nullable=True),
sa.Column('extern_name', mysql.VARCHAR(length=64), nullable=False),
sa.Column('inner_name', mysql.VARCHAR(length=32), nullable=False),
sa.Column('state', mysql.VARCHAR(length=16), server_default=sa.text(u"'down'"), nullable=True),
sa.Column('type', mysql.VARCHAR(length=16), nullable=True),
sa.Column('mac', mysql.VARCHAR(length=32), nullable=True),
sa.Column('vlan_id', mysql.INTEGER(display_width=32), autoincrement=False, nullable=True),
sa.Column('ip_prefix', mysql.VARCHAR(length=32), nullable=True),
sa.Column('is_manage', mysql.VARCHAR(length=16), server_default=sa.text(u"'false'"), nullable=True),
sa.Column('is_gw', mysql.VARCHAR(length=16), server_default=sa.text(u"'false'"), nullable=True),
sa.Column('is_sub', mysql.VARCHAR(length=16), server_default=sa.text(u"'false'"), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table('tn_static_routes',
sa.Column('router_id', mysql.VARCHAR(length=64), nullable=False),
sa.Column('dest', mysql.VARCHAR(length=32), nullable=False),
sa.Column('prefix', mysql.VARCHAR(length=32), nullable=False),
sa.Column('next_hop', mysql.VARCHAR(length=32), nullable=False),
sa.PrimaryKeyConstraint('router_id')
)
op.create_table('tn_addresss',
sa.Column('rule_id', mysql.VARCHAR(length=64), nullable=False),
sa.Column('name', mysql.VARCHAR(length=64), nullable=False),
sa.Column('ip_prefix', mysql.VARCHAR(length=32), nullable=False),
sa.PrimaryKeyConstraint('rule_id', 'name')
)
op.create_table('tn_services',
sa.Column('rule_id', mysql.VARCHAR(length=64), nullable=False),
sa.Column('name', mysql.VARCHAR(length=64), nullable=False),
sa.Column('protocol', mysql.VARCHAR(length=16), nullable=False),
sa.Column('src_port_min', mysql.INTEGER(display_width=32), autoincrement=False, nullable=False),
sa.Column('src_port_max', mysql.INTEGER(display_width=32), autoincrement=False, nullable=False),
sa.Column('dst_port_min', mysql.INTEGER(display_width=32), autoincrement=False, nullable=False),
sa.Column('dst_port_max', mysql.INTEGER(display_width=32), autoincrement=False, nullable=False),
sa.PrimaryKeyConstraint('rule_id')
)
op.create_table('tn_snat_rules',
sa.Column('router_id', mysql.VARCHAR(length=64), nullable=False),
sa.Column('inner_id', mysql.INTEGER(display_width=32), autoincrement=False, nullable=False),
sa.Column('srcaddr', mysql.VARCHAR(length=32), nullable=True),
sa.Column('dstaddr', mysql.VARCHAR(length=32), nullable=True),
sa.Column('trans_addr', mysql.VARCHAR(length=32), nullable=True),
sa.Column('srcaddr_name', mysql.VARCHAR(length=32), nullable=True),
sa.Column('dstaddr_name', mysql.VARCHAR(length=32), nullable=True),
sa.Column('trans_addr_name', mysql.VARCHAR(length=32), nullable=True),
sa.Column('trans', mysql.VARCHAR(length=16), nullable=True),
sa.PrimaryKeyConstraint('router_id', 'inner_id')
)
op.create_table('tn_rules',
sa.Column('id', mysql.VARCHAR(length=64), nullable=False),
sa.Column('policy_id', mysql.VARCHAR(length=64), nullable=True),
sa.Column('inner_id', mysql.INTEGER(display_width=32), autoincrement=False, nullable=False),
sa.Column('name', mysql.VARCHAR(length=32), nullable=False),
sa.Column('desc', mysql.VARCHAR(length=32), nullable=True),
sa.Column('protocol', mysql.VARCHAR(length=16), nullable=True),
sa.Column('action', mysql.VARCHAR(length=16), nullable=True),
sa.Column('enable', mysql.VARCHAR(length=16), nullable=True),
sa.Column('srcaddr', mysql.VARCHAR(length=64), nullable=True),
sa.Column('dstaddr', mysql.VARCHAR(length=64), nullable=True),
sa.Column('service', mysql.VARCHAR(length=64), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table('tn_policys',
sa.Column('id', mysql.VARCHAR(length=64), nullable=False),
sa.Column('name', mysql.VARCHAR(length=32), nullable=True),
sa.Column('desc', mysql.VARCHAR(length=32), nullable=True),
sa.Column('rule_inner_use', mysql.VARCHAR(length=1024), nullable=True),
sa.Column('reference_count', mysql.INTEGER(display_width=32), autoincrement=False, nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table('tn_firewalls',
sa.Column('id', mysql.VARCHAR(length=64), nullable=False),
sa.Column('name', mysql.VARCHAR(length=32), nullable=True),
sa.Column('desc', mysql.VARCHAR(length=64), nullable=True),
sa.Column('policy_id', mysql.VARCHAR(length=64), nullable=True),
sa.Column('router_ids', mysql.VARCHAR(length=1024), nullable=True),
sa.PrimaryKeyConstraint('id')
)
### end Alembic commands ###
|
[
"[email protected]"
] | |
245ffc8b4970e60d6c80835695d5aacfc03dcd78
|
3ac84fa46db498e914f6e1aaf2eff490a63807a1
|
/keystone/keystone/auth/controllers.py
|
21e4c9bbdbf79fe3ca1d3d9fafc1f84e5b9f14d5
|
[
"Apache-2.0"
] |
permissive
|
onsoku/horizon_review
|
3c5f1a8f863142f3f724f59771ad39604bca4c20
|
80cca0badc61b4754ef2c10f23a0ee48cd227445
|
refs/heads/master
| 2020-05-20T11:11:25.625186 | 2015-01-21T01:01:59 | 2015-01-21T01:01:59 | 29,002,325 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 26,981 |
py
|
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
from keystoneclient.common import cms
from oslo.utils import timeutils
import six
from keystone.assignment import controllers as assignment_controllers
from keystone.common import authorization
from keystone.common import controller
from keystone.common import dependency
from keystone.common import wsgi
from keystone import config
from keystone.contrib import federation
from keystone import exception
from keystone.i18n import _, _LI
from keystone.openstack.common import importutils
from keystone.openstack.common import jsonutils
from keystone.openstack.common import log
LOG = log.getLogger(__name__)
CONF = config.CONF
# registry of authentication methods
AUTH_METHODS = {}
AUTH_PLUGINS_LOADED = False
def load_auth_methods():
global AUTH_PLUGINS_LOADED
if AUTH_PLUGINS_LOADED:
# Only try and load methods a single time.
return
# config.setup_authentication should be idempotent, call it to ensure we
# have setup all the appropriate configuration options we may need.
config.setup_authentication()
for plugin in CONF.auth.methods:
if '.' in plugin:
# NOTE(morganfainberg): if '.' is in the plugin name, it should be
# imported rather than used as a plugin identifier.
plugin_class = plugin
driver = importutils.import_object(plugin)
if not hasattr(driver, 'method'):
                raise ValueError(_('Cannot load an auth-plugin by class-name '
                                   'without a "method" attribute defined: %s') %
                                 plugin_class)
LOG.info(_LI('Loading auth-plugins by class-name is deprecated.'))
plugin_name = driver.method
else:
plugin_name = plugin
plugin_class = CONF.auth.get(plugin)
driver = importutils.import_object(plugin_class)
if plugin_name in AUTH_METHODS:
raise ValueError(_('Auth plugin %(plugin)s is requesting '
'previously registered method %(method)s') %
{'plugin': plugin_class, 'method': driver.method})
AUTH_METHODS[plugin_name] = driver
AUTH_PLUGINS_LOADED = True
def get_auth_method(method_name):
global AUTH_METHODS
if method_name not in AUTH_METHODS:
raise exception.AuthMethodNotSupported()
return AUTH_METHODS[method_name]
class AuthContext(dict):
"""Retrofitting auth_context to reconcile identity attributes.
The identity attributes must not have conflicting values among the
auth plug-ins. The only exception is `expires_at`, which is set to its
earliest value.
"""
# identity attributes need to be reconciled among the auth plugins
IDENTITY_ATTRIBUTES = frozenset(['user_id', 'project_id',
'access_token_id', 'domain_id',
'expires_at'])
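    # Illustrative (assumed) behaviour: assigning 'expires_at' twice keeps the
    # earliest value (ISO-8601 strings compare chronologically), while conflicting
    # values for any other identity attribute raise Unauthorized.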
def __setitem__(self, key, val):
if key in self.IDENTITY_ATTRIBUTES and key in self:
existing_val = self[key]
if key == 'expires_at':
# special treatment for 'expires_at', we are going to take
# the earliest expiration instead.
if existing_val != val:
LOG.info(_LI('"expires_at" has conflicting values '
'%(existing)s and %(new)s. Will use the '
'earliest value.'),
{'existing': existing_val, 'new': val})
if existing_val is None or val is None:
val = existing_val or val
else:
val = min(existing_val, val)
elif existing_val != val:
msg = _('Unable to reconcile identity attribute %(attribute)s '
'as it has conflicting values %(new)s and %(old)s') % (
{'attribute': key,
'new': val,
'old': existing_val})
raise exception.Unauthorized(msg)
return super(AuthContext, self).__setitem__(key, val)
# TODO(blk-u): this class doesn't use identity_api directly, but makes it
# available for consumers. Consumers should probably not be getting
# identity_api from this since it's available in global registry, then
# identity_api should be removed from this list.
@dependency.requires('assignment_api', 'identity_api', 'trust_api')
class AuthInfo(object):
"""Encapsulation of "auth" request."""
@staticmethod
def create(context, auth=None):
auth_info = AuthInfo(context, auth=auth)
auth_info._validate_and_normalize_auth_data()
return auth_info
def __init__(self, context, auth=None):
self.context = context
self.auth = auth
self._scope_data = (None, None, None)
# self._scope_data is (domain_id, project_id, trust_ref)
# project scope: (None, project_id, None)
# domain scope: (domain_id, None, None)
# trust scope: (None, None, trust_ref)
# unscoped: (None, None, None)
def _assert_project_is_enabled(self, project_ref):
# ensure the project is enabled
try:
self.assignment_api.assert_project_enabled(
project_id=project_ref['id'],
project=project_ref)
except AssertionError as e:
LOG.warning(e)
six.reraise(exception.Unauthorized, exception.Unauthorized(e),
sys.exc_info()[2])
def _assert_domain_is_enabled(self, domain_ref):
try:
self.assignment_api.assert_domain_enabled(
domain_id=domain_ref['id'],
domain=domain_ref)
except AssertionError as e:
LOG.warning(e)
six.reraise(exception.Unauthorized, exception.Unauthorized(e),
sys.exc_info()[2])
def _lookup_domain(self, domain_info):
domain_id = domain_info.get('id')
domain_name = domain_info.get('name')
domain_ref = None
if not domain_id and not domain_name:
raise exception.ValidationError(attribute='id or name',
target='domain')
try:
if domain_name:
domain_ref = self.assignment_api.get_domain_by_name(
domain_name)
else:
domain_ref = self.assignment_api.get_domain(domain_id)
except exception.DomainNotFound as e:
LOG.exception(e)
raise exception.Unauthorized(e)
self._assert_domain_is_enabled(domain_ref)
return domain_ref
def _lookup_project(self, project_info):
project_id = project_info.get('id')
project_name = project_info.get('name')
project_ref = None
if not project_id and not project_name:
raise exception.ValidationError(attribute='id or name',
target='project')
try:
if project_name:
if 'domain' not in project_info:
raise exception.ValidationError(attribute='domain',
target='project')
domain_ref = self._lookup_domain(project_info['domain'])
project_ref = self.assignment_api.get_project_by_name(
project_name, domain_ref['id'])
else:
project_ref = self.assignment_api.get_project(project_id)
# NOTE(morganfainberg): The _lookup_domain method will raise
# exception.Unauthorized if the domain isn't found or is
# disabled.
self._lookup_domain({'id': project_ref['domain_id']})
except exception.ProjectNotFound as e:
LOG.exception(e)
raise exception.Unauthorized(e)
self._assert_project_is_enabled(project_ref)
return project_ref
def _lookup_trust(self, trust_info):
trust_id = trust_info.get('id')
if not trust_id:
raise exception.ValidationError(attribute='trust_id',
target='trust')
trust = self.trust_api.get_trust(trust_id)
if not trust:
raise exception.TrustNotFound(trust_id=trust_id)
return trust
def _validate_and_normalize_scope_data(self):
"""Validate and normalize scope data."""
if 'scope' not in self.auth:
return
if sum(['project' in self.auth['scope'],
'domain' in self.auth['scope'],
'OS-TRUST:trust' in self.auth['scope']]) != 1:
raise exception.ValidationError(
attribute='project, domain, or OS-TRUST:trust',
target='scope')
if 'project' in self.auth['scope']:
project_ref = self._lookup_project(self.auth['scope']['project'])
self._scope_data = (None, project_ref['id'], None)
elif 'domain' in self.auth['scope']:
domain_ref = self._lookup_domain(self.auth['scope']['domain'])
self._scope_data = (domain_ref['id'], None, None)
elif 'OS-TRUST:trust' in self.auth['scope']:
if not CONF.trust.enabled:
raise exception.Forbidden('Trusts are disabled.')
trust_ref = self._lookup_trust(
self.auth['scope']['OS-TRUST:trust'])
# TODO(ayoung): when trusts support domains, fill in domain data
if trust_ref.get('project_id') is not None:
project_ref = self._lookup_project(
{'id': trust_ref['project_id']})
self._scope_data = (None, project_ref['id'], trust_ref)
else:
self._scope_data = (None, None, trust_ref)
def _validate_auth_methods(self):
if 'identity' not in self.auth:
raise exception.ValidationError(attribute='identity',
target='auth')
# make sure auth methods are provided
if 'methods' not in self.auth['identity']:
raise exception.ValidationError(attribute='methods',
target='identity')
# make sure all the method data/payload are provided
for method_name in self.get_method_names():
if method_name not in self.auth['identity']:
raise exception.ValidationError(attribute=method_name,
target='identity')
# make sure auth method is supported
for method_name in self.get_method_names():
if method_name not in AUTH_METHODS:
raise exception.AuthMethodNotSupported()
def _validate_and_normalize_auth_data(self):
"""Make sure "auth" is valid."""
# make sure "auth" exist
if not self.auth:
raise exception.ValidationError(attribute='auth',
target='request body')
self._validate_auth_methods()
self._validate_and_normalize_scope_data()
def get_method_names(self):
"""Returns the identity method names.
:returns: list of auth method names
"""
# Sanitizes methods received in request's body
# Filters out duplicates, while keeping elements' order.
method_names = []
for method in self.auth['identity']['methods']:
if method not in method_names:
method_names.append(method)
return method_names
def get_method_data(self, method):
"""Get the auth method payload.
:returns: auth method payload
"""
if method not in self.auth['identity']['methods']:
raise exception.ValidationError(attribute=method,
target='identity')
return self.auth['identity'][method]
def get_scope(self):
"""Get scope information.
Verify and return the scoping information.
:returns: (domain_id, project_id, trust_ref).
If scope to a project, (None, project_id, None)
will be returned.
If scoped to a domain, (domain_id, None, None)
will be returned.
If scoped to a trust, (None, project_id, trust_ref),
Will be returned, where the project_id comes from the
trust definition.
If unscoped, (None, None, None) will be returned.
"""
return self._scope_data
def set_scope(self, domain_id=None, project_id=None, trust=None):
"""Set scope information."""
if domain_id and project_id:
msg = _('Scoping to both domain and project is not allowed')
raise ValueError(msg)
if domain_id and trust:
msg = _('Scoping to both domain and trust is not allowed')
raise ValueError(msg)
if project_id and trust:
msg = _('Scoping to both project and trust is not allowed')
raise ValueError(msg)
self._scope_data = (domain_id, project_id, trust)
@dependency.requires('assignment_api', 'catalog_api', 'identity_api',
'token_provider_api', 'trust_api')
class Auth(controller.V3Controller):
# Note(atiwari): From V3 auth controller code we are
# calling protection() wrappers, so we need to setup
# the member_name and collection_name attributes of
# auth controller code.
# In the absence of these attributes, default 'entity'
# string will be used to represent the target which is
# generic. Policy can be defined using 'entity' but it
# would not reflect the exact entity that is in context.
# We are defining collection_name = 'tokens' and
# member_name = 'token' to facilitate policy decisions.
collection_name = 'tokens'
member_name = 'token'
def __init__(self, *args, **kw):
super(Auth, self).__init__(*args, **kw)
config.setup_authentication()
def authenticate_for_token(self, context, auth=None):
"""Authenticate user and issue a token."""
include_catalog = 'nocatalog' not in context['query_string']
try:
auth_info = AuthInfo.create(context, auth=auth)
auth_context = AuthContext(extras={},
method_names=[],
bind={})
self.authenticate(context, auth_info, auth_context)
if auth_context.get('access_token_id'):
auth_info.set_scope(None, auth_context['project_id'], None)
self._check_and_set_default_scoping(auth_info, auth_context)
(domain_id, project_id, trust) = auth_info.get_scope()
method_names = auth_info.get_method_names()
method_names += auth_context.get('method_names', [])
# make sure the list is unique
method_names = list(set(method_names))
expires_at = auth_context.get('expires_at')
# NOTE(morganfainberg): define this here so it is clear what the
# argument is during the issue_v3_token provider call.
metadata_ref = None
token_audit_id = auth_context.get('audit_id')
(token_id, token_data) = self.token_provider_api.issue_v3_token(
auth_context['user_id'], method_names, expires_at, project_id,
domain_id, auth_context, trust, metadata_ref, include_catalog,
parent_audit_id=token_audit_id)
# NOTE(wanghong): We consume a trust use only when we are using
# trusts and have successfully issued a token.
if trust:
self.trust_api.consume_use(trust['id'])
return render_token_data_response(token_id, token_data,
created=True)
except exception.TrustNotFound as e:
raise exception.Unauthorized(e)
def _check_and_set_default_scoping(self, auth_info, auth_context):
(domain_id, project_id, trust) = auth_info.get_scope()
if trust:
project_id = trust['project_id']
if domain_id or project_id or trust:
# scope is specified
return
# Skip scoping when unscoped federated token is being issued
if federation.IDENTITY_PROVIDER in auth_context:
return
# fill in default_project_id if it is available
try:
user_ref = self.identity_api.get_user(auth_context['user_id'])
except exception.UserNotFound as e:
LOG.exception(e)
raise exception.Unauthorized(e)
default_project_id = user_ref.get('default_project_id')
if not default_project_id:
            # User has no default project, so an unscoped token will be issued.
return
# make sure user's default project is legit before scoping to it
try:
default_project_ref = self.assignment_api.get_project(
default_project_id)
default_project_domain_ref = self.assignment_api.get_domain(
default_project_ref['domain_id'])
if (default_project_ref.get('enabled', True) and
default_project_domain_ref.get('enabled', True)):
if self.assignment_api.get_roles_for_user_and_project(
user_ref['id'], default_project_id):
auth_info.set_scope(project_id=default_project_id)
else:
msg = _("User %(user_id)s doesn't have access to"
" default project %(project_id)s. The token will"
" be unscoped rather than scoped to the project.")
LOG.warning(msg,
{'user_id': user_ref['id'],
'project_id': default_project_id})
else:
msg = _("User %(user_id)s's default project %(project_id)s is"
" disabled. The token will be unscoped rather than"
" scoped to the project.")
LOG.warning(msg,
{'user_id': user_ref['id'],
'project_id': default_project_id})
except (exception.ProjectNotFound, exception.DomainNotFound):
# default project or default project domain doesn't exist,
# will issue unscoped token instead
msg = _("User %(user_id)s's default project %(project_id)s not"
" found. The token will be unscoped rather than"
" scoped to the project.")
LOG.warning(msg, {'user_id': user_ref['id'],
'project_id': default_project_id})
def authenticate(self, context, auth_info, auth_context):
"""Authenticate user."""
# The 'external' method allows any 'REMOTE_USER' based authentication
# In some cases the server can set REMOTE_USER as '' instead of
# dropping it, so this must be filtered out
if context['environment'].get('REMOTE_USER'):
try:
external = get_auth_method('external')
external.authenticate(context, auth_info, auth_context)
except exception.AuthMethodNotSupported:
# This will happen there is no 'external' plugin registered
# and the container is performing authentication.
# The 'kerberos' and 'saml' methods will be used this way.
# In those cases, it is correct to not register an
# 'external' plugin; if there is both an 'external' and a
# 'kerberos' plugin, it would run the check on identity twice.
pass
except exception.Unauthorized:
# If external fails then continue and attempt to determine
# user identity using remaining auth methods
pass
# need to aggregate the results in case two or more methods
# are specified
auth_response = {'methods': []}
for method_name in auth_info.get_method_names():
method = get_auth_method(method_name)
resp = method.authenticate(context,
auth_info.get_method_data(method_name),
auth_context)
if resp:
auth_response['methods'].append(method_name)
auth_response[method_name] = resp
if auth_response["methods"]:
# authentication continuation required
raise exception.AdditionalAuthRequired(auth_response)
if 'user_id' not in auth_context:
msg = _('User not found')
raise exception.Unauthorized(msg)
@controller.protected()
def check_token(self, context):
token_id = context.get('subject_token_id')
token_data = self.token_provider_api.validate_v3_token(
token_id)
# NOTE(morganfainberg): The code in
# ``keystone.common.wsgi.render_response`` will remove the content
# body.
return render_token_data_response(token_id, token_data)
@controller.protected()
def revoke_token(self, context):
token_id = context.get('subject_token_id')
return self.token_provider_api.revoke_token(token_id)
@controller.protected()
def validate_token(self, context):
token_id = context.get('subject_token_id')
include_catalog = 'nocatalog' not in context['query_string']
token_data = self.token_provider_api.validate_v3_token(
token_id)
if not include_catalog and 'catalog' in token_data['token']:
del token_data['token']['catalog']
return render_token_data_response(token_id, token_data)
@controller.protected()
def revocation_list(self, context, auth=None):
if not CONF.token.revoke_by_id:
raise exception.Gone()
tokens = self.token_provider_api.list_revoked_tokens()
for t in tokens:
expires = t['expires']
if not (expires and isinstance(expires, six.text_type)):
t['expires'] = timeutils.isotime(expires)
data = {'revoked': tokens}
json_data = jsonutils.dumps(data)
signed_text = cms.cms_sign_text(json_data,
CONF.signing.certfile,
CONF.signing.keyfile)
return {'signed': signed_text}
def get_auth_context(self, context):
# TODO(dolphm): this method of accessing the auth context is terrible,
# but context needs to be refactored to always have reasonable values.
env_context = context.get('environment', {})
return env_context.get(authorization.AUTH_CONTEXT_ENV, {})
def _combine_lists_uniquely(self, a, b):
# it's most likely that only one of these will be filled so avoid
# the combination if possible.
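        # keying a dict on 'id' de-duplicates the merged list; on a collision
        # the entry from b (later in a + b) wins.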
if a and b:
return dict((x['id'], x) for x in a + b).values()
else:
return a or b
@controller.protected()
def get_auth_projects(self, context):
auth_context = self.get_auth_context(context)
user_id = auth_context.get('user_id')
user_refs = []
if user_id:
try:
user_refs = self.assignment_api.list_projects_for_user(user_id)
except exception.UserNotFound:
# federated users have an id but they don't link to anything
pass
group_ids = auth_context.get('group_ids')
grp_refs = []
if group_ids:
grp_refs = self.assignment_api.list_projects_for_groups(group_ids)
refs = self._combine_lists_uniquely(user_refs, grp_refs)
return assignment_controllers.ProjectV3.wrap_collection(context, refs)
@controller.protected()
def get_auth_domains(self, context):
auth_context = self.get_auth_context(context)
user_id = auth_context.get('user_id')
user_refs = []
if user_id:
try:
user_refs = self.assignment_api.list_domains_for_user(user_id)
except exception.UserNotFound:
# federated users have an id but they don't link to anything
pass
group_ids = auth_context.get('group_ids')
grp_refs = []
if group_ids:
grp_refs = self.assignment_api.list_domains_for_groups(group_ids)
refs = self._combine_lists_uniquely(user_refs, grp_refs)
return assignment_controllers.DomainV3.wrap_collection(context, refs)
@controller.protected()
def get_auth_catalog(self, context):
auth_context = self.get_auth_context(context)
user_id = auth_context.get('user_id')
project_id = auth_context.get('project_id')
if not project_id:
raise exception.Forbidden(
_('A project-scoped token is required to produce a service '
'catalog.'))
# The V3Controller base methods mostly assume that you're returning
# either a collection or a single element from a collection, neither of
# which apply to the catalog. Because this is a special case, this
# re-implements a tiny bit of work done by the base controller (such as
# self-referential link building) to avoid overriding or refactoring
# several private methods.
return {
'catalog': self.catalog_api.get_v3_catalog(user_id, project_id),
'links': {'self': self.base_url(context, path='auth/catalog')}
}
# FIXME(gyee): not sure if it belongs here or keystone.common. Park it here
# for now.
def render_token_data_response(token_id, token_data, created=False):
"""Render token data HTTP response.
Stash token ID into the X-Subject-Token header.
"""
headers = [('X-Subject-Token', token_id)]
if created:
status = (201, 'Created')
else:
status = (200, 'OK')
return wsgi.render_response(body=token_data,
status=status, headers=headers)
|
[
"[email protected]"
] | |
d7f322eb83e4c6777bb715eb2d33ac92cdd6091e
|
6146e33102797407ede06ce2daa56c28fdfa2812
|
/python/GafferSceneUI/CopyOptionsUI.py
|
ec82897c8adf729127236f6d11032db45fc406ed
|
[
"BSD-3-Clause"
] |
permissive
|
GafferHQ/gaffer
|
e1eb78ba8682bfbb7b17586d6e7b47988c3b7d64
|
59cab96598c59b90bee6d3fc1806492a5c03b4f1
|
refs/heads/main
| 2023-09-01T17:36:45.227956 | 2023-08-30T09:10:56 | 2023-08-30T09:10:56 | 9,043,124 | 707 | 144 |
BSD-3-Clause
| 2023-09-14T09:05:37 | 2013-03-27T00:04:53 |
Python
|
UTF-8
|
Python
| false | false | 2,430 |
py
|
##########################################################################
#
# Copyright (c) 2016, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import Gaffer
import GafferScene
##########################################################################
# Metadata
##########################################################################
Gaffer.Metadata.registerNode(
GafferScene.CopyOptions,
"description",
"""
A node which copies options from a source scene.
""",
plugs = {
"options" : [
"description",
"""
The names of the options to be copied. Names should be
separated by spaces and can use Gaffer's standard wildcards.
""",
],
"source" : [
"description",
"""
The source of the options to be copied.
""",
],
}
)
|
[
"[email protected]"
] | |
9a4de1381f1804be5dae1d274c222ccdc3d44048
|
21c09799d006ed6bede4123d57d6d54d977c0b63
|
/python2/framework/Drawer.py
|
3db7fba10a54fe603542ecd1b16e825c7fa55e41
|
[] |
no_license
|
corvettettt/DijetRootTreeAnalyzer
|
68cb12e6b280957e1eb22c9842b0b9b30ae2c779
|
e65624ffc105798209436fc80fb82e2c252c6344
|
refs/heads/master
| 2021-05-06T09:57:12.816787 | 2019-04-18T15:32:38 | 2019-04-18T15:32:38 | 114,043,763 | 1 | 0 | null | 2017-12-12T22:02:46 | 2017-12-12T22:02:46 | null |
UTF-8
|
Python
| false | false | 1,924 |
py
|
import os
import ROOT as rt
from rootTools import tdrstyle as setTDRStyle
class Drawer():
"""Class to draw overlayed histos for data and signals"""
def __init__(self, hData, hSignal):
print "Drawer::init"
self._hData = hData
self._hSignal = hSignal
self._dataHistos = {}
self._sigHistos = {}
#get the histos
for sample,opts in hData.items():
self._dataHistos[sample] = self.loopfile(opts[0])
for sample,opts in hSignal.items():
self._sigHistos[sample] = self.loopfile(opts[0])
def scalePlots(self, lumi):
for sample,opts in self._hSignal.items():
for histo in self._sigHistos[sample]:
integral = histo.Integral()
if integral > 0:
print opts[1]*lumi/integral
histo.Scale(opts[1]*lumi/integral)
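                    # scale to the expected event yield: weight (opts[1]) times
                    # luminosity, with the integral normalizing the shape first
                    # (units of weight and lumi are assumed to be consistent)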
def loopfile(self, infile):
print "Drawer::loopfile",infile
hlist = []
rootFile = rt.TFile(infile)
hnames = [k.GetName() for k in rootFile.GetListOfKeys()]
for name in hnames:
myTh1 = rootFile.Get(name)
myTh1.SetDirectory(0)
hlist.append(myTh1)
return hlist
def setStyle(self):
print "Drawer::setStyle"
setTDRStyle.setTDRStyle()
def addRatioBox(self, histo):
print "Drawer::addRatioBox"
def printPlots(self, outPath):
print "Drawer::printCanvas"
self.setStyle()
for it,dataplot in enumerate(self._dataHistos["data"]):
corrCanvas = rt.TCanvas()
dataplot.Draw()
for mcsample,hlist in self._sigHistos.items():
hlist[it].Draw("sames")
corrCanvas.Print(outPath+'/'+dataplot.GetName()+'.pdf')
#self.addRatioBox()
#def drawSignificance
|
[
"[email protected]"
] | |
6aa09349aae1917b65ddc6cca2c954c8cddbb364
|
bd5303f1fd7a6b8244c9d7f2f9037fd52f55686a
|
/crawler.py
|
08d56a24c3aac2d588ee945756150c353267800b
|
[] |
no_license
|
mmmmkin/crawler
|
dfe446871ca09844e1dc8182d370f89cf24d2c78
|
8247270733ebd07284d93539c59e460e3d4458d7
|
refs/heads/master
| 2020-05-25T11:01:54.044397 | 2019-05-20T12:47:28 | 2019-05-20T12:47:28 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 628 |
py
|
import ssl
from datetime import datetime
from urllib.request import Request, urlopen
def crawling(url='', encoding='utf-8'):
try:
request = Request(url)
ssl._create_default_https_context = ssl._create_unverified_context
resp = urlopen(request)
try:
receive = resp.read()
result = receive.decode(encoding)
except UnicodeDecodeError:
result = receive.decode(encoding, 'replace')
        print('%s : success for request(%s)' % (datetime.now(), url))
return result
except Exception as e:
print('%s : %s' % (e, datetime.now()))
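if __name__ == '__main__':
    # hypothetical usage sketch; example.com stands in for a real target URL
    page = crawling(url='https://example.com', encoding='utf-8')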
|
[
"[email protected]"
] | |
a583c5437d4f807b407d482f5e2221cce5862b2f
|
677f4896f21c46aee199c9f84c012c9733ece6f6
|
/ddsp/losses_test.py
|
dd4a23020c2d427443b4fb0f729ffa8fa5546d67
|
[
"Apache-2.0"
] |
permissive
|
werkaaa/ddsp
|
90b2881a350dad9f954e28ead4f145140c7d2ad4
|
92ce8724e22c17822d7f7564547733ed7fe918e2
|
refs/heads/master
| 2022-12-13T06:41:48.871697 | 2020-09-03T15:50:49 | 2020-09-03T15:50:49 | 286,489,693 | 0 | 0 |
Apache-2.0
| 2020-08-10T13:57:00 | 2020-08-10T13:57:00 | null |
UTF-8
|
Python
| false | false | 1,770 |
py
|
# Copyright 2020 The DDSP Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Tests for ddsp.losses."""
from ddsp import losses
import numpy as np
import tensorflow.compat.v2 as tf
class SpectralLossTest(tf.test.TestCase):
def test_output_shape_is_correct(self):
"""Test correct shape with all losses active."""
loss_obj = losses.SpectralLoss(
mag_weight=1.0,
delta_time_weight=1.0,
delta_freq_weight=1.0,
cumsum_freq_weight=1.0,
logmag_weight=1.0,
loudness_weight=1.0,
)
input_audio = tf.ones((3, 8000), dtype=tf.float32)
target_audio = tf.ones((3, 8000), dtype=tf.float32)
loss = loss_obj(input_audio, target_audio)
self.assertListEqual([], loss.shape.as_list())
self.assertTrue(np.isfinite(loss))
class PretrainedCREPEEmbeddingLossTest(tf.test.TestCase):
def test_output_shape_is_correct(self):
loss_obj = losses.PretrainedCREPEEmbeddingLoss()
input_audio = tf.ones((3, 16000), dtype=tf.float32)
target_audio = tf.ones((3, 16000), dtype=tf.float32)
loss = loss_obj(input_audio, target_audio)
self.assertListEqual([], loss.shape.as_list())
self.assertTrue(np.isfinite(loss))
if __name__ == '__main__':
tf.test.main()
|
[
"[email protected]"
] | |
09cce8fbbaa41efbc8ae40424576b47d84a05964
|
0e4d09b2a1b93aaa6d623d16905854d993a934ae
|
/Python/Django/belt_reviewer/apps/bookReviews/admin.py
|
ac72368c4b888b1d7d074e846a9860e09e00d9d3
|
[] |
no_license
|
freefaller69/DojoAssignments
|
ee7f6308b02041be3244f795422e0e044d4a41b2
|
f40426ac448026c1172048665f36024ad22f0d81
|
refs/heads/master
| 2021-01-17T10:23:39.419514 | 2017-07-25T00:50:41 | 2017-07-25T00:50:41 | 84,012,790 | 0 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 261 |
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from .models import Book, Author, Review
# Register your models here.
admin.site.register(Book)
admin.site.register(Author)
admin.site.register(Review)
|
[
"[email protected]"
] | |
5686d5a00a202f2b6cb60723f475bbd967b5cc76
|
5b4c803f68e52849a1c1093aac503efc423ad132
|
/UnPyc/tests/tests/CFG/2/return/return_if+elif_if+elif+else_.py
|
4d3f6e3a97f865aea7e5a8d94a62fe7ad96b04a8
|
[] |
no_license
|
Prashant-Jonny/UnPyc
|
9ce5d63b1e0d2ec19c1faa48d932cc3f71f8599c
|
4b9d4ab96dfc53a0b4e06972443e1402e9dc034f
|
refs/heads/master
| 2021-01-17T12:03:17.314248 | 2013-02-22T07:22:35 | 2013-02-22T07:22:35 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 163 |
py
|
def f():
if 1:
if 1:
return
elif 1:
return
elif 1:
if 1:
return
elif 1:
return
else:
if 1:
return
elif 1:
return
|
[
"[email protected]"
] | |
e6dddca50724f057823cd02a76e2d1c2cb00d118
|
44e6fecee8710156333e171ad38a2b4d4cd4e3e3
|
/2-numpy/ex.5.19.py
|
d32d0241d72a77738eec995557aa944742c2e792
|
[] |
no_license
|
3141592/data-science
|
5b0291ca40b275a1624e699828db5e63b5502b3c
|
f4f9bec56ee09bbd521b6dbacb0b221693a78637
|
refs/heads/master
| 2021-05-04T14:43:09.094396 | 2018-02-04T20:31:06 | 2018-02-04T20:31:06 | 120,209,026 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,083 |
py
|
import numpy as np
print("arr")
arr = np.arange(0,11)
print(arr)
print("=============")
print("arr[8]")
print(arr[8])
print("=============")
print("arr[1:6]: ")
print(arr[1:6])
print("=============")
print("arr[:6]")
print(arr[:6])
print("=============")
print("arr[0:5] = 100")
arr[0:5] = 100
print(arr)
print("=============")
arr_2d = np.array([[5,10,15],[20,25,30],[30,35,40]])
print("arr_2d")
print(arr_2d)
print("=============")
print("arr_2d[0][0]")
print(arr_2d[0][0])
print("arr_2d[1][1] = 25")
print(arr_2d[1][1])
print("arr_2d[0][2] = 15")
print(arr_2d[0][2])
print("arr_2d[0,0]")
print(arr_2d[0,0])
print("arr_2d[1,1] = 25")
print(arr_2d[1,1])
print("arr_2d[0,2] = 15")
print(arr_2d[0,2])
print("==============")
arr3 = (np.random.rand(1,25)*10).reshape(5,5)
print("arr3")
print(arr3)
print("arr3[:3,2:]")
print(arr3[:3,2:])
print("arr3[:3,:2]")
print(arr3[:3,:2])
print("==============")
arr = np.arange(1,11)
print("np.arange(1,11)")
print(arr)
print("arr > 5")
print(arr > 5)
bool_arr = arr > 6
print("bool_arr")
print(bool_arr)
print("arr[bool_arr]")
print(arr[bool_arr])
|
[
"[email protected]"
] | |
8e3ef950698fbf7d3e8c20133ccd2085180d1c8d
|
e204cdd8a38a247aeac3d07f6cce6822472bdcc5
|
/.history/app_test_django/models_20201116131143.py
|
ff79574c97012778681ec2385d75639933239b9a
|
[] |
no_license
|
steven-halla/python-test
|
388ad8386662ad5ce5c1a0976d9f054499dc741b
|
0b760a47d154078002c0272ed1204a94721c802a
|
refs/heads/master
| 2023-04-08T03:40:00.453977 | 2021-04-09T19:12:29 | 2021-04-09T19:12:29 | 354,122,365 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,333 |
py
|
from django.db import models
import re
class UserManager(models.Manager):
def user_registration_validator(self, post_data):
errors = {}
EMAIL_REGEX = re.compile(
r'^[a-zA-Z0-9.+_-]+@[a-zA-Z0-9._-]+\.[a-zA-Z]+$')
if len(post_data['first_name']) < 3:
errors['first_name'] = "First name must be 3 characters"
if post_data['first_name'].isalpha() == False:
errors['first_name'] = "letters only"
if len(post_data['last_name']) < 3:
errors['last_name'] = "Last name must be 3 characters"
if post_data['last_name'].isalpha() == False:
errors['last_name'] = "letters only"
if len(post_data['email']) < 8:
errors['email'] = "Email must contain 8 characters"
#if post_data['email'].Books.objects.filter(title=post_data) == True:
# errors['email'] ="this email already exist in database"
if post_data['email'].find("@") == -1:
errors['email'] = "email must contain @ and .com"
if post_data['email'].find(".com") == -1:
errors['email'] = "email must contain @ and .com"
# test whether a field matches the pattern
if not EMAIL_REGEX.match(post_data['email']):
errors['email'] = "Invalid email address!"
if post_data['password'] != post_data['confirm_password']:
errors['pass_match'] = "password must match confirm password"
if len(post_data['password']) < 8:
errors['pass_length'] = "password must be longer than 8 characters"
return errors
class User(models.Model):
first_name = models.CharField(max_length=20)
last_name = models.CharField(max_length=20)
email = models.CharField(max_length=20)
password = models.CharField(max_length=20)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
objects = UserManager()
class TripManager(models.Manager):
    def add_trip_validator(self, post_data):
        # snapshot was cut off mid-edit; minimal stub (assumed placeholder) so the module parses
        errors = {}
        return errors
class Trip(models.Model):
destination = models.CharField(max_length=20)
startdate = models.DateTimeField()
enddate = models.DateTimeField()
plan = models.CharField(max_length=30)
uploaded_by = models.ForeignKey(User, related_name="trip_uploaded", on_delete=models.CASCADE)
|
[
"[email protected]"
] | |
7debb913ce33acbbf107e40036794d0f9b9fd499
|
affdb1186825486d40c1140314cc04fe63b153b7
|
/bike-sharing-demand/preprocessing/preprocessing.py
|
1f949627fbcd95c8b691834f548fd29c225a61bb
|
[] |
no_license
|
Yagami360/kaggle_exercises
|
2f9a8a12c48a6e55ded6c626ceef5fb0cfca935b
|
17b731bb6f1ce0b81254047ffc56371f4c485df0
|
refs/heads/master
| 2022-11-22T23:00:27.176123 | 2020-07-23T05:05:00 | 2020-07-23T05:05:00 | 252,343,652 | 2 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,293 |
py
|
import os
import numpy as np
import pandas as pd
from sklearn.preprocessing import LabelEncoder
from sklearn.preprocessing import StandardScaler
from sklearn.preprocessing import MinMaxScaler
def preprocessing( args, df_train, df_test ):
    # full dataset (train and test concatenated)
df_data = pd.concat([df_train, df_test], sort=False)
    # derive calendar features from the datetime column
df_train['year'] = [t.year for t in pd.DatetimeIndex(df_train.datetime)]
df_train['year'] = df_train['year'].map( {2011:0, 2012:1} )
df_train["month"] = [t.month for t in pd.DatetimeIndex(df_train.datetime)]
df_train["day"] = [t.dayofweek for t in pd.DatetimeIndex(df_train.datetime)]
df_train["hour"] = [t.hour for t in pd.DatetimeIndex(df_train.datetime)]
df_train["weekday"] = [t for t in pd.DatetimeIndex(df_train.datetime).weekday]
df_test['year'] = [t.year for t in pd.DatetimeIndex(df_test.datetime)]
df_test['year'] = df_test['year'].map( {2011:0, 2012:1} )
df_test["month"] = [t.month for t in pd.DatetimeIndex(df_test.datetime)]
df_test["day"] = [t.dayofweek for t in pd.DatetimeIndex(df_test.datetime)]
df_test["hour"] = [t.hour for t in pd.DatetimeIndex(df_test.datetime)]
df_test["weekday"] = [t for t in pd.DatetimeIndex(df_test.datetime).weekday]
    # drop columns that are not needed
df_train.drop(["casual", "registered"], axis=1, inplace=True)
df_train.drop(["datetime"], axis=1, inplace=True)
df_test.drop(["datetime"], axis=1, inplace=True)
    # process every feature in a single loop
for col in df_train.columns:
if( args.debug ):
print( "df_train[{}].dtypes ] : {}".format(col, df_train[col].dtypes))
        # target variable
if( col in ["count"] ):
if( args.target_norm ):
                # log-transform so the target is closer to normally distributed
df_train[col] = pd.Series( np.log(df_train[col].values), name=col )
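                # (assumed downstream: predictions made on this log scale need np.exp() to invert)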
continue
#-----------------------------
        # encode string (label) features
#-----------------------------
if( df_train[col].dtypes == "object" ):
label_encoder = LabelEncoder()
label_encoder.fit(list(df_train[col]))
df_train[col] = label_encoder.transform(list(df_train[col]))
label_encoder = LabelEncoder()
label_encoder.fit(list(df_test[col]))
df_test[col] = label_encoder.transform(list(df_test[col]))
#-----------------------------
        # fill in missing values
#-----------------------------
"""
        # fill NaN with the column mean
pass
        # fill NaN with zero / integer columns
if( df_train[col].dtypes in ["int8", "int16", "int32", "int64", "uint8", "uint16", "uint32", "uint64"] ):
df_train[col].fillna(0, inplace=True)
df_test[col].fillna(0, inplace=True)
        # fill NaN with zero / float columns
elif( df_train[col].dtypes in ["float16", "float32", "float64", "float128"] ):
df_train[col].fillna(0.0, inplace=True)
df_test[col].fillna(0.0, inplace=True)
        # fill NaN with 'NA' / object columns
else:
df_train[col] = df_train[col].fillna('NA')
df_test[col] = df_test[col].fillna('NA')
"""
#-----------------------------
        # normalization (standard scaling)
#-----------------------------
if( args.input_norm ):
#if( df_train[col].dtypes != "object" ):
if( df_train[col].dtypes in ["float16", "float32", "float64", "float128"] ):
                scaler = StandardScaler()
                scaler.fit( df_train[col].values.reshape(-1,1) )
                df_train[col] = scaler.transform( df_train[col].values.reshape(-1,1) )
                # transform (not fit_transform) so test reuses the train statistics
                df_test[col] = scaler.transform( df_test[col].values.reshape(-1,1) )
#-----------------------------
        # drop features that contain a single unique value
#-----------------------------
if( df_train[col].nunique() == 1 ):
print( "remove {} : {}".format(col,df_train[col].nunique()) )
df_train.drop([col], axis=1, inplace=True)
df_test.drop([col], axis=1, inplace=True)
return df_train, df_test
|
[
"[email protected]"
] | |
7dc2bc6f99ef24ea573366cb23999dea0e981450
|
ed54290846b5c7f9556aacca09675550f0af4c48
|
/salt/salt/modules/win_path.py
|
7e1601e22563416628aa2d70fd625b8e953dbe4f
|
[
"Apache-2.0"
] |
permissive
|
smallyear/linuxLearn
|
87226ccd8745cd36955c7e40cafd741d47a04a6f
|
342e5020bf24b5fac732c4275a512087b47e578d
|
refs/heads/master
| 2022-03-20T06:02:25.329126 | 2019-08-01T08:39:59 | 2019-08-01T08:39:59 | 103,765,131 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,719 |
py
|
# -*- coding: utf-8 -*-
'''
Manage the Windows System PATH
Note that not all Windows applications will rehash the PATH environment
variable; only the ones that listen to the WM_SETTINGCHANGE message do.
http://support.microsoft.com/kb/104011
'''
from __future__ import absolute_import
# Python Libs
import logging
import re
import os
from salt.ext.six.moves import map
# Third party libs
try:
import win32gui
import win32con
HAS_WIN32 = True
except ImportError:
HAS_WIN32 = False
# Import salt libs
import salt.utils
# Settings
log = logging.getLogger(__name__)
def __virtual__():
'''
Load only on Windows
'''
if salt.utils.is_windows() and HAS_WIN32:
return 'win_path'
return False
def _normalize_dir(string):
'''
Normalize the directory to make comparison possible
'''
return re.sub(r'\\$', '', string.lower())
def rehash():
'''
Send a WM_SETTINGCHANGE Broadcast to Windows to refresh the Environment variables
CLI Example:
.. code-block:: bash
salt '*' win_path.rehash
'''
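    # Assumption: win32gui.SendMessageTimeout returns a tuple whose first
    # element is nonzero on success, hence the [0] == 1 comparison below.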
return win32gui.SendMessageTimeout(win32con.HWND_BROADCAST,
win32con.WM_SETTINGCHANGE,
0,
'Environment',
0,
10000)[0] == 1
def get_path():
'''
Returns a list of items in the SYSTEM path
CLI Example:
.. code-block:: bash
salt '*' win_path.get_path
'''
ret = __salt__['reg.read_value']('HKEY_LOCAL_MACHINE',
'SYSTEM\\CurrentControlSet\\Control\\Session Manager\\Environment',
'PATH')['vdata'].split(';')
# Trim ending backslash
return list(map(_normalize_dir, ret))
def exists(path):
'''
Check if the directory is configured in the SYSTEM path
Case-insensitive and ignores trailing backslash
Returns:
boolean True if path exists, False if not
CLI Example:
.. code-block:: bash
salt '*' win_path.exists 'c:\\python27'
salt '*' win_path.exists 'c:\\python27\\'
salt '*' win_path.exists 'C:\\pyThon27'
'''
path = _normalize_dir(path)
sysPath = get_path()
return path in sysPath
def add(path, index=0):
'''
Add the directory to the SYSTEM path in the index location
Returns:
boolean True if successful, False if unsuccessful
CLI Example:
.. code-block:: bash
# Will add to the beginning of the path
salt '*' win_path.add 'c:\\python27' 0
# Will add to the end of the path
salt '*' win_path.add 'c:\\python27' index='-1'
'''
currIndex = -1
sysPath = get_path()
path = _normalize_dir(path)
index = int(index)
# validate index boundaries
if index < 0:
index = len(sysPath) + index + 1
if index > len(sysPath):
index = len(sysPath)
localPath = os.environ["PATH"].split(os.pathsep)
if path not in localPath:
localPath.append(path)
os.environ["PATH"] = os.pathsep.join(localPath)
# Check if we are in the system path at the right location
try:
currIndex = sysPath.index(path)
if currIndex != index:
sysPath.pop(currIndex)
else:
return True
except ValueError:
pass
# Add it to the Path
sysPath.insert(index, path)
regedit = __salt__['reg.set_value'](
'HKEY_LOCAL_MACHINE',
'SYSTEM\\CurrentControlSet\\Control\\Session Manager\\Environment',
'PATH',
';'.join(sysPath),
'REG_EXPAND_SZ'
)
# Broadcast WM_SETTINGCHANGE to Windows
if regedit:
return rehash()
else:
return False
def remove(path):
r'''
Remove the directory from the SYSTEM path
Returns:
boolean True if successful, False if unsuccessful
CLI Example:
.. code-block:: bash
# Will remove C:\Python27 from the path
salt '*' win_path.remove 'c:\\python27'
'''
path = _normalize_dir(path)
sysPath = get_path()
localPath = os.environ["PATH"].split(os.pathsep)
if path in localPath:
localPath.remove(path)
os.environ["PATH"] = os.pathsep.join(localPath)
try:
sysPath.remove(path)
except ValueError:
return True
regedit = __salt__['reg.set_value'](
'HKEY_LOCAL_MACHINE',
'SYSTEM\\CurrentControlSet\\Control\\Session Manager\\Environment',
'PATH',
';'.join(sysPath),
'REG_EXPAND_SZ'
)
if regedit:
return rehash()
else:
return False
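# Usage sketch (added for illustration; assumes a Salt minion context where
# the __salt__ dunder is populated, so this will not run standalone):
#
# if not exists('c:\\python27'):
#     add('c:\\python27', index=0)   # prepend, then broadcast WM_SETTINGCHANGE
# remove('c:\\python27')             # True once the registry value is rewritten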
|
[
"[email protected]"
] | |
82097c0eafbc46e1235c7382b6d048e7d4ef8aa8
|
eec267b544295bccb2ab88b13b221ff4fd3d2985
|
/test_plot_rms_map.py
|
c6bf17198c876c8af1635ee13bbe3c644bcfd488
|
[] |
no_license
|
ralfcam/sandbox_scripts
|
dda368dcf8b8d01147660dedc6d0fcae2d15f80c
|
6fa53a63152c4a00396b38fb92ae7dc6f72d6b90
|
refs/heads/master
| 2022-05-29T02:02:24.849913 | 2020-05-01T02:23:57 | 2020-05-01T02:23:57 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,219 |
py
|
# -*- coding: utf-8 -*-
"""
Created on Wed Feb 28 13:42:25 2018
@author: jpeacock
"""
import mtpy.modeling.modem as modem
import matplotlib.pyplot as plt
import scipy.interpolate as interpolate
import numpy as np
res_fn = r"c:\\Users\\jpeacock\\Documents\\Geothermal\\Umatilla\\modem_inv\\inv_03\\um_err03_cov03_NLCG_130.res"
prms = modem.PlotRMSMaps(res_fn, plot_yn='n')
#prms.period_index = 15
#prms.plot_map()
prms.plot_loop(style='map', fig_format='pdf')
#d = modem.Data()
#d.read_data_file(res_fn)
#
#lat = d.data_array['lat']
#lon = d.data_array['lon']
#rms_arr = d.data_array['z'][:, 0, 0, 1].__abs__()/d.data_array['z_err'][:, 0, 0, 1].real
#
#x = np.linspace(lon.min(), lon.max(), 100)
#y = np.linspace(lat.min(), lat.max(), 100)
#
#grid_x, grid_y = np.meshgrid(x, y)
#
#points = np.array([lon, lat])
#
#rms_map = interpolate.griddata(points.T,
# np.nan_to_num(rms_arr),
# (grid_x, grid_y),
# method='cubic')
#
#fig = plt.figure(3)
#fig.clf()
#ax = fig.add_subplot(1, 1, 1, aspect='equal')
#im = ax.pcolormesh(grid_x, grid_y, rms_map, cmap='jet', vmin=0, vmax=5)
#plt.colorbar(im, ax=ax, shrink=.6)
#plt.show()
|
[
"[email protected]"
] | |
7a6854dfe3d5144790dab5b421192301bae5f7d5
|
e4724c9894768a3af353deb08c572324a9d18e3e
|
/workflowengine/lib/ConcurrenceSocket.py
|
13eda1835f608ce948342aafa7bd639dc46a648c
|
[] |
no_license
|
racktivity/ext-pylabs-workflowengine
|
da85719dee9a21f448cb75cb7340367bb29543bb
|
28e01647852e2050fe971df9d5857e734b672980
|
refs/heads/master
| 2021-01-10T04:50:40.484585 | 2011-09-15T14:46:34 | 2011-09-15T14:46:34 | 54,315,007 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 14,298 |
py
|
# fryckbos:
# Changed tasklet calls from stackless to concurrence.
# Don't use Tasklet.yield_(): sleeping tasks won't be awaken.
#
# Stackless compatible socket module:
#
# Author: Richard Tew <[email protected]>
#
# This wraps the asyncore module and the dispatcher class it provides in order
# write a socket module replacement that uses channels to allow calls to it to
# block until a delayed event occurs.
#
# Not all aspects of the socket module are provided by this file. Examples of
# it in use can be seen at the bottom of this file.
#
# Possible improvements:
# - More correct error handling. When there is an error on a socket found by
# poll, there is no idea what it actually is.
# - Launching each bit of incoming data in its own tasklet on the recvChannel
# send is a little over the top. It should be possible to add it to the
# rest of the queued data
import stackless
from concurrence import Tasklet, dispatch
import asyncore, weakref
import socket as stdsocket # We need the "socket" name for the function we export.
# If we are to masquerade as the socket module, we need to provide the constants.
if "__all__" in stdsocket.__dict__:
    __all__ = stdsocket.__dict__["__all__"]  # export the same public names as the stdlib module
for k, v in stdsocket.__dict__.iteritems():
if k in __all__:
globals()[k] = v
elif k == "EBADF":
globals()[k] = v
else:
for k, v in stdsocket.__dict__.iteritems():
if k.upper() == k:
globals()[k] = v
error = stdsocket.error
timeout = stdsocket.timeout
# WARNING: this function blocks and is not thread safe.
# The only solution is to spawn a thread to handle all
# getaddrinfo requests. Implementing a stackless DNS
# lookup service is only second best as getaddrinfo may
# use other methods.
getaddrinfo = stdsocket.getaddrinfo
# urllib2 apparently uses this directly. We need to cater for that.
_fileobject = stdsocket._fileobject
# Someone needs to invoke asyncore.poll() regularly to keep the socket
# data moving. The "ManageSockets" function here is a simple example
# of such a function. It is started by StartManager(), which uses the
# global "managerRunning" to ensure that no more than one copy is
# running.
#
# If you think you can do this better, register an alternative to
# StartManager using stacklesssocket_manager(). Your function will be
# called every time a new socket is created; it's your responsibility
# to ensure it doesn't start multiple copies of itself unnecessarily.
#
managerRunning = False
def ManageSockets():
global managerRunning
while len(asyncore.socket_map):
# Check the sockets for activity.
asyncore.poll(0.05)
# Yield to give other tasklets a chance to be scheduled.
Tasklet.sleep(0.01)
managerRunning = False
def StartManager():
global managerRunning
if not managerRunning:
managerRunning = True
Tasklet.new(ManageSockets)()
_manage_sockets_func = StartManager
def stacklesssocket_manager(mgr):
global _manage_sockets_func
_manage_sockets_func = mgr
def socket(*args, **kwargs):
import sys
if "socket" in sys.modules and sys.modules["socket"] is not stdsocket:
raise RuntimeError("Use 'stacklesssocket.install' instead of replacing the 'socket' module")
_realsocket_old = stdsocket._realsocket
_socketobject_old = stdsocket._socketobject
class _socketobject_new(_socketobject_old):
def __init__(self, family=AF_INET, type=SOCK_STREAM, proto=0, _sock=None):
# We need to do this here.
if _sock is None:
_sock = _realsocket_old(family, type, proto)
_sock = _fakesocket(_sock)
_manage_sockets_func()
_socketobject_old.__init__(self, family, type, proto, _sock)
if not isinstance(self._sock, _fakesocket):
raise RuntimeError("bad socket")
def accept(self):
sock, addr = self._sock.accept()
sock = _fakesocket(sock)
sock.wasConnected = True
return _socketobject_new(_sock=sock), addr
accept.__doc__ = _socketobject_old.accept.__doc__
def check_still_connected(f):
" Decorate socket functions to check they are still connected. "
def new_f(self, *args, **kwds):
if not self.connected:
# The socket was never connected.
if not self.wasConnected:
raise error(10057, "Socket is not connected")
# The socket has been closed already.
raise error(EBADF, 'Bad file descriptor')
return f(self, *args, **kwds)
return new_f
def install():
if stdsocket._realsocket is socket:
raise StandardError("Still installed")
stdsocket._realsocket = socket
stdsocket.socket = stdsocket.SocketType = stdsocket._socketobject = _socketobject_new
def uninstall():
stdsocket._realsocket = _realsocket_old
stdsocket.socket = stdsocket.SocketType = stdsocket._socketobject = _socketobject_old
class _fakesocket(asyncore.dispatcher):
connectChannel = None
acceptChannel = None
recvChannel = None
wasConnected = False
def __init__(self, realSocket):
# This is worth doing. I was passing in an invalid socket which
# was an instance of _fakesocket and it was causing tasklet death.
if not isinstance(realSocket, _realsocket_old):
raise StandardError("An invalid socket passed to fakesocket %s" % realSocket.__class__)
# This will register the real socket in the internal socket map.
asyncore.dispatcher.__init__(self, realSocket)
self.socket = realSocket
self.recvChannel = stackless.channel()
self.readString = ''
self.readIdx = 0
self.sendBuffer = ''
self.sendToBuffers = []
def __del__(self):
# There are no more users (sockets or files) of this fake socket, we
# are safe to close it fully. If we don't, asyncore will choke on
# the weakref failures.
self.close()
# The asyncore version of this function depends on socket being set
# which is not the case when this fake socket has been closed.
def __getattr__(self, attr):
if not hasattr(self, "socket"):
raise AttributeError("socket attribute unset on '"+ attr +"' lookup")
return getattr(self.socket, attr)
def add_channel(self, map=None):
if map is None:
map = self._map
map[self._fileno] = weakref.proxy(self)
def writable(self):
if self.socket.type != SOCK_DGRAM and not self.connected:
return True
return len(self.sendBuffer) or len(self.sendToBuffers)
def accept(self):
if not self.acceptChannel:
self.acceptChannel = stackless.channel()
return self.acceptChannel.receive()
def connect(self, address):
asyncore.dispatcher.connect(self, address)
# UDP sockets do not connect.
if self.socket.type != SOCK_DGRAM and not self.connected:
if not self.connectChannel:
self.connectChannel = stackless.channel()
# Prefer the sender. Do not block when sending, given that
# there is a tasklet known to be waiting, this will happen.
self.connectChannel.preference = 1
self.connectChannel.receive()
@check_still_connected
def send(self, data, flags=0):
self.sendBuffer += data
Tasklet.sleep(0.01)
return len(data)
@check_still_connected
def sendall(self, data, flags=0):
# WARNING: this will busy wait until all data is sent
# It should be possible to do away with the busy wait with
# the use of a channel.
self.sendBuffer += data
while self.sendBuffer:
Tasklet.sleep(0.01)
return len(data)
def sendto(self, sendData, sendArg1=None, sendArg2=None):
# sendto(data, address)
# sendto(data [, flags], address)
if sendArg2 is not None:
flags = sendArg1
sendAddress = sendArg2
else:
flags = 0
sendAddress = sendArg1
waitChannel = None
for idx, (data, address, channel, sentBytes) in enumerate(self.sendToBuffers):
if address == sendAddress:
self.sendToBuffers[idx] = (data + sendData, address, channel, sentBytes)
waitChannel = channel
break
if waitChannel is None:
waitChannel = stackless.channel()
self.sendToBuffers.append((sendData, sendAddress, waitChannel, 0))
return waitChannel.receive()
# Read at most byteCount bytes.
def recv(self, byteCount, flags=0):
# recv() must not concatenate two or more data fragments sent with
# send() on the remote side. Single fragment sent with single send()
# call should be split into strings of length less than or equal
# to 'byteCount', and returned by one or more recv() calls.
remainingBytes = self.readIdx != len(self.readString)
# TODO: Verify this connectivity behaviour.
if not self.connected:
# Sockets which have never been connected do this.
if not self.wasConnected:
raise error(10057, 'Socket is not connected')
# Sockets which were connected, but no longer are, use
# up the remaining input. Observed this with urllib.urlopen
# where it closes the socket and then allows the caller to
# use a file to access the body of the web page.
elif not remainingBytes:
self.readString = self.recvChannel.receive()
self.readIdx = 0
remainingBytes = len(self.readString)
if byteCount == 1 and remainingBytes:
ret = self.readString[self.readIdx]
self.readIdx += 1
elif self.readIdx == 0 and byteCount >= len(self.readString):
ret = self.readString
self.readString = ""
else:
idx = self.readIdx + byteCount
ret = self.readString[self.readIdx:idx]
self.readString = self.readString[idx:]
self.readIdx = 0
# ret will be '' when EOF.
return ret
def recvfrom(self, byteCount, flags=0):
if self.socket.type == SOCK_STREAM:
return self.recv(byteCount), None
# recvfrom() must not concatenate two or more packets.
# Each call should return the first 'byteCount' part of the packet.
data, address = self.recvChannel.receive()
return data[:byteCount], address
def close(self):
asyncore.dispatcher.close(self)
self.connected = False
self.accepting = False
self.sendBuffer = None # breaks the loop in sendall
# Clear out all the channels with relevant errors.
while self.acceptChannel and self.acceptChannel.balance < 0:
self.acceptChannel.send_exception(error, 9, 'Bad file descriptor')
while self.connectChannel and self.connectChannel.balance < 0:
self.connectChannel.send_exception(error, 10061, 'Connection refused')
while self.recvChannel and self.recvChannel.balance < 0:
# The closing of a socket is indicted by receiving nothing. The
# exception would have been sent if the server was killed, rather
# than closed down gracefully.
self.recvChannel.send("")
#self.recvChannel.send_exception(error, 10054, 'Connection reset by peer')
# asyncore doesn't support this. Why not?
def fileno(self):
return self.socket.fileno()
def handle_accept(self):
if self.acceptChannel and self.acceptChannel.balance < 0:
t = asyncore.dispatcher.accept(self)
if t is None:
return
t[0].setsockopt(SOL_SOCKET, SO_REUSEADDR, 1)
Tasklet.new(self.acceptChannel.send)(t)
# Inform the blocked connect call that the connection has been made.
def handle_connect(self):
if self.socket.type != SOCK_DGRAM:
self.wasConnected = True
self.connectChannel.send(None)
def handle_connect_event(self):
err = self.socket.getsockopt(SOL_SOCKET, SO_ERROR)
if err != 0:
self.connected = False
Tasklet.new(self.recvChannel.send_exception)(stdsocket.error, err)
else:
self.handle_connect()
self.connected = True
# Asyncore says its done but self.readBuffer may be non-empty
# so can't close yet. Do nothing and let 'recv' trigger the close.
def handle_close(self):
pass
# Some error, just close the channel and let that raise errors to
# blocked calls.
def handle_expt(self):
self.close()
def handle_read(self):
try:
if self.socket.type == SOCK_DGRAM:
ret = self.socket.recvfrom(20000)
else:
ret = asyncore.dispatcher.recv(self, 20000)
# Not sure this is correct, but it seems to give the
# right behaviour. Namely removing the socket from
# asyncore.
if not ret:
self.close()
Tasklet.new(self.recvChannel.send)(ret)
except stdsocket.error, err:
# If there's a read error assume the connection is
# broken and drop any pending output
if self.sendBuffer:
self.sendBuffer = ""
self.recvChannel.send_exception(stdsocket.error, err)
def handle_write(self):
if len(self.sendBuffer):
sentBytes = asyncore.dispatcher.send(self, self.sendBuffer[:512])
self.sendBuffer = self.sendBuffer[sentBytes:]
elif len(self.sendToBuffers):
data, address, channel, oldSentBytes = self.sendToBuffers[0]
sentBytes = self.socket.sendto(data, address)
totalSentBytes = oldSentBytes + sentBytes
if len(data) > sentBytes:
self.sendToBuffers[0] = data[sentBytes:], address, channel, totalSentBytes
else:
del self.sendToBuffers[0]
Tasklet.new(channel.send)(totalSentBytes)
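# Usage sketch (hypothetical; the in-use examples mentioned in the header
# comment are not present in this copy). Run a tasklet under concurrence's
# dispatcher with this module masquerading as the stdlib socket:
#
# from concurrence import dispatch
#
# def fetch():
#     install()                  # replace the stdlib socket internals
#     import urllib2
#     try:
#         print urllib2.urlopen('http://www.python.org/').read(100)
#     finally:
#         uninstall()
#
# dispatch(fetch)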
|
[
"devnull@localhost"
] |
devnull@localhost
|
f28154c1d8284b4c1afcf2b115181573d4880ff2
|
6b2a8dd202fdce77c971c412717e305e1caaac51
|
/solutions_5658282861527040_0/Python/ChevalierMalFet/lottery.py
|
187e6b8d05f410b95b0987ef03b00f8a8567c866
|
[] |
no_license
|
alexandraback/datacollection
|
0bc67a9ace00abbc843f4912562f3a064992e0e9
|
076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf
|
refs/heads/master
| 2021-01-24T18:27:24.417992 | 2017-05-23T09:23:38 | 2017-05-23T09:23:38 | 84,313,442 | 2 | 4 | null | null | null | null |
UTF-8
|
Python
| false | false | 509 |
py
|
inputFile = open('B-small-attempt0.in', 'r')
outputFile = open('B-small-attempt0.out', 'w')
numTests = int(inputFile.readline())
for i in range(numTests):
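    # For each case, brute-force count the pairs (m, n) with 0 <= m < a, 0 <= n < b and m & n < k.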
nums = map(lambda x: int(x), inputFile.readline().split())
a = nums[0]
b = nums[1]
k = nums[2]
count = 0
for m in range(a):
for n in range(b):
if m&n < k:
count += 1
outputFile.write('Case #'+str(i+1)+': ' + str(count) + '\n')
inputFile.close()
outputFile.close()
|
[
"[email protected]"
] | |
211199e7bff2d13c497e7a08f172ffc279744939
|
3dcf3b4d1822fefc0dcab8195af1239abe7971a1
|
/AMAO/apps/Avaliacao/Questao/views/__init__.py
|
554a3a1467bf3ffcd4b9de57c04ac01067e84fd5
|
[
"MIT"
] |
permissive
|
arruda/amao
|
a1b0abde81be98a04dee22af9ff0723ed7697fb8
|
83648aa2c408b1450d721b3072dc9db4b53edbb8
|
refs/heads/master
| 2021-01-13T02:11:52.776011 | 2014-09-20T15:43:16 | 2014-09-20T15:43:16 | 23,271,083 | 2 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 134 |
py
|
#from temporario import *
from resolucao import *
from gabarito import *
from criar import *
from exibir import *
from listar import *
|
[
"[email protected]"
] | |
de77b537516f29eb8f057077717c9b426ad9d33f
|
a4e4c3faa29043fc80f62a8442e2f8333cd23933
|
/MPI_test.py
|
5801da732e8dd08ddffe0d60fb141f04be8e6599
|
[] |
no_license
|
FangYang970206/Intrinsic_Image
|
652ab87c2d95b400cf80c6a49d1863a40d1cba07
|
3b8ec261b7b3aeaa1c611473f53fb4e23b82893b
|
refs/heads/master
| 2023-01-21T05:18:40.748488 | 2020-11-24T02:22:00 | 2020-11-24T02:22:00 | 228,824,635 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,477 |
py
|
import os
import random
import argparse
import torch
import torch.optim as optim
from tensorboardX import SummaryWriter
from torch.backends import cudnn
# import RIN
import RIN
import RIN_pipeline
import numpy as np
import scipy.misc
from utils import *
def main():
random.seed(9999)
torch.manual_seed(9999)
cudnn.benchmark = True
parser = argparse.ArgumentParser()
parser.add_argument('--split', type=str, default='ImageSplit')
parser.add_argument('--mode', type=str, default='test')
parser.add_argument('--save_path', type=str, default='MPI_logs\\RIID_origin_RIN_updateLR_CosBF_VGG0.1_shading_epoch240_ImageSplit_size256\\',
help='save path of model, visualizations, and tensorboard')
    parser.add_argument('--loader_threads', type=int, default=8,  # num_workers must be an int
                        help='number of parallel data-loading threads')
parser.add_argument('--state_dict', type=str, default='composer_state_231.t7')
args = parser.parse_args()
# pylint: disable=E1101
    device = torch.device("cuda:1" if torch.cuda.is_available() else 'cpu')  # no space in the device string
# pylint: disable=E1101
shader = RIN.Shader(output_ch=3)
reflection = RIN.Decomposer()
composer = RIN.Composer(reflection, shader).to(device)
# RIN.init_weights(composer, init_type='kaiming')
# MPI_Image_Split_test_txt = 'D:\\fangyang\\intrinsic_by_fangyang\\MPI_TXT\\MPI_main_imageSplit-fullsize-ChenSplit-test.txt'
# MPI_Scene_Split_test_txt = 'D:\\fangyang\\intrinsic_by_fangyang\\MPI_TXT\\MPI_main_sceneSplit-fullsize-NoDefect-test.txt'
MPI_Image_Split_test_txt = 'D:\\fangyang\\intrinsic_by_fangyang\\MPI_TXT\\MPI_main_imageSplit-256-test.txt'
MPI_Scene_Split_test_txt = 'D:\\fangyang\\intrinsic_by_fangyang\\MPI_TXT\\MPI_main_sceneSplit-256-test.txt'
if args.split == 'ImageSplit':
test_txt = MPI_Image_Split_test_txt
print('Image split mode')
else:
test_txt = MPI_Scene_Split_test_txt
print('Scene split mode')
composer.load_state_dict(torch.load(os.path.join(args.save_path, args.state_dict)))
print('load checkpoint success!')
test_set = RIN_pipeline.MPI_Dataset_Revisit(test_txt)
test_loader = torch.utils.data.DataLoader(test_set, batch_size=1, num_workers=args.loader_threads, shuffle=False)
check_folder(os.path.join(args.save_path, "refl_target"))
check_folder(os.path.join(args.save_path, "shad_target"))
check_folder(os.path.join(args.save_path, "refl_output"))
check_folder(os.path.join(args.save_path, "shad_output"))
check_folder(os.path.join(args.save_path, "shape_output"))
check_folder(os.path.join(args.save_path, "mask"))
composer.eval()
with torch.no_grad():
for ind, tensors in enumerate(test_loader):
print(ind)
inp = [t.to(device) for t in tensors]
input_g, albedo_g, shading_g, mask_g = inp
_, albedo_fake, shading_fake, shape_fake = composer.forward(input_g)
albedo_fake = albedo_fake*mask_g
lab_refl_targ = albedo_g.squeeze().cpu().numpy().transpose(1,2,0)
lab_sha_targ = shading_g.squeeze().cpu().numpy().transpose(1,2,0)
mask = mask_g.squeeze().cpu().numpy().transpose(1,2,0)
refl_pred = albedo_fake.squeeze().cpu().numpy().transpose(1,2,0)
sha_pred = shading_fake.squeeze().cpu().numpy().transpose(1,2,0)
shape_pred = shape_fake.squeeze().cpu().numpy().transpose(1,2,0)
lab_refl_targ = np.clip(lab_refl_targ, 0, 1)
lab_sha_targ = np.clip(lab_sha_targ, 0, 1)
refl_pred = np.clip(refl_pred, 0, 1)
sha_pred = np.clip(sha_pred, 0, 1)
shape_pred = np.clip(shape_pred, 0, 1)
mask = np.clip(mask, 0, 1)
scipy.misc.imsave(os.path.join(args.save_path, "refl_target", "{}.png".format(ind)), lab_refl_targ)
scipy.misc.imsave(os.path.join(args.save_path, "shad_target", "{}.png".format(ind)), lab_sha_targ)
scipy.misc.imsave(os.path.join(args.save_path, "mask", "{}.png".format(ind)), mask)
scipy.misc.imsave(os.path.join(args.save_path, "refl_output", "{}.png".format(ind)), refl_pred)
scipy.misc.imsave(os.path.join(args.save_path, "shad_output", "{}.png".format(ind)), sha_pred)
scipy.misc.imsave(os.path.join(args.save_path, "shape_output", "{}.png".format(ind)), shape_pred)
if __name__ == "__main__":
main()
|
[
"[email protected]"
] | |
4d73a80f98e9a05bb82e58207ff09f60fb78bf00
|
8fcf33eb32de3ebe34934820c4635f9333af224a
|
/preprocessing/modules/utility/__init__.py
|
02345f75b2eff67c8d535e28cb7eacb3d75cf5d5
|
[] |
no_license
|
Eadon999/wsd1
|
95b2bf105829b786393e44e7dbcd14b1009c8147
|
956206de1f22a93e591a0e5500f4f717b323a8ca
|
refs/heads/master
| 2021-10-12T01:14:17.641612 | 2019-01-31T12:37:09 | 2019-01-31T12:37:09 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 66 |
py
|
from .modules import UtilityModules
__all__ = ['UtilityModules']
|
[
"[email protected]"
] | |
0b4f08bcee570cf762e0b682205d8fdcec785a9e
|
53fab060fa262e5d5026e0807d93c75fb81e67b9
|
/backup/user_289/ch88_2020_05_11_13_11_45_850336.py
|
5e8eda89358bc1e91ffb03476a4549962854f650
|
[] |
no_license
|
gabriellaec/desoft-analise-exercicios
|
b77c6999424c5ce7e44086a12589a0ad43d6adca
|
01940ab0897aa6005764fc220b900e4d6161d36b
|
refs/heads/main
| 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 241 |
py
|
class Retangulo:
    # Fixed (hedged): the original computed dx = self.x - self.y and
    # dy = self.y - self.x, which are negatives of each other, so the
    # perimeter was always 0 and the area never positive. Here x and y
    # are taken to be the rectangle's side lengths set on the instance.
    def calcula_perimetro(self):
        return 2 * (self.x + self.y)

    def calcula_area(self):
        return self.x * self.y
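# Usage sketch (hypothetical harness; the exercise presumably sets the sides):
# r = Retangulo()
# r.x, r.y = 3, 4
# print(r.calcula_perimetro())  # 14
# print(r.calcula_area())       # 12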
|
[
"[email protected]"
] | |
e8bfec56b5c39e4bd3a759a4a033e6502c721abf
|
e5bc2c2c7ce172bf66cb526e6a27578e2919b807
|
/python/libs/r.py
|
c314af5e861f0aee2ddfd68a42af50f714b87c8b
|
[] |
no_license
|
github188/libs-1
|
c561c3e8875f2fed3351692af62f833585e95511
|
83bfeeb29e9fafdd274ef645d2602f81290fd9e2
|
refs/heads/master
| 2020-05-29T11:43:55.629814 | 2016-03-04T11:11:41 | 2016-03-04T11:11:41 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 312 |
py
|
#! python
import re
import sys
def MatchString(p,s):
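    # Report whether regex pattern p matches string s, then print all matches found.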
spat = re.compile(p)
if spat.search(s):
print '(%s) match (%s)'%(p,s)
sarr = re.findall(p,s)
print 'match (%s) (%s) (%s)'%(p,s,repr(sarr))
else:
print '(%s) not match (%s)'%(p,s)
if __name__ == '__main__':
MatchString(sys.argv[1],sys.argv[2])
|
[
"[email protected]"
] | |
fdef8858fee95042d8bf62e3cc9b60a763ae012f
|
cd557e3c2b34f30f2e7caf7c79c07ff6e109fbd3
|
/k2/addons/k2-monitor/monitor/__init__.py
|
0ffee09d23f4e86d8b089538069e89f0c4c99707
|
[
"Apache-2.0"
] |
permissive
|
Zenterio/opensourcelib
|
f005174c049df0f5deddc1269d7c343a8e219ca5
|
07f0dabffaceb7b6202b5f691cbad46dac5868a8
|
refs/heads/master
| 2022-12-09T02:53:36.444094 | 2021-04-28T18:03:24 | 2021-05-27T13:14:58 | 186,092,997 | 5 | 6 |
NOASSERTION
| 2022-12-07T23:37:26 | 2019-05-11T05:44:37 |
Groovy
|
UTF-8
|
Python
| false | false | 290 |
py
|
from zaf.messages.message import EndpointId, MessageId
MONITOR_ENDPOINT = EndpointId('monitor', """\
The K2 monitor addon endpoint.
""")
PERFORM_MEASUREMENT = MessageId(
'PERFORM_MEASUREMENT', """
Request that a monitor performs its measurements.
data: None
""")
|
[
"[email protected]"
] | |
66e68b0679a6c7ab9e1e751a07a8086ef46b0705
|
f0d3ef10061147fb3bd04774a8b4eac9e4d9b671
|
/feedly/serializers/cassandra/activity_serializer.py
|
9e1d0ac4a1564c446dc88d23ab34e3b65db56800
|
[
"BSD-3-Clause"
] |
permissive
|
jblomo/Feedly
|
9929077be3364d827aa03c4506ade29b819141cb
|
3e4999cc794231841e3b4909f0a73beabfcca046
|
refs/heads/master
| 2021-01-20T21:19:21.017683 | 2013-09-06T12:33:48 | 2013-09-06T12:33:48 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 972 |
py
|
from feedly.activity import Activity
from feedly.storage.cassandra.maps import ActivityMap
from feedly.verbs import get_verb_by_id
import pickle
from feedly.serializers.base import BaseSerializer
class CassandraActivitySerializer(BaseSerializer):
def dumps(self, activity):
return ActivityMap(
key=str(activity.serialization_id),
actor=activity.actor_id,
time=activity.time,
verb=activity.verb.id,
object=activity.object_id,
target=activity.target_id,
extra_context=pickle.dumps(activity.extra_context)
)
def loads(self, serialized_activity):
activity_kwargs = serialized_activity.__dict__.copy()
activity_kwargs.pop('key')
activity_kwargs['verb'] = get_verb_by_id(activity_kwargs['verb'])
activity_kwargs['extra_context'] = pickle.loads(
activity_kwargs['extra_context'])
return Activity(**activity_kwargs)
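# Design note (added; not in the original): the fixed Activity fields map onto
# typed columns of the Cassandra ActivityMap row, while extra_context has no
# fixed schema, so it is round-tripped through pickle. That is only safe with a
# trusted store, since pickle.loads can execute arbitrary code on malicious input.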
|
[
"[email protected]"
] | |
df16df9b7b4a3e8e07986df56e4f464254235aaa
|
0090756d7a6eb6ab8389ad23b20e89cd68dbd0e4
|
/배열정렬.py
|
2be7e2bad6cda422e8b99414109ecc6efe66010d
|
[] |
no_license
|
ssh6189/2019.12.16
|
5c3093e03ac793d5f0a93cf99e78c6483fcee6d8
|
c1021bb72b3fdc05d7f5e8ae350bbd6eee65b0d3
|
refs/heads/master
| 2020-12-13T19:19:04.558270 | 2020-01-17T08:47:04 | 2020-01-17T08:47:04 | 234,507,219 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 517 |
py
|
import numpy as np
# ndarray objects provide a sort() function that sorts elements along a given axis.
unsorted_arr = np.random.random((3, 3))
print(unsorted_arr)
# copy the array for the demo
unsorted_arr1 = unsorted_arr.copy()
unsorted_arr2 = unsorted_arr.copy()
unsorted_arr3 = unsorted_arr.copy()
unsorted_arr1.sort() # sort the array (defaults to the last axis)
print(unsorted_arr1)
unsorted_arr2.sort(axis=0) # sort along axis=0, i.e. down each column
print(unsorted_arr2)
unsorted_arr3.sort(axis=1) # sort along axis=1, i.e. across each row
print(unsorted_arr3)
|
[
"[email protected]"
] | |
ff864913415b8a2a5a2e635e87aacbbc3c42a60c
|
bd86f45ec9355cf1b76c25307d77c85ff98d30a8
|
/venv/lib/python2.7/site-packages/pygments/lexers/jvm.py
|
76e3c24c7bebc1418c408de54ac34d972ae45c15
|
[
"MIT"
] |
permissive
|
WhySoGeeky/DroidPot
|
fd39abe490117283f992d80f317574f47809de8d
|
7c3d9e975dae3835e2ccf42c425d65b26466e82a
|
refs/heads/master
| 2021-07-02T12:47:16.269514 | 2015-11-03T17:49:41 | 2015-11-03T17:49:41 | 45,484,292 | 6 | 0 |
MIT
| 2021-06-10T17:59:45 | 2015-11-03T17:44:48 |
Python
|
UTF-8
|
Python
| false | false | 66,056 |
py
|
# -*- coding: utf-8 -*-
"""
pygments.lexers.jvm
~~~~~~~~~~~~~~~~~~~
Pygments lexers for JVM languages.
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, \
this, combined, default, words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation
from pygments.util import shebang_matches
from pygments import unistring as uni
__all__ = ['JavaLexer', 'ScalaLexer', 'GosuLexer', 'GosuTemplateLexer',
'GroovyLexer', 'IokeLexer', 'ClojureLexer', 'ClojureScriptLexer',
'KotlinLexer', 'XtendLexer', 'AspectJLexer', 'CeylonLexer',
'PigLexer', 'GoloLexer', 'JasminLexer']
class JavaLexer(RegexLexer):
"""
For `Java <http://www.sun.com/java/>`_ source code.
"""
name = 'Java'
aliases = ['java']
filenames = ['*.java']
mimetypes = ['text/x-java']
flags = re.MULTILINE | re.DOTALL | re.UNICODE
tokens = {
'root': [
(r'[^\S\n]+', Text),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
# keywords: go before method names to avoid lexing "throw new XYZ"
# as a method signature
(r'(assert|break|case|catch|continue|default|do|else|finally|for|'
r'if|goto|instanceof|new|return|switch|this|throw|try|while)\b',
Keyword),
# method names
(r'((?:(?:[^\W\d]|\$)[\w.\[\]$<>]*\s+)+?)' # return arguments
r'((?:[^\W\d]|\$)[\w$]*)' # method name
r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Operator)),
(r'@[^\W\d][\w.]*', Name.Decorator),
(r'(abstract|const|enum|extends|final|implements|native|private|'
r'protected|public|static|strictfp|super|synchronized|throws|'
r'transient|volatile)\b', Keyword.Declaration),
(r'(boolean|byte|char|double|float|int|long|short|void)\b',
Keyword.Type),
(r'(package)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
(r'(true|false|null)\b', Keyword.Constant),
(r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text), 'class'),
(r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
(r'"(\\\\|\\"|[^"])*"', String),
(r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
(r'(\.)((?:[^\W\d]|\$)[\w$]*)', bygroups(Operator, Name.Attribute)),
(r'^\s*([^\W\d]|\$)[\w$]*:', Name.Label),
(r'([^\W\d]|\$)[\w$]*', Name),
(r'[~^*!%&\[\](){}<>|+=:;,./?-]', Operator),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-fA-F]+', Number.Hex),
(r'[0-9]+(_+[0-9]+)*L?', Number.Integer),
(r'\n', Text)
],
'class': [
(r'([^\W\d]|\$)[\w$]*', Name.Class, '#pop')
],
'import': [
(r'[\w.]+\*?', Name.Namespace, '#pop')
],
}
class AspectJLexer(JavaLexer):
"""
For `AspectJ <http://www.eclipse.org/aspectj/>`_ source code.
.. versionadded:: 1.6
"""
name = 'AspectJ'
aliases = ['aspectj']
filenames = ['*.aj']
mimetypes = ['text/x-aspectj']
aj_keywords = set((
'aspect', 'pointcut', 'privileged', 'call', 'execution',
'initialization', 'preinitialization', 'handler', 'get', 'set',
'staticinitialization', 'target', 'args', 'within', 'withincode',
'cflow', 'cflowbelow', 'annotation', 'before', 'after', 'around',
'proceed', 'throwing', 'returning', 'adviceexecution', 'declare',
'parents', 'warning', 'error', 'soft', 'precedence', 'thisJoinPoint',
'thisJoinPointStaticPart', 'thisEnclosingJoinPointStaticPart',
'issingleton', 'perthis', 'pertarget', 'percflow', 'percflowbelow',
'pertypewithin', 'lock', 'unlock', 'thisAspectInstance'
))
aj_inter_type = set(('parents:', 'warning:', 'error:', 'soft:', 'precedence:'))
aj_inter_type_annotation = set(('@type', '@method', '@constructor', '@field'))
def get_tokens_unprocessed(self, text):
for index, token, value in JavaLexer.get_tokens_unprocessed(self, text):
if token is Name and value in self.aj_keywords:
yield index, Keyword, value
elif token is Name.Label and value in self.aj_inter_type:
yield index, Keyword, value[:-1]
yield index, Operator, value[-1]
elif token is Name.Decorator and value in self.aj_inter_type_annotation:
yield index, Keyword, value
else:
yield index, token, value
class ScalaLexer(RegexLexer):
"""
For `Scala <http://www.scala-lang.org>`_ source code.
"""
name = 'Scala'
aliases = ['scala']
filenames = ['*.scala']
mimetypes = ['text/x-scala']
flags = re.MULTILINE | re.DOTALL
# don't use raw unicode strings!
op = (u'[-~\\^\\*!%&\\\\<>\\|+=:/?@\u00a6-\u00a7\u00a9\u00ac\u00ae\u00b0-\u00b1'
u'\u00b6\u00d7\u00f7\u03f6\u0482\u0606-\u0608\u060e-\u060f\u06e9'
u'\u06fd-\u06fe\u07f6\u09fa\u0b70\u0bf3-\u0bf8\u0bfa\u0c7f\u0cf1-\u0cf2'
u'\u0d79\u0f01-\u0f03\u0f13-\u0f17\u0f1a-\u0f1f\u0f34\u0f36\u0f38'
u'\u0fbe-\u0fc5\u0fc7-\u0fcf\u109e-\u109f\u1360\u1390-\u1399\u1940'
u'\u19e0-\u19ff\u1b61-\u1b6a\u1b74-\u1b7c\u2044\u2052\u207a-\u207c'
u'\u208a-\u208c\u2100-\u2101\u2103-\u2106\u2108-\u2109\u2114\u2116-\u2118'
u'\u211e-\u2123\u2125\u2127\u2129\u212e\u213a-\u213b\u2140-\u2144'
u'\u214a-\u214d\u214f\u2190-\u2328\u232b-\u244a\u249c-\u24e9\u2500-\u2767'
u'\u2794-\u27c4\u27c7-\u27e5\u27f0-\u2982\u2999-\u29d7\u29dc-\u29fb'
u'\u29fe-\u2b54\u2ce5-\u2cea\u2e80-\u2ffb\u3004\u3012-\u3013\u3020'
u'\u3036-\u3037\u303e-\u303f\u3190-\u3191\u3196-\u319f\u31c0-\u31e3'
u'\u3200-\u321e\u322a-\u3250\u3260-\u327f\u328a-\u32b0\u32c0-\u33ff'
u'\u4dc0-\u4dff\ua490-\ua4c6\ua828-\ua82b\ufb29\ufdfd\ufe62\ufe64-\ufe66'
u'\uff0b\uff1c-\uff1e\uff5c\uff5e\uffe2\uffe4\uffe8-\uffee\ufffc-\ufffd]+')
letter = (u'[a-zA-Z\\$_\u00aa\u00b5\u00ba\u00c0-\u00d6\u00d8-\u00f6'
u'\u00f8-\u02af\u0370-\u0373\u0376-\u0377\u037b-\u037d\u0386'
u'\u0388-\u03f5\u03f7-\u0481\u048a-\u0556\u0561-\u0587\u05d0-\u05f2'
u'\u0621-\u063f\u0641-\u064a\u066e-\u066f\u0671-\u06d3\u06d5'
u'\u06ee-\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5'
u'\u07b1\u07ca-\u07ea\u0904-\u0939\u093d\u0950\u0958-\u0961'
u'\u0972-\u097f\u0985-\u09b9\u09bd\u09ce\u09dc-\u09e1\u09f0-\u09f1'
u'\u0a05-\u0a39\u0a59-\u0a5e\u0a72-\u0a74\u0a85-\u0ab9\u0abd'
u'\u0ad0-\u0ae1\u0b05-\u0b39\u0b3d\u0b5c-\u0b61\u0b71\u0b83-\u0bb9'
u'\u0bd0\u0c05-\u0c3d\u0c58-\u0c61\u0c85-\u0cb9\u0cbd\u0cde-\u0ce1'
u'\u0d05-\u0d3d\u0d60-\u0d61\u0d7a-\u0d7f\u0d85-\u0dc6\u0e01-\u0e30'
u'\u0e32-\u0e33\u0e40-\u0e45\u0e81-\u0eb0\u0eb2-\u0eb3\u0ebd-\u0ec4'
u'\u0edc-\u0f00\u0f40-\u0f6c\u0f88-\u0f8b\u1000-\u102a\u103f'
u'\u1050-\u1055\u105a-\u105d\u1061\u1065-\u1066\u106e-\u1070'
u'\u1075-\u1081\u108e\u10a0-\u10fa\u1100-\u135a\u1380-\u138f'
u'\u13a0-\u166c\u166f-\u1676\u1681-\u169a\u16a0-\u16ea\u16ee-\u1711'
u'\u1720-\u1731\u1740-\u1751\u1760-\u1770\u1780-\u17b3\u17dc'
u'\u1820-\u1842\u1844-\u18a8\u18aa-\u191c\u1950-\u19a9\u19c1-\u19c7'
u'\u1a00-\u1a16\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae-\u1baf'
u'\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c77\u1d00-\u1d2b\u1d62-\u1d77'
u'\u1d79-\u1d9a\u1e00-\u1fbc\u1fbe\u1fc2-\u1fcc\u1fd0-\u1fdb'
u'\u1fe0-\u1fec\u1ff2-\u1ffc\u2071\u207f\u2102\u2107\u210a-\u2113'
u'\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u212f-\u2139'
u'\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2c7c'
u'\u2c80-\u2ce4\u2d00-\u2d65\u2d80-\u2dde\u3006-\u3007\u3021-\u3029'
u'\u3038-\u303a\u303c\u3041-\u3096\u309f\u30a1-\u30fa\u30ff-\u318e'
u'\u31a0-\u31b7\u31f0-\u31ff\u3400-\u4db5\u4e00-\ua014\ua016-\ua48c'
u'\ua500-\ua60b\ua610-\ua61f\ua62a-\ua66e\ua680-\ua697\ua722-\ua76f'
u'\ua771-\ua787\ua78b-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822'
u'\ua840-\ua873\ua882-\ua8b3\ua90a-\ua925\ua930-\ua946\uaa00-\uaa28'
u'\uaa40-\uaa42\uaa44-\uaa4b\uac00-\ud7a3\uf900-\ufb1d\ufb1f-\ufb28'
u'\ufb2a-\ufd3d\ufd50-\ufdfb\ufe70-\ufefc\uff21-\uff3a\uff41-\uff5a'
u'\uff66-\uff6f\uff71-\uff9d\uffa0-\uffdc]')
upper = (u'[A-Z\\$_\u00c0-\u00d6\u00d8-\u00de\u0100\u0102\u0104\u0106\u0108'
u'\u010a\u010c\u010e\u0110\u0112\u0114\u0116\u0118\u011a\u011c'
u'\u011e\u0120\u0122\u0124\u0126\u0128\u012a\u012c\u012e\u0130'
u'\u0132\u0134\u0136\u0139\u013b\u013d\u013f\u0141\u0143\u0145'
u'\u0147\u014a\u014c\u014e\u0150\u0152\u0154\u0156\u0158\u015a'
u'\u015c\u015e\u0160\u0162\u0164\u0166\u0168\u016a\u016c\u016e'
u'\u0170\u0172\u0174\u0176\u0178-\u0179\u017b\u017d\u0181-\u0182'
u'\u0184\u0186-\u0187\u0189-\u018b\u018e-\u0191\u0193-\u0194'
u'\u0196-\u0198\u019c-\u019d\u019f-\u01a0\u01a2\u01a4\u01a6-\u01a7'
u'\u01a9\u01ac\u01ae-\u01af\u01b1-\u01b3\u01b5\u01b7-\u01b8\u01bc'
u'\u01c4\u01c7\u01ca\u01cd\u01cf\u01d1\u01d3\u01d5\u01d7\u01d9'
u'\u01db\u01de\u01e0\u01e2\u01e4\u01e6\u01e8\u01ea\u01ec\u01ee'
u'\u01f1\u01f4\u01f6-\u01f8\u01fa\u01fc\u01fe\u0200\u0202\u0204'
u'\u0206\u0208\u020a\u020c\u020e\u0210\u0212\u0214\u0216\u0218'
u'\u021a\u021c\u021e\u0220\u0222\u0224\u0226\u0228\u022a\u022c'
u'\u022e\u0230\u0232\u023a-\u023b\u023d-\u023e\u0241\u0243-\u0246'
u'\u0248\u024a\u024c\u024e\u0370\u0372\u0376\u0386\u0388-\u038f'
u'\u0391-\u03ab\u03cf\u03d2-\u03d4\u03d8\u03da\u03dc\u03de\u03e0'
u'\u03e2\u03e4\u03e6\u03e8\u03ea\u03ec\u03ee\u03f4\u03f7'
u'\u03f9-\u03fa\u03fd-\u042f\u0460\u0462\u0464\u0466\u0468\u046a'
u'\u046c\u046e\u0470\u0472\u0474\u0476\u0478\u047a\u047c\u047e'
u'\u0480\u048a\u048c\u048e\u0490\u0492\u0494\u0496\u0498\u049a'
u'\u049c\u049e\u04a0\u04a2\u04a4\u04a6\u04a8\u04aa\u04ac\u04ae'
u'\u04b0\u04b2\u04b4\u04b6\u04b8\u04ba\u04bc\u04be\u04c0-\u04c1'
u'\u04c3\u04c5\u04c7\u04c9\u04cb\u04cd\u04d0\u04d2\u04d4\u04d6'
u'\u04d8\u04da\u04dc\u04de\u04e0\u04e2\u04e4\u04e6\u04e8\u04ea'
u'\u04ec\u04ee\u04f0\u04f2\u04f4\u04f6\u04f8\u04fa\u04fc\u04fe'
u'\u0500\u0502\u0504\u0506\u0508\u050a\u050c\u050e\u0510\u0512'
u'\u0514\u0516\u0518\u051a\u051c\u051e\u0520\u0522\u0531-\u0556'
u'\u10a0-\u10c5\u1e00\u1e02\u1e04\u1e06\u1e08\u1e0a\u1e0c\u1e0e'
u'\u1e10\u1e12\u1e14\u1e16\u1e18\u1e1a\u1e1c\u1e1e\u1e20\u1e22'
u'\u1e24\u1e26\u1e28\u1e2a\u1e2c\u1e2e\u1e30\u1e32\u1e34\u1e36'
u'\u1e38\u1e3a\u1e3c\u1e3e\u1e40\u1e42\u1e44\u1e46\u1e48\u1e4a'
u'\u1e4c\u1e4e\u1e50\u1e52\u1e54\u1e56\u1e58\u1e5a\u1e5c\u1e5e'
u'\u1e60\u1e62\u1e64\u1e66\u1e68\u1e6a\u1e6c\u1e6e\u1e70\u1e72'
u'\u1e74\u1e76\u1e78\u1e7a\u1e7c\u1e7e\u1e80\u1e82\u1e84\u1e86'
u'\u1e88\u1e8a\u1e8c\u1e8e\u1e90\u1e92\u1e94\u1e9e\u1ea0\u1ea2'
u'\u1ea4\u1ea6\u1ea8\u1eaa\u1eac\u1eae\u1eb0\u1eb2\u1eb4\u1eb6'
u'\u1eb8\u1eba\u1ebc\u1ebe\u1ec0\u1ec2\u1ec4\u1ec6\u1ec8\u1eca'
u'\u1ecc\u1ece\u1ed0\u1ed2\u1ed4\u1ed6\u1ed8\u1eda\u1edc\u1ede'
u'\u1ee0\u1ee2\u1ee4\u1ee6\u1ee8\u1eea\u1eec\u1eee\u1ef0\u1ef2'
u'\u1ef4\u1ef6\u1ef8\u1efa\u1efc\u1efe\u1f08-\u1f0f\u1f18-\u1f1d'
u'\u1f28-\u1f2f\u1f38-\u1f3f\u1f48-\u1f4d\u1f59-\u1f5f'
u'\u1f68-\u1f6f\u1fb8-\u1fbb\u1fc8-\u1fcb\u1fd8-\u1fdb'
u'\u1fe8-\u1fec\u1ff8-\u1ffb\u2102\u2107\u210b-\u210d\u2110-\u2112'
u'\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u2130-\u2133'
u'\u213e-\u213f\u2145\u2183\u2c00-\u2c2e\u2c60\u2c62-\u2c64\u2c67'
u'\u2c69\u2c6b\u2c6d-\u2c6f\u2c72\u2c75\u2c80\u2c82\u2c84\u2c86'
u'\u2c88\u2c8a\u2c8c\u2c8e\u2c90\u2c92\u2c94\u2c96\u2c98\u2c9a'
u'\u2c9c\u2c9e\u2ca0\u2ca2\u2ca4\u2ca6\u2ca8\u2caa\u2cac\u2cae'
u'\u2cb0\u2cb2\u2cb4\u2cb6\u2cb8\u2cba\u2cbc\u2cbe\u2cc0\u2cc2'
u'\u2cc4\u2cc6\u2cc8\u2cca\u2ccc\u2cce\u2cd0\u2cd2\u2cd4\u2cd6'
u'\u2cd8\u2cda\u2cdc\u2cde\u2ce0\u2ce2\ua640\ua642\ua644\ua646'
u'\ua648\ua64a\ua64c\ua64e\ua650\ua652\ua654\ua656\ua658\ua65a'
u'\ua65c\ua65e\ua662\ua664\ua666\ua668\ua66a\ua66c\ua680\ua682'
u'\ua684\ua686\ua688\ua68a\ua68c\ua68e\ua690\ua692\ua694\ua696'
u'\ua722\ua724\ua726\ua728\ua72a\ua72c\ua72e\ua732\ua734\ua736'
u'\ua738\ua73a\ua73c\ua73e\ua740\ua742\ua744\ua746\ua748\ua74a'
u'\ua74c\ua74e\ua750\ua752\ua754\ua756\ua758\ua75a\ua75c\ua75e'
u'\ua760\ua762\ua764\ua766\ua768\ua76a\ua76c\ua76e\ua779\ua77b'
u'\ua77d-\ua77e\ua780\ua782\ua784\ua786\ua78b\uff21-\uff3a]')
idrest = u'%s(?:%s|[0-9])*(?:(?<=_)%s)?' % (letter, letter, op)
letter_letter_digit = u'%s(?:%s|\d)*' % (letter, letter)
tokens = {
'root': [
# method names
(r'(class|trait|object)(\s+)', bygroups(Keyword, Text), 'class'),
(u"'%s" % idrest, Text.Symbol),
(r'[^\S\n]+', Text),
(r'//.*?\n', Comment.Single),
(r'/\*', Comment.Multiline, 'comment'),
(u'@%s' % idrest, Name.Decorator),
(u'(abstract|ca(?:se|tch)|d(?:ef|o)|e(?:lse|xtends)|'
u'f(?:inal(?:ly)?|or(?:Some)?)|i(?:f|mplicit)|'
u'lazy|match|new|override|pr(?:ivate|otected)'
u'|re(?:quires|turn)|s(?:ealed|uper)|'
u't(?:h(?:is|row)|ry)|va[lr]|w(?:hile|ith)|yield)\\b|'
u'(<[%:-]|=>|>:|[#=@_\u21D2\u2190])(\\b|(?=\\s)|$)', Keyword),
(u':(?!%s)' % op, Keyword, 'type'),
(u'%s%s\\b' % (upper, idrest), Name.Class),
(r'(true|false|null)\b', Keyword.Constant),
(r'(import|package)(\s+)', bygroups(Keyword, Text), 'import'),
(r'(type)(\s+)', bygroups(Keyword, Text), 'type'),
(r'""".*?"""(?!")', String),
(r'"(\\\\|\\"|[^"])*"', String),
(r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
(r'[fs]"""', String, 'interptriplestring'), # interpolated strings
(r'[fs]"', String, 'interpstring'), # interpolated strings
(r'raw"(\\\\|\\"|[^"])*"', String), # raw strings
# (ur'(\.)(%s|%s|`[^`]+`)' % (idrest, op), bygroups(Operator,
# Name.Attribute)),
(idrest, Name),
(r'`[^`]+`', Name),
(r'\[', Operator, 'typeparam'),
(r'[(){};,.#]', Operator),
(op, Operator),
(r'([0-9][0-9]*\.[0-9]*|\.[0-9]+)([eE][+-]?[0-9]+)?[fFdD]?',
Number.Float),
(r'0x[0-9a-fA-F]+', Number.Hex),
(r'[0-9]+L?', Number.Integer),
(r'\n', Text)
],
'class': [
(u'(%s|%s|`[^`]+`)(\\s*)(\\[)' % (idrest, op),
bygroups(Name.Class, Text, Operator), 'typeparam'),
(r'\s+', Text),
(r'\{', Operator, '#pop'),
(r'\(', Operator, '#pop'),
(r'//.*?\n', Comment.Single, '#pop'),
(u'%s|%s|`[^`]+`' % (idrest, op), Name.Class, '#pop'),
],
'type': [
(r'\s+', Text),
(r'<[%:]|>:|[#_]|forSome|type', Keyword),
(u'([,);}]|=>|=|\u21d2)(\\s*)', bygroups(Operator, Text), '#pop'),
(r'[({]', Operator, '#push'),
(u'((?:%s|%s|`[^`]+`)(?:\\.(?:%s|%s|`[^`]+`))*)(\\s*)(\\[)' %
(idrest, op, idrest, op),
bygroups(Keyword.Type, Text, Operator), ('#pop', 'typeparam')),
(u'((?:%s|%s|`[^`]+`)(?:\\.(?:%s|%s|`[^`]+`))*)(\\s*)$' %
(idrest, op, idrest, op),
bygroups(Keyword.Type, Text), '#pop'),
(r'//.*?\n', Comment.Single, '#pop'),
(u'\\.|%s|%s|`[^`]+`' % (idrest, op), Keyword.Type)
],
'typeparam': [
(r'[\s,]+', Text),
(u'<[%:]|=>|>:|[#_\u21D2]|forSome|type', Keyword),
(r'([\])}])', Operator, '#pop'),
(r'[(\[{]', Operator, '#push'),
(u'\\.|%s|%s|`[^`]+`' % (idrest, op), Keyword.Type)
],
'comment': [
(r'[^/*]+', Comment.Multiline),
(r'/\*', Comment.Multiline, '#push'),
(r'\*/', Comment.Multiline, '#pop'),
(r'[*/]', Comment.Multiline)
],
'import': [
(u'(%s|\\.)+' % idrest, Name.Namespace, '#pop')
],
'interpstringcommon': [
(r'[^"$\\]+', String),
(r'\$\$', String),
(r'\$' + letter_letter_digit, String.Interpol),
(r'\$\{', String.Interpol, 'interpbrace'),
(r'\\.', String),
],
'interptriplestring': [
(r'"""(?!")', String, '#pop'),
(r'"', String),
include('interpstringcommon'),
],
'interpstring': [
(r'"', String, '#pop'),
include('interpstringcommon'),
],
'interpbrace': [
(r'\}', String.Interpol, '#pop'),
(r'\{', String.Interpol, '#push'),
include('root'),
],
}
class GosuLexer(RegexLexer):
"""
For Gosu source code.
.. versionadded:: 1.5
"""
name = 'Gosu'
aliases = ['gosu']
filenames = ['*.gs', '*.gsx', '*.gsp', '*.vark']
mimetypes = ['text/x-gosu']
flags = re.MULTILINE | re.DOTALL
tokens = {
'root': [
# method names
(r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # modifiers etc.
r'([a-zA-Z_]\w*)' # method name
r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Operator)),
(r'[^\S\n]+', Text),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
(r'@[a-zA-Z_][\w.]*', Name.Decorator),
(r'(in|as|typeof|statictypeof|typeis|typeas|if|else|foreach|for|'
r'index|while|do|continue|break|return|try|catch|finally|this|'
r'throw|new|switch|case|default|eval|super|outer|classpath|'
r'using)\b', Keyword),
(r'(var|delegate|construct|function|private|internal|protected|'
r'public|abstract|override|final|static|extends|transient|'
r'implements|represents|readonly)\b', Keyword.Declaration),
(r'(property\s+)(get|set)?', Keyword.Declaration),
(r'(boolean|byte|char|double|float|int|long|short|void|block)\b',
Keyword.Type),
(r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
(r'(true|false|null|NaN|Infinity)\b', Keyword.Constant),
(r'(class|interface|enhancement|enum)(\s+)([a-zA-Z_]\w*)',
bygroups(Keyword.Declaration, Text, Name.Class)),
(r'(uses)(\s+)([\w.]+\*?)',
bygroups(Keyword.Namespace, Text, Name.Namespace)),
(r'"', String, 'string'),
(r'(\??[.#])([a-zA-Z_]\w*)',
bygroups(Operator, Name.Attribute)),
(r'(:)([a-zA-Z_]\w*)',
bygroups(Operator, Name.Attribute)),
(r'[a-zA-Z_$]\w*', Name),
(r'and|or|not|[\\~^*!%&\[\](){}<>|+=:;,./?-]', Operator),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'[0-9]+', Number.Integer),
(r'\n', Text)
],
'templateText': [
(r'(\\<)|(\\\$)', String),
(r'(<%@\s+)(extends|params)',
bygroups(Operator, Name.Decorator), 'stringTemplate'),
(r'<%!--.*?--%>', Comment.Multiline),
(r'(<%)|(<%=)', Operator, 'stringTemplate'),
(r'\$\{', Operator, 'stringTemplateShorthand'),
(r'.', String)
],
'string': [
(r'"', String, '#pop'),
include('templateText')
],
'stringTemplate': [
(r'"', String, 'string'),
(r'%>', Operator, '#pop'),
include('root')
],
'stringTemplateShorthand': [
(r'"', String, 'string'),
(r'\{', Operator, 'stringTemplateShorthand'),
(r'\}', Operator, '#pop'),
include('root')
],
}
class GosuTemplateLexer(Lexer):
"""
For Gosu templates.
.. versionadded:: 1.5
"""
name = 'Gosu Template'
aliases = ['gst']
filenames = ['*.gst']
mimetypes = ['text/x-gosu-template']
def get_tokens_unprocessed(self, text):
lexer = GosuLexer()
stack = ['templateText']
for item in lexer.get_tokens_unprocessed(text, stack):
yield item
class GroovyLexer(RegexLexer):
"""
For `Groovy <http://groovy.codehaus.org/>`_ source code.
.. versionadded:: 1.5
"""
name = 'Groovy'
aliases = ['groovy']
filenames = ['*.groovy']
mimetypes = ['text/x-groovy']
flags = re.MULTILINE | re.DOTALL
tokens = {
'root': [
# Groovy allows a file to start with a shebang
(r'#!(.*?)$', Comment.Preproc, 'base'),
default('base'),
],
'base': [
# method names
(r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # return arguments
r'([a-zA-Z_]\w*)' # method name
r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Operator)),
(r'[^\S\n]+', Text),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
(r'@[a-zA-Z_][\w.]*', Name.Decorator),
(r'(assert|break|case|catch|continue|default|do|else|finally|for|'
r'if|goto|instanceof|new|return|switch|this|throw|try|while|in|as)\b',
Keyword),
(r'(abstract|const|enum|extends|final|implements|native|private|'
r'protected|public|static|strictfp|super|synchronized|throws|'
r'transient|volatile)\b', Keyword.Declaration),
(r'(def|boolean|byte|char|double|float|int|long|short|void)\b',
Keyword.Type),
(r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
(r'(true|false|null)\b', Keyword.Constant),
(r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text),
'class'),
(r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
(r'""".*?"""', String.Double),
(r"'''.*?'''", String.Single),
(r'"(\\\\|\\"|[^"])*"', String.Double),
(r"'(\\\\|\\'|[^'])*'", String.Single),
(r'\$/((?!/\$).)*/\$', String),
(r'/(\\\\|\\"|[^/])*/', String),
(r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
(r'(\.)([a-zA-Z_]\w*)', bygroups(Operator, Name.Attribute)),
(r'[a-zA-Z_]\w*:', Name.Label),
(r'[a-zA-Z_$]\w*', Name),
(r'[~^*!%&\[\](){}<>|+=:;,./?-]', Operator),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-fA-F]+', Number.Hex),
(r'[0-9]+L?', Number.Integer),
(r'\n', Text)
],
'class': [
(r'[a-zA-Z_]\w*', Name.Class, '#pop')
],
'import': [
(r'[\w.]+\*?', Name.Namespace, '#pop')
],
}
def analyse_text(text):
return shebang_matches(text, r'groovy')
class IokeLexer(RegexLexer):
"""
For `Ioke <http://ioke.org/>`_ (a strongly typed, dynamic,
prototype based programming language) source.
.. versionadded:: 1.4
"""
name = 'Ioke'
filenames = ['*.ik']
aliases = ['ioke', 'ik']
mimetypes = ['text/x-iokesrc']
tokens = {
'interpolatableText': [
(r'(\\b|\\e|\\t|\\n|\\f|\\r|\\"|\\\\|\\#|\\\Z|\\u[0-9a-fA-F]{1,4}'
r'|\\[0-3]?[0-7]?[0-7])', String.Escape),
(r'#\{', Punctuation, 'textInterpolationRoot')
],
'text': [
(r'(?<!\\)"', String, '#pop'),
include('interpolatableText'),
(r'[^"]', String)
],
'documentation': [
(r'(?<!\\)"', String.Doc, '#pop'),
include('interpolatableText'),
(r'[^"]', String.Doc)
],
'textInterpolationRoot': [
(r'\}', Punctuation, '#pop'),
include('root')
],
'slashRegexp': [
(r'(?<!\\)/[oxpniums]*', String.Regex, '#pop'),
include('interpolatableText'),
(r'\\/', String.Regex),
(r'[^/]', String.Regex)
],
'squareRegexp': [
(r'(?<!\\)][oxpniums]*', String.Regex, '#pop'),
include('interpolatableText'),
(r'\\]', String.Regex),
(r'[^\]]', String.Regex)
],
'squareText': [
(r'(?<!\\)]', String, '#pop'),
include('interpolatableText'),
(r'[^\]]', String)
],
'root': [
(r'\n', Text),
(r'\s+', Text),
# Comments
(r';(.*?)\n', Comment),
(r'\A#!(.*?)\n', Comment),
# Regexps
(r'#/', String.Regex, 'slashRegexp'),
(r'#r\[', String.Regex, 'squareRegexp'),
# Symbols
(r':[\w!:?]+', String.Symbol),
(r'[\w!:?]+:(?![\w!?])', String.Other),
(r':"(\\\\|\\"|[^"])*"', String.Symbol),
# Documentation
(r'((?<=fn\()|(?<=fnx\()|(?<=method\()|(?<=macro\()|(?<=lecro\()'
r'|(?<=syntax\()|(?<=dmacro\()|(?<=dlecro\()|(?<=dlecrox\()'
r'|(?<=dsyntax\())\s*"', String.Doc, 'documentation'),
# Text
(r'"', String, 'text'),
(r'#\[', String, 'squareText'),
# Mimic
(r'\w[\w!:?]+(?=\s*=.*mimic\s)', Name.Entity),
# Assignment
(r'[a-zA-Z_][\w!:?]*(?=[\s]*[+*/-]?=[^=].*($|\.))',
Name.Variable),
# keywords
(r'(break|cond|continue|do|ensure|for|for:dict|for:set|if|let|'
r'loop|p:for|p:for:dict|p:for:set|return|unless|until|while|'
r'with)(?![\w!:?])', Keyword.Reserved),
# Origin
(r'(eval|mimic|print|println)(?![\w!:?])', Keyword),
# Base
(r'(cell\?|cellNames|cellOwner\?|cellOwner|cells|cell|'
r'documentation|hash|identity|mimic|removeCell\!|undefineCell\!)'
r'(?![\w!:?])', Keyword),
# Ground
(r'(stackTraceAsText)(?![\w!:?])', Keyword),
# DefaultBehaviour Literals
(r'(dict|list|message|set)(?![\w!:?])', Keyword.Reserved),
# DefaultBehaviour Case
(r'(case|case:and|case:else|case:nand|case:nor|case:not|case:or|'
r'case:otherwise|case:xor)(?![\w!:?])', Keyword.Reserved),
# DefaultBehaviour Reflection
(r'(asText|become\!|derive|freeze\!|frozen\?|in\?|is\?|kind\?|'
r'mimic\!|mimics|mimics\?|prependMimic\!|removeAllMimics\!|'
r'removeMimic\!|same\?|send|thaw\!|uniqueHexId)'
r'(?![\w!:?])', Keyword),
# DefaultBehaviour Aspects
(r'(after|around|before)(?![\w!:?])', Keyword.Reserved),
# DefaultBehaviour
(r'(kind|cellDescriptionDict|cellSummary|genSym|inspect|notice)'
r'(?![\w!:?])', Keyword),
(r'(use|destructuring)', Keyword.Reserved),
# DefaultBehavior BaseBehavior
(r'(cell\?|cellOwner\?|cellOwner|cellNames|cells|cell|'
r'documentation|identity|removeCell!|undefineCell)'
r'(?![\w!:?])', Keyword),
# DefaultBehavior Internal
(r'(internal:compositeRegexp|internal:concatenateText|'
r'internal:createDecimal|internal:createNumber|'
r'internal:createRegexp|internal:createText)'
r'(?![\w!:?])', Keyword.Reserved),
# DefaultBehaviour Conditions
(r'(availableRestarts|bind|error\!|findRestart|handle|'
r'invokeRestart|rescue|restart|signal\!|warn\!)'
r'(?![\w!:?])', Keyword.Reserved),
# constants
(r'(nil|false|true)(?![\w!:?])', Name.Constant),
# names
(r'(Arity|Base|Call|Condition|DateTime|Aspects|Pointcut|'
r'Assignment|BaseBehavior|Boolean|Case|AndCombiner|Else|'
r'NAndCombiner|NOrCombiner|NotCombiner|OrCombiner|XOrCombiner|'
r'Conditions|Definitions|FlowControl|Internal|Literals|'
r'Reflection|DefaultMacro|DefaultMethod|DefaultSyntax|Dict|'
r'FileSystem|Ground|Handler|Hook|IO|IokeGround|Struct|'
r'LexicalBlock|LexicalMacro|List|Message|Method|Mixins|'
r'NativeMethod|Number|Origin|Pair|Range|Reflector|Regexp Match|'
r'Regexp|Rescue|Restart|Runtime|Sequence|Set|Symbol|'
r'System|Text|Tuple)(?![\w!:?])', Name.Builtin),
# functions
(u'(generateMatchMethod|aliasMethod|\u03bb|\u028E|fnx|fn|method|'
u'dmacro|dlecro|syntax|macro|dlecrox|lecrox|lecro|syntax)'
u'(?![\w!:?])', Name.Function),
# Numbers
(r'-?0[xX][0-9a-fA-F]+', Number.Hex),
(r'-?(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
(r'-?\d+', Number.Integer),
(r'#\(', Punctuation),
# Operators
(r'(&&>>|\|\|>>|\*\*>>|:::|::|\.\.\.|===|\*\*>|\*\*=|&&>|&&=|'
r'\|\|>|\|\|=|\->>|\+>>|!>>|<>>>|<>>|&>>|%>>|#>>|@>>|/>>|\*>>|'
r'\?>>|\|>>|\^>>|~>>|\$>>|=>>|<<=|>>=|<=>|<\->|=~|!~|=>|\+\+|'
r'\-\-|<=|>=|==|!=|&&|\.\.|\+=|\-=|\*=|\/=|%=|&=|\^=|\|=|<\-|'
r'\+>|!>|<>|&>|%>|#>|\@>|\/>|\*>|\?>|\|>|\^>|~>|\$>|<\->|\->|'
r'<<|>>|\*\*|\?\||\?&|\|\||>|<|\*|\/|%|\+|\-|&|\^|\||=|\$|!|~|'
u'\\?|#|\u2260|\u2218|\u2208|\u2209)', Operator),
(r'(and|nand|or|xor|nor|return|import)(?![\w!?])',
Operator),
# Punctuation
(r'(\`\`|\`|\'\'|\'|\.|\,|@@|@|\[|\]|\(|\)|\{|\})', Punctuation),
# kinds
(r'[A-Z][\w!:?]*', Name.Class),
# default cellnames
(r'[a-z_][\w!:?]*', Name)
]
}
class ClojureLexer(RegexLexer):
"""
Lexer for `Clojure <http://clojure.org/>`_ source code.
.. versionadded:: 0.11
"""
name = 'Clojure'
aliases = ['clojure', 'clj']
filenames = ['*.clj']
mimetypes = ['text/x-clojure', 'application/x-clojure']
special_forms = (
'.', 'def', 'do', 'fn', 'if', 'let', 'new', 'quote', 'var', 'loop'
)
# It's safe to consider 'ns' a declaration thing because it defines a new
# namespace.
declarations = (
'def-', 'defn', 'defn-', 'defmacro', 'defmulti', 'defmethod',
'defstruct', 'defonce', 'declare', 'definline', 'definterface',
'defprotocol', 'defrecord', 'deftype', 'defproject', 'ns'
)
builtins = (
'*', '+', '-', '->', '/', '<', '<=', '=', '==', '>', '>=', '..',
'accessor', 'agent', 'agent-errors', 'aget', 'alength', 'all-ns',
'alter', 'and', 'append-child', 'apply', 'array-map', 'aset',
'aset-boolean', 'aset-byte', 'aset-char', 'aset-double', 'aset-float',
'aset-int', 'aset-long', 'aset-short', 'assert', 'assoc', 'await',
'await-for', 'bean', 'binding', 'bit-and', 'bit-not', 'bit-or',
'bit-shift-left', 'bit-shift-right', 'bit-xor', 'boolean', 'branch?',
'butlast', 'byte', 'cast', 'char', 'children', 'class',
'clear-agent-errors', 'comment', 'commute', 'comp', 'comparator',
'complement', 'concat', 'conj', 'cons', 'constantly', 'cond', 'if-not',
'construct-proxy', 'contains?', 'count', 'create-ns', 'create-struct',
'cycle', 'dec', 'deref', 'difference', 'disj', 'dissoc', 'distinct',
'doall', 'doc', 'dorun', 'doseq', 'dosync', 'dotimes', 'doto',
'double', 'down', 'drop', 'drop-while', 'edit', 'end?', 'ensure',
'eval', 'every?', 'false?', 'ffirst', 'file-seq', 'filter', 'find',
'find-doc', 'find-ns', 'find-var', 'first', 'float', 'flush', 'for',
'fnseq', 'frest', 'gensym', 'get-proxy-class', 'get',
'hash-map', 'hash-set', 'identical?', 'identity', 'if-let', 'import',
'in-ns', 'inc', 'index', 'insert-child', 'insert-left', 'insert-right',
'inspect-table', 'inspect-tree', 'instance?', 'int', 'interleave',
'intersection', 'into', 'into-array', 'iterate', 'join', 'key', 'keys',
'keyword', 'keyword?', 'last', 'lazy-cat', 'lazy-cons', 'left',
'lefts', 'line-seq', 'list*', 'list', 'load', 'load-file',
'locking', 'long', 'loop', 'macroexpand', 'macroexpand-1',
'make-array', 'make-node', 'map', 'map-invert', 'map?', 'mapcat',
'max', 'max-key', 'memfn', 'merge', 'merge-with', 'meta', 'min',
'min-key', 'name', 'namespace', 'neg?', 'new', 'newline', 'next',
'nil?', 'node', 'not', 'not-any?', 'not-every?', 'not=', 'ns-imports',
'ns-interns', 'ns-map', 'ns-name', 'ns-publics', 'ns-refers',
'ns-resolve', 'ns-unmap', 'nth', 'nthrest', 'or', 'parse', 'partial',
'path', 'peek', 'pop', 'pos?', 'pr', 'pr-str', 'print', 'print-str',
'println', 'println-str', 'prn', 'prn-str', 'project', 'proxy',
'proxy-mappings', 'quot', 'rand', 'rand-int', 'range', 're-find',
're-groups', 're-matcher', 're-matches', 're-pattern', 're-seq',
'read', 'read-line', 'reduce', 'ref', 'ref-set', 'refer', 'rem',
'remove', 'remove-method', 'remove-ns', 'rename', 'rename-keys',
'repeat', 'replace', 'replicate', 'resolve', 'rest', 'resultset-seq',
'reverse', 'rfirst', 'right', 'rights', 'root', 'rrest', 'rseq',
'second', 'select', 'select-keys', 'send', 'send-off', 'seq',
'seq-zip', 'seq?', 'set', 'short', 'slurp', 'some', 'sort',
'sort-by', 'sorted-map', 'sorted-map-by', 'sorted-set',
'special-symbol?', 'split-at', 'split-with', 'str', 'string?',
'struct', 'struct-map', 'subs', 'subvec', 'symbol', 'symbol?',
'sync', 'take', 'take-nth', 'take-while', 'test', 'time', 'to-array',
'to-array-2d', 'tree-seq', 'true?', 'union', 'up', 'update-proxy',
'val', 'vals', 'var-get', 'var-set', 'var?', 'vector', 'vector-zip',
'vector?', 'when', 'when-first', 'when-let', 'when-not',
'with-local-vars', 'with-meta', 'with-open', 'with-out-str',
'xml-seq', 'xml-zip', 'zero?', 'zipmap', 'zipper')
# valid names for identifiers
# well, names can only not consist fully of numbers
# but this should be good enough for now
# TODO / should divide keywords/symbols into namespace/rest
# but that's hard, so just pretend / is part of the name
valid_name = r'(?!#)[\w!$%*+<=>?/.#-]+'
tokens = {
'root': [
# the comments - always starting with semicolon
# and going to the end of the line
(r';.*$', Comment.Single),
# whitespaces - usually not relevant
(r'[,\s]+', Text),
# numbers
(r'-?\d+\.\d+', Number.Float),
(r'-?\d+', Number.Integer),
(r'0x-?[abcdef\d]+', Number.Hex),
# strings, symbols and characters
(r'"(\\\\|\\"|[^"])*"', String),
(r"'" + valid_name, String.Symbol),
(r"\\(.|[a-z]+)", String.Char),
# keywords
(r'::?#?' + valid_name, String.Symbol),
# special operators
(r'~@|[`\'#^~&@]', Operator),
# highlight the special forms
(words(special_forms, suffix=' '), Keyword),
# Technically, only the special forms are 'keywords'. The problem
# is that only treating them as keywords means that things like
# 'defn' and 'ns' need to be highlighted as builtins. This is ugly
# and weird for most styles. So, as a compromise we're going to
# highlight them as Keyword.Declarations.
(words(declarations, suffix=' '), Keyword.Declaration),
# highlight the builtins
(words(builtins, suffix=' '), Name.Builtin),
# the remaining functions
(r'(?<=\()' + valid_name, Name.Function),
# find the remaining variables
(valid_name, Name.Variable),
# Clojure accepts vector notation
(r'(\[|\])', Punctuation),
# Clojure accepts map notation
(r'(\{|\})', Punctuation),
# the famous parentheses!
(r'(\(|\))', Punctuation),
],
}
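# A quick usage sketch (assumes Pygments is installed; illustrative only,
# run it separately rather than at module import time):
#
#     from pygments import highlight
#     from pygments.formatters import TerminalFormatter
#     print(highlight('(defn add [a b] (+ a b))',
#                     ClojureLexer(), TerminalFormatter()))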
class ClojureScriptLexer(ClojureLexer):
"""
Lexer for `ClojureScript <http://clojure.org/clojurescript>`_
source code.
.. versionadded:: 2.0
"""
name = 'ClojureScript'
aliases = ['clojurescript', 'cljs']
filenames = ['*.cljs']
mimetypes = ['text/x-clojurescript', 'application/x-clojurescript']
class TeaLangLexer(RegexLexer):
"""
For `Tea <http://teatrove.org/>`_ source code. Only used within a
TeaTemplateLexer.
.. versionadded:: 1.5
"""
flags = re.MULTILINE | re.DOTALL
tokens = {
'root': [
# method names
(r'^(\s*(?:[a-zA-Z_][\w\.\[\]]*\s+)+?)' # return arguments
r'([a-zA-Z_]\w*)' # method name
r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Operator)),
(r'[^\S\n]+', Text),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
(r'@[a-zA-Z_][\w\.]*', Name.Decorator),
(r'(and|break|else|foreach|if|in|not|or|reverse)\b',
Keyword),
(r'(as|call|define)\b', Keyword.Declaration),
(r'(true|false|null)\b', Keyword.Constant),
(r'(template)(\s+)', bygroups(Keyword.Declaration, Text), 'template'),
(r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
(r'"(\\\\|\\"|[^"])*"', String),
(r'\'(\\\\|\\\'|[^\'])*\'', String),
(r'(\.)([a-zA-Z_]\w*)', bygroups(Operator, Name.Attribute)),
(r'[a-zA-Z_]\w*:', Name.Label),
(r'[a-zA-Z_\$]\w*', Name),
(r'(isa|[.]{3}|[.]{2}|[=#!<>+-/%&;,.\*\\\(\)\[\]\{\}])', Operator),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-fA-F]+', Number.Hex),
(r'[0-9]+L?', Number.Integer),
(r'\n', Text)
],
'template': [
(r'[a-zA-Z_]\w*', Name.Class, '#pop')
],
'import': [
(r'[\w.]+\*?', Name.Namespace, '#pop')
],
}
class CeylonLexer(RegexLexer):
"""
For `Ceylon <http://ceylon-lang.org/>`_ source code.
.. versionadded:: 1.6
"""
name = 'Ceylon'
aliases = ['ceylon']
filenames = ['*.ceylon']
mimetypes = ['text/x-ceylon']
flags = re.MULTILINE | re.DOTALL
#: optional Comment or Whitespace
_ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
tokens = {
'root': [
# method names
(r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # return arguments
r'([a-zA-Z_]\w*)' # method name
r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Operator)),
(r'[^\S\n]+', Text),
(r'//.*?\n', Comment.Single),
(r'/\*', Comment.Multiline, 'comment'),
(r'(variable|shared|abstract|doc|by|formal|actual|late|native)',
Name.Decorator),
(r'(break|case|catch|continue|default|else|finally|for|in|'
r'variable|if|return|switch|this|throw|try|while|is|exists|dynamic|'
r'nonempty|then|outer|assert)\b', Keyword),
(r'(abstracts|extends|satisfies|adapts|'
r'super|given|of|out|assign|'
r'transient|volatile)\b', Keyword.Declaration),
(r'(function|value|void)\b',
Keyword.Type),
(r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
(r'(true|false|null)\b', Keyword.Constant),
(r'(class|interface|object|alias)(\s+)',
bygroups(Keyword.Declaration, Text), 'class'),
(r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
(r'"(\\\\|\\"|[^"])*"', String),
(r"'\\.'|'[^\\]'|'\\\{#[0-9a-fA-F]{4}\}'", String.Char),
(r'".*``.*``.*"', String.Interpol),
(r'(\.)([a-z_]\w*)',
bygroups(Operator, Name.Attribute)),
(r'[a-zA-Z_]\w*:', Name.Label),
(r'[a-zA-Z_]\w*', Name),
(r'[~^*!%&\[\](){}<>|+=:;,./?-]', Operator),
(r'\d{1,3}(_\d{3})+\.\d{1,3}(_\d{3})+[kMGTPmunpf]?', Number.Float),
(r'\d{1,3}(_\d{3})+\.[0-9]+([eE][+-]?[0-9]+)?[kMGTPmunpf]?',
Number.Float),
(r'[0-9][0-9]*\.\d{1,3}(_\d{3})+[kMGTPmunpf]?', Number.Float),
(r'[0-9][0-9]*\.[0-9]+([eE][+-]?[0-9]+)?[kMGTPmunpf]?',
Number.Float),
(r'#([0-9a-fA-F]{4})(_[0-9a-fA-F]{4})+', Number.Hex),
(r'#[0-9a-fA-F]+', Number.Hex),
(r'\$([01]{4})(_[01]{4})+', Number.Bin),
(r'\$[01]+', Number.Bin),
(r'\d{1,3}(_\d{3})+[kMGTP]?', Number.Integer),
(r'[0-9]+[kMGTP]?', Number.Integer),
(r'\n', Text)
],
'class': [
(r'[A-Za-z_]\w*', Name.Class, '#pop')
],
'import': [
(r'[a-z][\w.]*',
Name.Namespace, '#pop')
],
'comment': [
(r'[^*/]', Comment.Multiline),
(r'/\*', Comment.Multiline, '#push'),
(r'\*/', Comment.Multiline, '#pop'),
(r'[*/]', Comment.Multiline)
],
}
class KotlinLexer(RegexLexer):
"""
For `Kotlin <http://kotlin.jetbrains.org/>`_
source code.
.. versionadded:: 1.5
"""
name = 'Kotlin'
aliases = ['kotlin']
filenames = ['*.kt']
mimetypes = ['text/x-kotlin']
flags = re.MULTILINE | re.DOTALL | re.UNICODE
kt_name = ('@?[_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl') + ']' +
'[' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl', 'Nd', 'Pc', 'Cf',
'Mn', 'Mc') + ']*')
kt_id = '(' + kt_name + '|`' + kt_name + '`)'
tokens = {
'root': [
(r'^\s*\[.*?\]', Name.Attribute),
(r'[^\S\n]+', Text),
(r'\\\n', Text), # line continuation
(r'//.*?\n', Comment.Single),
(r'/[*].*?[*]/', Comment.Multiline),
(r'\n', Text),
(r'::|!!|\?[:.]', Operator),
(r'[~!%^&*()+=|\[\]:;,.<>/?-]', Punctuation),
(r'[{}]', Punctuation),
(r'@"(""|[^"])*"', String),
(r'"(\\\\|\\"|[^"\n])*["\n]', String),
(r"'\\.'|'[^\\]'", String.Char),
(r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?[flFL]?|"
r"0[xX][0-9a-fA-F]+[Ll]?", Number),
(r'(class)(\s+)(object)', bygroups(Keyword, Text, Keyword)),
(r'(class|trait|object)(\s+)', bygroups(Keyword, Text), 'class'),
(r'(package|import)(\s+)', bygroups(Keyword, Text), 'package'),
(r'(val|var)(\s+)', bygroups(Keyword, Text), 'property'),
(r'(fun)(\s+)', bygroups(Keyword, Text), 'function'),
(r'(abstract|annotation|as|break|by|catch|class|continue|do|else|'
r'enum|false|final|finally|for|fun|get|if|import|in|inner|'
r'internal|is|null|object|open|out|override|package|private|'
r'protected|public|reified|return|set|super|this|throw|trait|'
r'true|try|type|val|var|vararg|when|where|while|This)\b', Keyword),
(kt_id, Name),
],
'package': [
(r'\S+', Name.Namespace, '#pop')
],
'class': [
(kt_id, Name.Class, '#pop')
],
'property': [
(kt_id, Name.Property, '#pop')
],
'function': [
(kt_id, Name.Function, '#pop')
],
}
class XtendLexer(RegexLexer):
"""
For `Xtend <http://xtend-lang.org/>`_ source code.
.. versionadded:: 1.6
"""
name = 'Xtend'
aliases = ['xtend']
filenames = ['*.xtend']
mimetypes = ['text/x-xtend']
flags = re.MULTILINE | re.DOTALL
tokens = {
'root': [
# method names
(r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # return arguments
r'([a-zA-Z_$][\w$]*)' # method name
r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Operator)),
(r'[^\S\n]+', Text),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
(r'@[a-zA-Z_][\w.]*', Name.Decorator),
(r'(assert|break|case|catch|continue|default|do|else|finally|for|'
r'if|goto|instanceof|new|return|switch|this|throw|try|while|IF|'
r'ELSE|ELSEIF|ENDIF|FOR|ENDFOR|SEPARATOR|BEFORE|AFTER)\b',
Keyword),
(r'(def|abstract|const|enum|extends|final|implements|native|private|'
r'protected|public|static|strictfp|super|synchronized|throws|'
r'transient|volatile)\b', Keyword.Declaration),
(r'(boolean|byte|char|double|float|int|long|short|void)\b',
Keyword.Type),
(r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
(r'(true|false|null)\b', Keyword.Constant),
(r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text),
'class'),
(r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
(r"(''')", String, 'template'),
(u'(\u00BB)', String, 'template'),
(r'"(\\\\|\\"|[^"])*"', String),
(r"'(\\\\|\\'|[^'])*'", String),
(r'[a-zA-Z_]\w*:', Name.Label),
(r'[a-zA-Z_$]\w*', Name),
(r'[~^*!%&\[\](){}<>\|+=:;,./?-]', Operator),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-fA-F]+', Number.Hex),
(r'[0-9]+L?', Number.Integer),
(r'\n', Text)
],
'class': [
(r'[a-zA-Z_]\w*', Name.Class, '#pop')
],
'import': [
(r'[\w.]+\*?', Name.Namespace, '#pop')
],
'template': [
(r"'''", String, '#pop'),
(u'\u00AB', String, '#pop'),
(r'.', String)
],
}
class PigLexer(RegexLexer):
"""
For `Pig Latin <https://pig.apache.org/>`_ source code.
.. versionadded:: 2.0
"""
name = 'Pig'
aliases = ['pig']
filenames = ['*.pig']
mimetypes = ['text/x-pig']
flags = re.MULTILINE | re.IGNORECASE
tokens = {
'root': [
(r'\s+', Text),
(r'--.*', Comment),
(r'/\*[\w\W]*?\*/', Comment.Multiline),
(r'\\\n', Text),
(r'\\', Text),
(r'\'(?:\\[ntbrf\\\']|\\u[0-9a-f]{4}|[^\'\\\n\r])*\'', String),
include('keywords'),
include('types'),
include('builtins'),
include('punct'),
include('operators'),
(r'[0-9]*\.[0-9]+(e[0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-f]+', Number.Hex),
(r'[0-9]+L?', Number.Integer),
(r'\n', Text),
(r'([a-z_]\w*)(\s*)(\()',
bygroups(Name.Function, Text, Punctuation)),
(r'[()#:]', Text),
(r'[^(:#\'")\s]+', Text),
(r'\S+\s+', Text) # TODO: make tests pass without \s+
],
'keywords': [
(r'(assert|and|any|all|arrange|as|asc|bag|by|cache|CASE|cat|cd|cp|'
r'%declare|%default|define|dense|desc|describe|distinct|du|dump|'
             r'eval|exec|explain|filter|flatten|foreach|full|generate|group|'
r'help|if|illustrate|import|inner|input|into|is|join|kill|left|'
r'limit|load|ls|map|matches|mkdir|mv|not|null|onschema|or|order|'
r'outer|output|parallel|pig|pwd|quit|register|returns|right|rm|'
r'rmf|rollup|run|sample|set|ship|split|stderr|stdin|stdout|store|'
r'stream|through|union|using|void)\b', Keyword)
],
'builtins': [
(r'(AVG|BinStorage|cogroup|CONCAT|copyFromLocal|copyToLocal|COUNT|'
r'cross|DIFF|MAX|MIN|PigDump|PigStorage|SIZE|SUM|TextLoader|'
r'TOKENIZE)\b', Name.Builtin)
],
'types': [
(r'(bytearray|BIGINTEGER|BIGDECIMAL|chararray|datetime|double|float|'
r'int|long|tuple)\b', Keyword.Type)
],
'punct': [
(r'[;(){}\[\]]', Punctuation),
],
'operators': [
(r'[#=,./%+\-?]', Operator),
(r'(eq|gt|lt|gte|lte|neq|matches)\b', Operator),
(r'(==|<=|<|>=|>|!=)', Operator),
],
}
class GoloLexer(RegexLexer):
"""
For `Golo <http://golo-lang.org/>`_ source code.
.. versionadded:: 2.0
"""
name = 'Golo'
filenames = ['*.golo']
aliases = ['golo']
tokens = {
'root': [
(r'[^\S\n]+', Text),
(r'#.*$', Comment),
(r'(\^|\.\.\.|:|\?:|->|==|!=|=|\+|\*|%|/|<=|<|>=|>|=|\.)',
Operator),
(r'(?<=[^-])(-)(?=[^-])', Operator),
(r'(?<=[^`])(is|isnt|and|or|not|oftype|in|orIfNull)\b', Operator.Word),
(r'[]{}|(),[]', Punctuation),
(r'(module|import)(\s+)',
bygroups(Keyword.Namespace, Text),
'modname'),
(r'\b([a-zA-Z_][\w$.]*)(::)', bygroups(Name.Namespace, Punctuation)),
(r'\b([a-zA-Z_][\w$]*(?:\.[a-zA-Z_][\w$]*)+)\b', Name.Namespace),
(r'(let|var)(\s+)',
bygroups(Keyword.Declaration, Text),
'varname'),
(r'(struct)(\s+)',
bygroups(Keyword.Declaration, Text),
'structname'),
(r'(function)(\s+)',
bygroups(Keyword.Declaration, Text),
'funcname'),
(r'(null|true|false)\b', Keyword.Constant),
(r'(augment|pimp'
r'|if|else|case|match|return'
r'|case|when|then|otherwise'
r'|while|for|foreach'
r'|try|catch|finally|throw'
r'|local'
r'|continue|break)\b', Keyword),
(r'(map|array|list|set|vector|tuple)(\[)',
bygroups(Name.Builtin, Punctuation)),
(r'(print|println|readln|raise|fun'
r'|asInterfaceInstance)\b', Name.Builtin),
(r'(`?[a-zA-Z_][\w$]*)(\()',
bygroups(Name.Function, Punctuation)),
(r'-?[\d_]*\.[\d_]*([eE][+-]?\d[\d_]*)?F?', Number.Float),
(r'0[0-7]+j?', Number.Oct),
(r'0[xX][a-fA-F0-9]+', Number.Hex),
(r'-?\d[\d_]*L', Number.Integer.Long),
(r'-?\d[\d_]*', Number.Integer),
            (r'`?[a-zA-Z_][\w$]*', Name),
(r'@[a-zA-Z_][\w$.]*', Name.Decorator),
(r'"""', String, combined('stringescape', 'triplestring')),
(r'"', String, combined('stringescape', 'doublestring')),
(r"'", String, combined('stringescape', 'singlestring')),
(r'----((.|\n)*?)----', String.Doc)
],
'funcname': [
(r'`?[a-zA-Z_][\w$]*', Name.Function, '#pop'),
],
'modname': [
(r'[a-zA-Z_][\w$.]*\*?', Name.Namespace, '#pop')
],
'structname': [
(r'`?[\w.]+\*?', Name.Class, '#pop')
],
'varname': [
(r'`?[a-zA-Z_][\w$]*', Name.Variable, '#pop'),
],
'string': [
(r'[^\\\'"\n]+', String),
(r'[\'"\\]', String)
],
'stringescape': [
(r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
],
'triplestring': [
(r'"""', String, '#pop'),
include('string'),
(r'\n', String),
],
'doublestring': [
(r'"', String.Double, '#pop'),
include('string'),
],
'singlestring': [
(r"'", String, '#pop'),
include('string'),
],
'operators': [
(r'[#=,./%+\-?]', Operator),
(r'(eq|gt|lt|gte|lte|neq|matches)\b', Operator),
(r'(==|<=|<|>=|>|!=)', Operator),
],
}
class JasminLexer(RegexLexer):
"""
For `Jasmin <http://jasmin.sourceforge.net/>`_ assembly code.
.. versionadded:: 2.0
"""
name = 'Jasmin'
aliases = ['jasmin', 'jasminxt']
filenames = ['*.j']
_whitespace = r' \n\t\r'
_ws = r'(?:[%s]+)' % _whitespace
_separator = r'%s:=' % _whitespace
_break = r'(?=[%s]|$)' % _separator
_name = r'[^%s]+' % _separator
_unqualified_name = r'(?:[^%s.;\[/]+)' % _separator
tokens = {
'default': [
(r'\n', Text, '#pop'),
(r"'", String.Single, ('#pop', 'quote')),
(r'"', String.Double, 'string'),
(r'=', Punctuation),
(r':', Punctuation, 'label'),
(_ws, Text),
(r';.*', Comment.Single),
(r'(\$[-+])?0x-?[\da-fA-F]+%s' % _break, Number.Hex),
(r'(\$[-+]|\+)?-?\d+%s' % _break, Number.Integer),
(r'-?(\d+\.\d*|\.\d+)([eE][-+]?\d+)?[fFdD]?'
r'[\x00-\x08\x0b\x0c\x0e-\x1f]*%s' % _break, Number.Float),
(r'\$%s' % _name, Name.Variable),
# Directives
(r'\.annotation%s' % _break, Keyword.Reserved, 'annotation'),
(r'(\.attribute|\.bytecode|\.debug|\.deprecated|\.enclosing|'
r'\.interface|\.line|\.signature|\.source|\.stack|\.var|abstract|'
r'annotation|bridge|class|default|enum|field|final|fpstrict|'
r'interface|native|private|protected|public|signature|static|'
r'synchronized|synthetic|transient|varargs|volatile)%s' % _break,
Keyword.Reserved),
(r'\.catch%s' % _break, Keyword.Reserved, 'caught-exception'),
(r'(\.class|\.implements|\.inner|\.super|inner|invisible|'
r'invisibleparam|outer|visible|visibleparam)%s' % _break,
Keyword.Reserved, 'class/convert-dots'),
(r'\.field%s' % _break, Keyword.Reserved,
('descriptor/convert-dots', 'field')),
(r'(\.end|\.limit|use)%s' % _break, Keyword.Reserved,
'no-verification'),
(r'\.method%s' % _break, Keyword.Reserved, 'method'),
(r'\.set%s' % _break, Keyword.Reserved, 'var'),
(r'\.throws%s' % _break, Keyword.Reserved, 'exception'),
(r'(from|offset|to|using)%s' % _break, Keyword.Reserved, 'label'),
(r'is%s' % _break, Keyword.Reserved,
('descriptor/convert-dots', 'var')),
(r'(locals|stack)%s' % _break, Keyword.Reserved, 'verification'),
(r'method%s' % _break, Keyword.Reserved, 'enclosing-method'),
# Instructions
(words((
'aaload', 'aastore', 'aconst_null', 'aload', 'aload_0', 'aload_1', 'aload_2',
'aload_3', 'aload_w', 'areturn', 'arraylength', 'astore', 'astore_0', 'astore_1',
'astore_2', 'astore_3', 'astore_w', 'athrow', 'baload', 'bastore', 'bipush',
'breakpoint', 'caload', 'castore', 'd2f', 'd2i', 'd2l', 'dadd', 'daload', 'dastore',
'dcmpg', 'dcmpl', 'dconst_0', 'dconst_1', 'ddiv', 'dload', 'dload_0', 'dload_1',
'dload_2', 'dload_3', 'dload_w', 'dmul', 'dneg', 'drem', 'dreturn', 'dstore', 'dstore_0',
'dstore_1', 'dstore_2', 'dstore_3', 'dstore_w', 'dsub', 'dup', 'dup2', 'dup2_x1',
'dup2_x2', 'dup_x1', 'dup_x2', 'f2d', 'f2i', 'f2l', 'fadd', 'faload', 'fastore', 'fcmpg',
'fcmpl', 'fconst_0', 'fconst_1', 'fconst_2', 'fdiv', 'fload', 'fload_0', 'fload_1',
'fload_2', 'fload_3', 'fload_w', 'fmul', 'fneg', 'frem', 'freturn', 'fstore', 'fstore_0',
'fstore_1', 'fstore_2', 'fstore_3', 'fstore_w', 'fsub', 'i2b', 'i2c', 'i2d', 'i2f', 'i2l',
'i2s', 'iadd', 'iaload', 'iand', 'iastore', 'iconst_0', 'iconst_1', 'iconst_2',
'iconst_3', 'iconst_4', 'iconst_5', 'iconst_m1', 'idiv', 'iinc', 'iinc_w', 'iload',
'iload_0', 'iload_1', 'iload_2', 'iload_3', 'iload_w', 'imul', 'ineg', 'int2byte',
'int2char', 'int2short', 'ior', 'irem', 'ireturn', 'ishl', 'ishr', 'istore', 'istore_0',
'istore_1', 'istore_2', 'istore_3', 'istore_w', 'isub', 'iushr', 'ixor', 'l2d', 'l2f',
'l2i', 'ladd', 'laload', 'land', 'lastore', 'lcmp', 'lconst_0', 'lconst_1', 'ldc2_w',
'ldiv', 'lload', 'lload_0', 'lload_1', 'lload_2', 'lload_3', 'lload_w', 'lmul', 'lneg',
'lookupswitch', 'lor', 'lrem', 'lreturn', 'lshl', 'lshr', 'lstore', 'lstore_0',
'lstore_1', 'lstore_2', 'lstore_3', 'lstore_w', 'lsub', 'lushr', 'lxor',
'monitorenter', 'monitorexit', 'nop', 'pop', 'pop2', 'ret', 'ret_w', 'return', 'saload',
'sastore', 'sipush', 'swap'), suffix=_break), Keyword.Reserved),
(r'(anewarray|checkcast|instanceof|ldc|ldc_w|new)%s' % _break,
Keyword.Reserved, 'class/no-dots'),
(r'invoke(dynamic|interface|nonvirtual|special|'
r'static|virtual)%s' % _break, Keyword.Reserved,
'invocation'),
(r'(getfield|putfield)%s' % _break, Keyword.Reserved,
('descriptor/no-dots', 'field')),
(r'(getstatic|putstatic)%s' % _break, Keyword.Reserved,
('descriptor/no-dots', 'static')),
(words((
'goto', 'goto_w', 'if_acmpeq', 'if_acmpne', 'if_icmpeq',
'if_icmpge', 'if_icmpgt', 'if_icmple', 'if_icmplt', 'if_icmpne',
'ifeq', 'ifge', 'ifgt', 'ifle', 'iflt', 'ifne', 'ifnonnull',
'ifnull', 'jsr', 'jsr_w'), suffix=_break),
Keyword.Reserved, 'label'),
(r'(multianewarray|newarray)%s' % _break, Keyword.Reserved,
'descriptor/convert-dots'),
(r'tableswitch%s' % _break, Keyword.Reserved, 'table')
],
'quote': [
(r"'", String.Single, '#pop'),
(r'\\u[\da-fA-F]{4}', String.Escape),
(r"[^'\\]+", String.Single)
],
'string': [
(r'"', String.Double, '#pop'),
(r'\\([nrtfb"\'\\]|u[\da-fA-F]{4}|[0-3]?[0-7]{1,2})',
String.Escape),
(r'[^"\\]+', String.Double)
],
'root': [
(r'\n+', Text),
(r"'", String.Single, 'quote'),
include('default'),
(r'(%s)([ \t\r]*)(:)' % _name,
bygroups(Name.Label, Text, Punctuation)),
(_name, String.Other)
],
'annotation': [
(r'\n', Text, ('#pop', 'annotation-body')),
(r'default%s' % _break, Keyword.Reserved,
('#pop', 'annotation-default')),
include('default')
],
'annotation-body': [
(r'\n+', Text),
(r'\.end%s' % _break, Keyword.Reserved, '#pop'),
include('default'),
(_name, String.Other, ('annotation-items', 'descriptor/no-dots'))
],
'annotation-default': [
(r'\n+', Text),
(r'\.end%s' % _break, Keyword.Reserved, '#pop'),
include('default'),
default(('annotation-items', 'descriptor/no-dots'))
],
'annotation-items': [
(r"'", String.Single, 'quote'),
include('default'),
(_name, String.Other)
],
'caught-exception': [
(r'all%s' % _break, Keyword, '#pop'),
include('exception')
],
'class/convert-dots': [
include('default'),
(r'(L)((?:%s[/.])*)(%s)(;)' % (_unqualified_name, _name),
bygroups(Keyword.Type, Name.Namespace, Name.Class, Punctuation),
'#pop'),
(r'((?:%s[/.])*)(%s)' % (_unqualified_name, _name),
bygroups(Name.Namespace, Name.Class), '#pop')
],
'class/no-dots': [
include('default'),
(r'\[+', Punctuation, ('#pop', 'descriptor/no-dots')),
(r'(L)((?:%s/)*)(%s)(;)' % (_unqualified_name, _name),
bygroups(Keyword.Type, Name.Namespace, Name.Class, Punctuation),
'#pop'),
(r'((?:%s/)*)(%s)' % (_unqualified_name, _name),
bygroups(Name.Namespace, Name.Class), '#pop')
],
'descriptor/convert-dots': [
include('default'),
(r'\[+', Punctuation),
(r'(L)((?:%s[/.])*)(%s?)(;)' % (_unqualified_name, _name),
bygroups(Keyword.Type, Name.Namespace, Name.Class, Punctuation),
'#pop'),
(r'[^%s\[)L]+' % _separator, Keyword.Type, '#pop'),
default('#pop')
],
'descriptor/no-dots': [
include('default'),
(r'\[+', Punctuation),
(r'(L)((?:%s/)*)(%s)(;)' % (_unqualified_name, _name),
bygroups(Keyword.Type, Name.Namespace, Name.Class, Punctuation),
'#pop'),
(r'[^%s\[)L]+' % _separator, Keyword.Type, '#pop'),
default('#pop')
],
'descriptors/convert-dots': [
(r'\)', Punctuation, '#pop'),
default('descriptor/convert-dots')
],
'enclosing-method': [
(_ws, Text),
(r'(?=[^%s]*\()' % _separator, Text, ('#pop', 'invocation')),
default(('#pop', 'class/convert-dots'))
],
'exception': [
include('default'),
(r'((?:%s[/.])*)(%s)' % (_unqualified_name, _name),
bygroups(Name.Namespace, Name.Exception), '#pop')
],
'field': [
(r'static%s' % _break, Keyword.Reserved, ('#pop', 'static')),
include('default'),
(r'((?:%s[/.](?=[^%s]*[/.]))*)(%s[/.])?(%s)' %
(_unqualified_name, _separator, _unqualified_name, _name),
bygroups(Name.Namespace, Name.Class, Name.Variable.Instance),
'#pop')
],
'invocation': [
include('default'),
(r'((?:%s[/.](?=[^%s(]*[/.]))*)(%s[/.])?(%s)(\()' %
(_unqualified_name, _separator, _unqualified_name, _name),
bygroups(Name.Namespace, Name.Class, Name.Function, Punctuation),
('#pop', 'descriptor/convert-dots', 'descriptors/convert-dots',
'descriptor/convert-dots'))
],
'label': [
include('default'),
(_name, Name.Label, '#pop')
],
'method': [
include('default'),
(r'(%s)(\()' % _name, bygroups(Name.Function, Punctuation),
('#pop', 'descriptor/convert-dots', 'descriptors/convert-dots',
'descriptor/convert-dots'))
],
'no-verification': [
(r'(locals|method|stack)%s' % _break, Keyword.Reserved, '#pop'),
include('default')
],
'static': [
include('default'),
(r'((?:%s[/.](?=[^%s]*[/.]))*)(%s[/.])?(%s)' %
(_unqualified_name, _separator, _unqualified_name, _name),
bygroups(Name.Namespace, Name.Class, Name.Variable.Class), '#pop')
],
'table': [
(r'\n+', Text),
(r'default%s' % _break, Keyword.Reserved, '#pop'),
include('default'),
(_name, Name.Label)
],
'var': [
include('default'),
(_name, Name.Variable, '#pop')
],
'verification': [
include('default'),
(r'(Double|Float|Integer|Long|Null|Top|UninitializedThis)%s' %
_break, Keyword, '#pop'),
(r'Object%s' % _break, Keyword, ('#pop', 'class/no-dots')),
(r'Uninitialized%s' % _break, Keyword, ('#pop', 'label'))
]
}
def analyse_text(text):
score = 0
if re.search(r'^\s*\.class\s', text, re.MULTILINE):
score += 0.5
if re.search(r'^\s*[a-z]+_[a-z]+\b', text, re.MULTILINE):
score += 0.3
if re.search(r'^\s*\.(attribute|bytecode|debug|deprecated|enclosing|'
r'inner|interface|limit|set|signature|stack)\b', text,
re.MULTILINE):
score += 0.6
return score
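# Sketch of how analyse_text is consumed (an illustrative addition, assuming
# Pygments is installed): guess_lexer calls each lexer's analyse_text and
# picks the highest scorer, so a '.class' directive should favor JasminLexer.
#
#     from pygments.lexers import guess_lexer
#     print(guess_lexer('.class public HelloWorld\n'
#                       '.super java/lang/Object\n'))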
|
[
"[email protected]"
] | |
992d39d8e5c5649b6954e1bd952fb77fbc4f0cb5
|
768c3fd42e0d3b407d89ccd9a3b3ace9eb0414c5
|
/user/migrations/0004_city.py
|
5c51ffa30bf5c417f1dc9f699aad8d54ad519165
|
[] |
no_license
|
samkayz/LocationAPi
|
a644a45c6eb4ba6fb198b9992b5b79a89d6d9960
|
e7d601467e73ab127c61be257c2354dcd3aee21c
|
refs/heads/master
| 2023-08-14T03:33:32.574732 | 2020-05-03T18:38:08 | 2020-05-03T18:38:08 | 260,606,040 | 0 | 0 | null | 2021-09-22T18:57:32 | 2020-05-02T03:27:12 |
JavaScript
|
UTF-8
|
Python
| false | false | 761 |
py
|
# Generated by Django 3.0.5 on 2020-05-02 02:10
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('user', '0003_auto_20200430_1954'),
]
operations = [
migrations.CreateModel(
name='City',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('city', models.CharField(max_length=100)),
('lat', models.CharField(max_length=100)),
('lng', models.CharField(max_length=100)),
('state', models.CharField(max_length=100)),
],
options={
'db_table': 'city',
},
),
]
|
[
"[email protected]"
] | |
3d294da817dabde94948c630eda7e6f79b1cf950
|
233f97c6f360d478bf975016dd9e9c2be4a64adb
|
/guvi_4_1_3.py
|
00eb925ae9c76c8511522e660cf1e63382c44343
|
[] |
no_license
|
unknownboyy/GUVI
|
3dbd1bb2bc6b3db52f5f79491accd6c56a2dec45
|
d757dd473c4f5eef526a516cf64a1757eb235869
|
refs/heads/master
| 2020-03-27T00:07:12.449280 | 2019-03-19T12:57:03 | 2019-03-19T12:57:03 | 145,595,379 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 418 |
py
|
s1,s2=input().split()
column=len(s1)+1
row=len(s2)+1
c=[[0 for i in range(column)] for i in range(row)]
for i in range(1,row):
c[i][0]=i
for i in range(1,column):
c[0][i]=i
for i in range(1,row):
for j in range(1,column):
if s2[i-1]==s1[j-1]:
            c[i][j]=c[i-1][j-1]  # characters match: carry over the diagonal cost
else:
c[i][j]=min(c[i-1][j-1],c[i-1][j],c[i][j-1])+1
print(c[row-1][column-1])
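# Illustrative run (hypothetical input): entering "kitten sitting" prints 3,
# the classic Levenshtein edit distance between the two words.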
|
[
"[email protected]"
] | |
12302780bb6c2667bc5932b57d681745fe7155e5
|
2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae
|
/python/python_27647.py
|
ff8f8287a5c0a51a03fcb76d8570639dde3aba61
|
[] |
no_license
|
AK-1121/code_extraction
|
cc812b6832b112e3ffcc2bb7eb4237fd85c88c01
|
5297a4a3aab3bb37efa24a89636935da04a1f8b6
|
refs/heads/master
| 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 108 |
py
|
# ValidationError in Django Model
kwargs['time'] = datetime.datetime.strptime(v, 'YOUR_TIME_FORMAT').time()
|
[
"[email protected]"
] | |
0cc0ef34f5666bda22936a734d144ace9100a9b7
|
9d123c6b87b0baf80a6fce070023e19d68048b90
|
/slothql/utils/case.py
|
bb1ab8d97d84957c3449f009eb68fe74385f063f
|
[
"MIT"
] |
permissive
|
IndioInc/slothql
|
ea4da3727cb974360eeb3b38517ead4328687e81
|
64a574013e249968746044555bd8779ac353b13f
|
refs/heads/master
| 2021-05-08T11:07:34.420797 | 2018-04-14T02:08:55 | 2018-04-14T02:08:55 | 119,881,523 | 2 | 0 |
MIT
| 2018-04-15T01:31:10 | 2018-02-01T19:16:50 |
Python
|
UTF-8
|
Python
| false | false | 527 |
py
|
import re
CAMELCASE_SNAKE_REGEX = re.compile(r'([a-z\d])([A-Z])')
def snake_to_camelcase(string: str) -> str:
first_char = next((i for i, c in enumerate(string) if c != '_'), len(string))
prefix, suffix = string[:first_char], string[first_char:]
words = [i or '_' for i in suffix.split('_')] if suffix else []
return prefix + ''.join(word.title() if i else word for i, word in enumerate(words))
def camelcase_to_snake(string: str) -> str:
return re.sub(CAMELCASE_SNAKE_REGEX, r'\1_\2', string).lower()
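# Illustrative round-trips (sketch):
#   snake_to_camelcase('foo_bar_baz')   -> 'fooBarBaz'
#   snake_to_camelcase('_private_name') -> '_privateName'
#   camelcase_to_snake('fooBarBaz')     -> 'foo_bar_baz'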
|
[
"[email protected]"
] | |
6e94648de2944ea3fae80b2f53a1364afd58c094
|
82da0dd86f0d8bbd526578f1a5252955bb2cc63b
|
/testClient.py
|
57082ea4c12edf7776d4b22583c80de163542678
|
[] |
no_license
|
guldfisk/HexCG
|
a200a4790782fc91147da8342300cb618bdcb0c6
|
435511a8e61656baa8f7addb8f64128977033349
|
refs/heads/master
| 2021-01-12T06:35:58.738660 | 2017-04-27T21:24:15 | 2017-04-27T21:24:15 | 77,392,407 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 956 |
py
|
import drawSurface
import os
import sys
from PyQt5 import QtWidgets, QtGui, QtCore
class MainView(QtWidgets.QWidget):
def __init__(self, parent=None):
super(MainView, self).__init__(parent)
botsplitter = QtWidgets.QSplitter(QtCore.Qt.Horizontal)
botsplitter.addWidget(drawSurface.DrawSurface())
botsplitter.addWidget(drawSurface.DrawSurface())
topsplitter = QtWidgets.QSplitter(QtCore.Qt.Vertical)
topsplitter.addWidget(botsplitter)
vbox = QtWidgets.QVBoxLayout(self)
vbox.addWidget(topsplitter)
self.setLayout(vbox)
class MainWindow(QtWidgets.QMainWindow):
def __init__(self, parent=None):
super(MainWindow,self).__init__(parent)
self.mainview = MainView()
self.setCentralWidget(self.mainview)
self.setWindowTitle('Test Hex Client')
self.setGeometry(300, 300, 300, 200)
def test():
app=QtWidgets.QApplication(sys.argv)
w=MainWindow()
w.show()
sys.exit(app.exec_())
if __name__=='__main__': test()
|
[
"[email protected]"
] | |
82c30f1ac576e4d5f43336166d9b2aa053797c7c
|
6be845bf70a8efaf390da28c811c52b35bf9e475
|
/windows/Resources/Dsz/PyScripts/Lib/dsz/mca_dsz/file/cmd/get/type_Result.py
|
e6ebf7d8de0e5bc766dae92b2644b9a4e230863d
|
[] |
no_license
|
kyeremalprime/ms
|
228194910bf2ed314d0492bc423cc687144bb459
|
47eea098ec735b2173ff0d4e5c493cb8f04e705d
|
refs/heads/master
| 2020-12-30T15:54:17.843982 | 2017-05-14T07:32:01 | 2017-05-14T07:32:01 | 91,180,709 | 2 | 2 | null | null | null | null |
UTF-8
|
Python
| false | false | 6,656 |
py
|
# uncompyle6 version 2.9.10
# Python bytecode 2.7 (62211)
# Decompiled from: Python 3.6.0b2 (default, Oct 11 2016, 05:27:10)
# [GCC 6.2.0 20161005]
# Embedded file name: type_Result.py
from types import *
import mcl.object.MclTime
import array
RESULT_FLAG_IGNORED_DUE_TO_FILESIZE = 1
class ResultFileInfo:
def __init__(self):
self.__dict__['index'] = 0
self.__dict__['fileSize'] = 0
self.__dict__['createTime'] = mcl.object.MclTime.MclTime()
self.__dict__['accessTime'] = mcl.object.MclTime.MclTime()
self.__dict__['modifyTime'] = mcl.object.MclTime.MclTime()
self.__dict__['openStatus'] = 0
self.__dict__['offset'] = 0
self.__dict__['name'] = ''
self.__dict__['flags'] = 0
def __getattr__(self, name):
if name == 'index':
return self.__dict__['index']
if name == 'fileSize':
return self.__dict__['fileSize']
if name == 'createTime':
return self.__dict__['createTime']
if name == 'accessTime':
return self.__dict__['accessTime']
if name == 'modifyTime':
return self.__dict__['modifyTime']
if name == 'openStatus':
return self.__dict__['openStatus']
if name == 'offset':
return self.__dict__['offset']
if name == 'name':
return self.__dict__['name']
if name == 'flags':
return self.__dict__['flags']
raise AttributeError("Attribute '%s' not found" % name)
def __setattr__(self, name, value):
if name == 'index':
self.__dict__['index'] = value
elif name == 'fileSize':
self.__dict__['fileSize'] = value
elif name == 'createTime':
self.__dict__['createTime'] = value
elif name == 'accessTime':
self.__dict__['accessTime'] = value
elif name == 'modifyTime':
self.__dict__['modifyTime'] = value
elif name == 'openStatus':
self.__dict__['openStatus'] = value
elif name == 'offset':
self.__dict__['offset'] = value
elif name == 'name':
self.__dict__['name'] = value
elif name == 'flags':
self.__dict__['flags'] = value
else:
raise AttributeError("Attribute '%s' not found" % name)
def Marshal(self, mmsg):
from mcl.object.Message import MarshalMessage
submsg = MarshalMessage()
submsg.AddU32(MSG_KEY_RESULT_FILE_INFO_INDEX, self.__dict__['index'])
submsg.AddU64(MSG_KEY_RESULT_FILE_INFO_FILE_SIZE, self.__dict__['fileSize'])
submsg.AddTime(MSG_KEY_RESULT_FILE_INFO_CREATE_TIME, self.__dict__['createTime'])
submsg.AddTime(MSG_KEY_RESULT_FILE_INFO_ACCESS_TIME, self.__dict__['accessTime'])
submsg.AddTime(MSG_KEY_RESULT_FILE_INFO_MODIFY_TIME, self.__dict__['modifyTime'])
submsg.AddU32(MSG_KEY_RESULT_FILE_INFO_OPEN_STATUS, self.__dict__['openStatus'])
submsg.AddS64(MSG_KEY_RESULT_FILE_INFO_OFFSET, self.__dict__['offset'])
submsg.AddStringUtf8(MSG_KEY_RESULT_FILE_INFO_NAME, self.__dict__['name'])
submsg.AddU16(MSG_KEY_RESULT_FILE_INFO_FLAGS, self.__dict__['flags'])
mmsg.AddMessage(MSG_KEY_RESULT_FILE_INFO, submsg)
def Demarshal(self, dmsg, instance=-1):
import mcl.object.Message
msgData = dmsg.FindData(MSG_KEY_RESULT_FILE_INFO, mcl.object.Message.MSG_TYPE_MSG, instance)
submsg = mcl.object.Message.DemarshalMessage(msgData)
self.__dict__['index'] = submsg.FindU32(MSG_KEY_RESULT_FILE_INFO_INDEX)
self.__dict__['fileSize'] = submsg.FindU64(MSG_KEY_RESULT_FILE_INFO_FILE_SIZE)
self.__dict__['createTime'] = submsg.FindTime(MSG_KEY_RESULT_FILE_INFO_CREATE_TIME)
self.__dict__['accessTime'] = submsg.FindTime(MSG_KEY_RESULT_FILE_INFO_ACCESS_TIME)
self.__dict__['modifyTime'] = submsg.FindTime(MSG_KEY_RESULT_FILE_INFO_MODIFY_TIME)
self.__dict__['openStatus'] = submsg.FindU32(MSG_KEY_RESULT_FILE_INFO_OPEN_STATUS)
self.__dict__['offset'] = submsg.FindS64(MSG_KEY_RESULT_FILE_INFO_OFFSET)
self.__dict__['name'] = submsg.FindString(MSG_KEY_RESULT_FILE_INFO_NAME)
self.__dict__['flags'] = submsg.FindU16(MSG_KEY_RESULT_FILE_INFO_FLAGS)
class ResultData:
def __init__(self):
self.__dict__['index'] = 0
self.__dict__['buffer'] = array.array('B')
def __getattr__(self, name):
if name == 'index':
return self.__dict__['index']
if name == 'buffer':
return self.__dict__['buffer']
raise AttributeError("Attribute '%s' not found" % name)
def __setattr__(self, name, value):
if name == 'index':
self.__dict__['index'] = value
elif name == 'buffer':
self.__dict__['buffer'] = value
else:
raise AttributeError("Attribute '%s' not found" % name)
def Marshal(self, mmsg):
from mcl.object.Message import MarshalMessage
submsg = MarshalMessage()
submsg.AddU32(MSG_KEY_RESULT_DATA_INDEX, self.__dict__['index'])
submsg.AddData(MSG_KEY_RESULT_DATA_BUFFER, self.__dict__['buffer'])
mmsg.AddMessage(MSG_KEY_RESULT_DATA, submsg)
def Demarshal(self, dmsg, instance=-1):
import mcl.object.Message
msgData = dmsg.FindData(MSG_KEY_RESULT_DATA, mcl.object.Message.MSG_TYPE_MSG, instance)
submsg = mcl.object.Message.DemarshalMessage(msgData)
self.__dict__['index'] = submsg.FindU32(MSG_KEY_RESULT_DATA_INDEX)
self.__dict__['buffer'] = submsg.FindData(MSG_KEY_RESULT_DATA_BUFFER)
class ResultDone:
def __init__(self):
self.__dict__['index'] = 0
def __getattr__(self, name):
if name == 'index':
return self.__dict__['index']
raise AttributeError("Attribute '%s' not found" % name)
def __setattr__(self, name, value):
if name == 'index':
self.__dict__['index'] = value
else:
raise AttributeError("Attribute '%s' not found" % name)
def Marshal(self, mmsg):
from mcl.object.Message import MarshalMessage
submsg = MarshalMessage()
submsg.AddU32(MSG_KEY_RESULT_DONE_INDEX, self.__dict__['index'])
mmsg.AddMessage(MSG_KEY_RESULT_DONE, submsg)
def Demarshal(self, dmsg, instance=-1):
import mcl.object.Message
msgData = dmsg.FindData(MSG_KEY_RESULT_DONE, mcl.object.Message.MSG_TYPE_MSG, instance)
submsg = mcl.object.Message.DemarshalMessage(msgData)
self.__dict__['index'] = submsg.FindU32(MSG_KEY_RESULT_DONE_INDEX)
|
[
"[email protected]"
] | |
8ba65c3c7211433bf61d3a399af108469c4e73d0
|
ee974d693ca4c4156121f8cb385328b52eaac07c
|
/env/lib/python3.6/site-packages/setuptools/sandbox.py
|
2ed7a0bf7fe00623e3280c9c013f6ef5a25e57e6
|
[] |
no_license
|
ngonhi/Attendance_Check_System_with_Face_Recognition
|
f4531cc4dee565d0e45c02217f73f3eda412b414
|
92ff88cbc0c740ad48e149033efd38137c9be88d
|
refs/heads/main
| 2023-03-12T07:03:25.302649 | 2021-02-26T15:37:33 | 2021-02-26T15:37:33 | 341,493,686 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 130 |
py
|
version https://git-lfs.github.com/spec/v1
oid sha256:48ec4eb0b34103d21dc9a1544c6daed3406c040dc389ef8a07380ec677ad2ecf
size 14767
|
[
"Nqk180998!"
] |
Nqk180998!
|
1f0274c73fe51e1f7184f82248d76deb389cbc77
|
08d99e1d2d8dc2adbfea957855279c6ed62f9a5b
|
/Udemy-kurs-zaawansowany/sekcja_4/4.58_returing_function_L.py
|
e9e085670c87d5208d149c3c816435631b39d2c0
|
[] |
no_license
|
rucpata/udemy-python-zaawansowany
|
23f6202edea8879f5a0ca24800908e11af59486e
|
597de3ceca723b799e1b31d13552bbb2c9d57a74
|
refs/heads/master
| 2022-03-15T20:48:36.076232 | 2019-12-09T14:19:09 | 2019-12-09T14:19:09 | 218,304,905 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,167 |
py
|
from datetime import datetime
def time_span_m(start, end):
duration = end - start
duration_in_s = duration.total_seconds()
return divmod(duration_in_s, 60)[0]
def time_span_h(start, end):
duration = end - start
duration_in_s = duration.total_seconds()
return divmod(duration_in_s, 3600)[0]
def time_span_d(start, end):
duration = end - start
duration_in_s = duration.total_seconds()
return divmod(duration_in_s, 86400)[0]
start = datetime(2019, 1, 1, 0, 0, 0)
end = datetime.now()
print(time_span_m(start, end))
print(time_span_h(start, end))
print(time_span_d(start, end))
print('-'*60)
def create_function(span):
if span == 'm':
sec = 60
elif span == 'h':
sec = 3600
elif span == 'd':
sec = 86400
source = '''
def f(start, end):
duration = end - start
duration_in_s = duration.total_seconds()
return divmod(duration_in_s, {})[0]
'''.format(sec)
exec(source, globals())
return f
f_minutes = create_function('m')
f_hours = create_function('h')
f_days = create_function('d')
print(f_minutes(start, end))
print(f_hours(start, end))
print(f_days(start, end))
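# A closure-based alternative (a sketch added here, not part of the course
# code) that avoids building source text for exec():
def create_function_closure(span):
    sec = {'m': 60, 'h': 3600, 'd': 86400}[span]
    def f(start, end):
        duration_in_s = (end - start).total_seconds()
        return divmod(duration_in_s, sec)[0]
    return f
print(create_function_closure('m')(start, end))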
|
[
"[email protected]"
] | |
70524c923ce78ea4d4b851eabd7b02424b38a96b
|
03d55aab8ec79c8c7deb6ef94c20712fb0108b2c
|
/Statistics_is_Easy_Codes/Diff2MeanConfCorr.py
|
060b1ace90ea061424a36f34515e5bdbe63cea91
|
[] |
no_license
|
kxu68/BMSE
|
8894a006b574f987ec65ac831d7aef3169402725
|
2722d9f1e0785b36ac00ccc576c2e6da8d6280cb
|
refs/heads/master
| 2023-05-19T05:47:03.911811 | 2021-06-10T20:46:23 | 2021-06-10T20:46:23 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 8,664 |
py
|
#!/usr/bin/python
######################################
# Difference between Two Means Bias Corrected Confidence Interval
# From: Statistics is Easy! By Dennis Shasha and Manda Wilson
#
# Uses shuffling & bootstrapping to get a 90% confidence interval for the difference between two means.
# The confidence interval is computed using Efron's bias corrected method.
#
# Author: Manda Wilson
#
# Example of FASTA formatted input file (this is only for 2 groups):
# >placebo_vals
# 54 51 58 44 55 52 42 47 58 46
# >drug_vals
# 54 73 53 70 73 68 52 65 65
#
# Pseudocode:
#
# 1. Measure the difference between the two group means. The difference in means is measured
# by sum(grpA) / len(grpA) - sum(grpB) / len(grpB). In this example the difference between
# the two group means is 12.97.
#
# 2. Do the following 10,000 times:
# a. For each sample we take a bootstrap sample:
# i. Create a new array of the same size as the original sample
# ii. Fill the array with randomly picked values from the original sample (uniformly and randomly picked with replacement)
# b. Measure the difference between the two bootstrap group means, just as we did in step (1)
# with the original samples.
#
# 3. Sort the differences computed in step (2).
#
# 4. Compute the size of each interval tail. If we want a 90% confidence interval, then 1 - 0.9 yields the
# portion of the interval in the tails. We divide this by 2 to get the size of each tail, in this case 0.05.
#
# 5. Compute the upper and lower bounds.
# a. Find proportion p of the bootstrap values computed in step (2) that are below the sample estimate
# z_0 = phi^-1(p)
# For example if p = 0.5 then z_0 = 0, if p = 0.5 - 0.3413 then z_0 = -1.
# b. z_alpha/2 = phi^-1(alpha/2). e.g. phi^-1(0.05) when getting the 90% confidence interval (i.e. alpha = 10%)
# c. z_1-alpha/2 = phi^-1(1-alpha/2) e.g. phi^-1(0.95) when getting the 90% confidence interval
# d. the lower end of the alpha confidence interval is phi(z_alpha/2 + 2 z_0) among the bootstrap values
# e. the upper end is phi(z_1-alpha/2 + 2 z_0)
#
# 6. The bootstrap values at the lower bound and upper bound give us our confidence interval.
#
######################################
import random
import math
import sys
######################################
#
# Adjustable variables
#
######################################
input_file = "Diff2Mean.vals"
conf_interval = 0.9
######################################
#
# Subroutines
#
######################################
# maps proportion of values above mean
# to number of standard deviations above mean
# keys will be index / 100
area_to_sd_map = [0.0000, 0.0040, 0.0080, 0.0120, 0.0160, 0.0199, 0.0239, 0.0279, 0.0319, 0.0359, 0.0398, 0.0438, 0.0478, 0.0517, 0.0557, 0.0596, 0.0636, 0.0675, 0.0714, 0.0753, 0.0793, 0.0832, 0.0871, 0.0910, 0.0948, 0.0987, 0.1026, 0.1064, 0.1103, 0.1141, 0.1179, 0.1217, 0.1255, 0.1293, 0.1331, 0.1368, 0.1406, 0.1443, 0.1480, 0.1517, 0.1554, 0.1591, 0.1628, 0.1664, 0.1700, 0.1736, 0.1772, 0.1808, 0.1844, 0.1879, 0.1915, 0.1950, 0.1985, 0.2019, 0.2054, 0.2088, 0.2123, 0.2157, 0.2190, 0.2224, 0.2257, 0.2291, 0.2324, 0.2357, 0.2389, 0.2422, 0.2454, 0.2486, 0.2517, 0.2549, 0.2580, 0.2611, 0.2642, 0.2673, 0.2704, 0.2734, 0.2764, 0.2794, 0.2823, 0.2852, 0.2881, 0.2910, 0.2939, 0.2967, 0.2995, 0.3023, 0.3051, 0.3078, 0.3106, 0.3133, 0.3159, 0.3186, 0.3212, 0.3238, 0.3264, 0.3289, 0.3315, 0.3340, 0.3365, 0.3389, 0.3413, 0.3438, 0.3461, 0.3485, 0.3508, 0.3531, 0.3554, 0.3577, 0.3599, 0.3621, 0.3643, 0.3665, 0.3686, 0.3708, 0.3729, 0.3749, 0.3770, 0.3790, 0.3810, 0.3830, 0.3849, 0.3869, 0.3888, 0.3907, 0.3925, 0.3944, 0.3962, 0.3980, 0.3997, 0.4015, 0.4032, 0.4049, 0.4066, 0.4082, 0.4099, 0.4115, 0.4131, 0.4147, 0.4162, 0.4177, 0.4192, 0.4207, 0.4222, 0.4236, 0.4251, 0.4265, 0.4279, 0.4292, 0.4306, 0.4319, 0.4332, 0.4345, 0.4357, 0.4370, 0.4382, 0.4394, 0.4406, 0.4418, 0.4429, 0.4441, 0.4452, 0.4463, 0.4474, 0.4484, 0.4495, 0.4505, 0.4515, 0.4525, 0.4535, 0.4545, 0.4554, 0.4564, 0.4573, 0.4582, 0.4591, 0.4599, 0.4608, 0.4616, 0.4625, 0.4633, 0.4641, 0.4649, 0.4656, 0.4664, 0.4671, 0.4678, 0.4686, 0.4693, 0.4699, 0.4706, 0.4713, 0.4719, 0.4726, 0.4732, 0.4738, 0.4744, 0.4750, 0.4756, 0.4761, 0.4767, 0.4772, 0.4778, 0.4783, 0.4788, 0.4793, 0.4798, 0.4803, 0.4808, 0.4812, 0.4817, 0.4821, 0.4826, 0.4830, 0.4834, 0.4838, 0.4842, 0.4846, 0.4850, 0.4854, 0.4857, 0.4861, 0.4864, 0.4868, 0.4871, 0.4875, 0.4878, 0.4881, 0.4884, 0.4887, 0.4890, 0.4893, 0.4896, 0.4898, 0.4901, 0.4904, 0.4906, 0.4909, 0.4911, 0.4913, 0.4916, 0.4918, 0.4920, 0.4922, 0.4925, 0.4927, 0.4929, 0.4931, 0.4932, 0.4934, 0.4936, 0.4938, 0.4940, 0.4941, 0.4943, 0.4945, 0.4946, 0.4948, 0.4949, 0.4951, 0.4952, 0.4953, 0.4955, 0.4956, 0.4957, 0.4959, 0.4960, 0.4961, 0.4962, 0.4963, 0.4964, 0.4965, 0.4966, 0.4967, 0.4968, 0.4969, 0.4970, 0.4971, 0.4972, 0.4973, 0.4974, 0.4974, 0.4975, 0.4976, 0.4977, 0.4977, 0.4978, 0.4979, 0.4979, 0.4980, 0.4981, 0.4981, 0.4982, 0.4982, 0.4983, 0.4984, 0.4984, 0.4985, 0.4985, 0.4986, 0.4986, 0.4987, 0.4987, 0.4987, 0.4988, 0.4988, 0.4989, 0.4989, 0.4989, 0.4990, 0.4990]
def sd_to_area(sd):
sign = 1
if sd < 0:
sign = -1
sd = math.fabs(sd) # get the absolute value of sd
index = int(sd * 100)
if len(area_to_sd_map) <= index:
return sign * area_to_sd_map[-1] # return last element in array
if index == (sd * 100):
return sign * area_to_sd_map[index]
return sign * (area_to_sd_map[index] + area_to_sd_map[index + 1]) / 2
def area_to_sd(area):
sign = 1
if area < 0:
sign = -1
area = math.fabs(area)
for a in range(len(area_to_sd_map)):
if area == area_to_sd_map[a]:
return sign * a / 100
if 0 < a and area_to_sd_map[a - 1] < area and area < area_to_sd_map[a]:
# our area is between this value and the previous
# for simplicity, we will just take the sd half way between a - 1 and a
return sign * (a - .5) / 100
return sign * (len(area_to_sd_map) - 1) / 100
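# Illustrative sanity checks (additions): one standard deviation above the
# mean covers about 34.13% of the area under the normal curve, and the two
# lookup functions should invert each other at that point.
assert abs(sd_to_area(1.0) - 0.3413) < 1e-9
assert abs(area_to_sd(0.3413) - 1.0) < 1e-9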
def bootstrap(x):
samp_x = []
for i in range(len(x)):
samp_x.append(random.choice(x))
return samp_x
# subtracts group a mean from group b mean and returns result
def meandiff(grpA, grpB):
return sum(grpB) / float(len(grpB)) - sum(grpA) / float(len(grpA))
######################################
#
# Computations
#
######################################
# list of lists
samples = []
a = 0
b = 1
# file must be in FASTA format
infile=open(input_file)
for line in infile:
if line.startswith('>'):
# start of new sample
samples.append([])
elif not line.isspace():
# line must contain values for previous sample
samples[len(samples) - 1] += list(map(float,line.split()))
infile.close()
observed_mean_diff = meandiff(samples[a], samples[b])
num_resamples = 10000 # number of times we will resample from our original samples
num_below_observed = 0 # count the number of bootstrap values below the observed sample statistic
out = [] # will store results of each time we resample
for i in range(num_resamples):
# get bootstrap samples for each of our groups
# then compute our statistic of interest
# append statistic to out
bootstrap_samples = [] # list of lists
for sample in samples:
bootstrap_samples.append(bootstrap(sample))
# now we have a list of new samples, run meandiff
boot_mean_diff = meandiff(bootstrap_samples[a], bootstrap_samples[b])
if boot_mean_diff < observed_mean_diff:
num_below_observed += 1
out.append(boot_mean_diff)
out.sort()
p = num_below_observed / float(num_resamples) # proportion of bootstrap values below the observed value
dist_from_center = p - .5 # if this is negative, the original is below the center, if positive, it is above
z_0 = area_to_sd(dist_from_center)
# now we want to find the proportion that should be between the mean and one of the tails
tail_sds = area_to_sd(conf_interval / 2)
z_alpha_over_2 = 0 - tail_sds
z_1_minus_alpha_over_2 = tail_sds
# in case our lower and upper bounds are not integers,
# we decrease the range (the values we include in our interval),
# so that we can keep the same level of confidence
lower_bound = int(math.ceil(num_resamples * (0.5 + sd_to_area(z_alpha_over_2 + (2 * z_0)))))
upper_bound = int(math.floor(num_resamples * (0.5 + sd_to_area(z_1_minus_alpha_over_2 + (2 * z_0)))))
######################################
#
# Output
#
######################################
print("Observed difference between the means: %.2f" % observed_mean_diff)
print("We have", conf_interval * 100, "% confidence that the true difference between the means", end=' ')
print("is between: %.2f" % out[lower_bound], "and %.2f" % out[upper_bound])
|
[
"[email protected]"
] | |
54befdb3e0a8b30d69a2aeeaa1a1cc346bb4cf05
|
18ca2e0f98b98941ff9d9e098e0be89166c8b87c
|
/Abp/Cp9/c9_4_2_backupToZip.py
|
9fec4c043c7091fcbfe196e42c9554230ca52d3c
|
[] |
no_license
|
masa-k0101/Self-Study_python
|
f20526a9cd9914c9906059678554285bfda0c932
|
72b364ad4da8485a201ebdaaa430fd2e95681b0a
|
refs/heads/master
| 2023-03-07T07:38:27.559606 | 2021-02-22T16:24:47 | 2021-02-22T16:24:47 | 263,381,292 | 1 | 0 | null | 2020-06-09T17:32:06 | 2020-05-12T15:47:48 |
Python
|
UTF-8
|
Python
| false | false | 908 |
py
|
# -*- coding: utf-8 -*-
#! python3
# backupToZip.py - Copies an entire folder into a sequentially numbered ZIP file
import zipfile, os
def backup_to_zip(folder):
    # Back up the entire contents of "folder" into a ZIP file.
folder = os.path.abspath(folder)
    # Figure out the filename this code should use based on what files already exist.
number = 1
while True:
zip_filename = os.path.basename(folder) + '_' + str(number) + '.zip'
if not os.path.exists(zip_filename):
break
number = number + 1
    # Create the ZIP file.
print('Creating {}...'.format(zip_filename))
backup_zip = zipfile.ZipFile(zip_filename, 'w')
    # TODO: Walk the folder tree and compress the files in it.
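    # One possible completion of the TODO (a sketch): walk the tree, adding
    # each folder and file, and skip ZIPs produced by earlier backup runs.
    for foldername, subfolders, filenames in os.walk(folder):
        print('Adding files in {}...'.format(foldername))
        # Add the current folder to the ZIP file.
        backup_zip.write(foldername)
        # Add all the files in this folder to the ZIP file.
        for filename in filenames:
            new_base = os.path.basename(folder) + '_'
            if filename.startswith(new_base) and filename.endswith('.zip'):
                continue  # don't back up the backup ZIP files
            backup_zip.write(os.path.join(foldername, filename))
    backup_zip.close()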
print('Done')
backup_to_zip('c:\\Study\\python\\Automate the Boring Stuff with Python')
|
[
"[email protected]"
] | |
29dee4c6ed46e6c4f30e6d3f5b852347f06edfa7
|
668e32dea18d0a7dd3884801d773009b207b35d9
|
/api/migrations/0002_profile_phone.py
|
c7d193c54362c88c647621c0abd58f150cf0223d
|
[] |
no_license
|
aviox-git/driss-backend
|
7a1b0759e899354b4dcbcb9e5dd20120667b0c5f
|
8825722c7c3c26896ebb2827075445f364bd2764
|
refs/heads/master
| 2020-06-13T12:35:10.939614 | 2019-10-05T14:33:52 | 2019-10-05T14:33:52 | 194,655,723 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 400 |
py
|
# Generated by Django 2.2.1 on 2019-06-13 14:56
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('api', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='profile',
name='phone',
field=models.BigIntegerField(null=True, verbose_name='Phone Number'),
),
]
|
[
"[email protected]"
] | |
14fec06ce8e0cefe82323f6b61ffa8b906026b8c
|
537b58ea8a1d1fcd961862876662da31efe4880f
|
/django/blog/migrations/0001_initial.py
|
ee37d7ab85daeacc6a7fc1012305eb781ec4eb54
|
[] |
no_license
|
petershan1119/Djangogirls-Tutorial
|
d82e4ecdb6322f9c03dbe4d365087e692c265443
|
2f9bc6a6d0599859cf22d0f315553a5932814b39
|
refs/heads/master
| 2021-05-09T06:16:47.077800 | 2018-02-25T05:30:25 | 2018-02-25T05:30:25 | 119,325,945 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 995 |
py
|
# Generated by Django 2.0.1 on 2018-01-29 07:01
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Post',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=200)),
('content', models.TextField(blank=True)),
('created_date', models.DateTimeField(default=django.utils.timezone.now)),
('published_date', models.DateField(blank=True, null=True)),
('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
|
[
"[email protected]"
] | |
c4ac81f2ad3729430ee488e572e843dd780a98fc
|
f04fb8bb48e38f14a25f1efec4d30be20d62388c
|
/牛客Top200/89验证IP地址.py
|
8194a075a514d37ea432617022318981185de087
|
[] |
no_license
|
SimmonsChen/LeetCode
|
d8ef5a8e29f770da1e97d295d7123780dd37e914
|
690b685048c8e89d26047b6bc48b5f9af7d59cbb
|
refs/heads/master
| 2023-09-03T01:16:52.828520 | 2021-11-19T06:37:19 | 2021-11-19T06:37:19 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,327 |
py
|
#
# Validate an IP address
# @param IP: string, an IP address string
# @return: string, one of "IPv4", "IPv6", or "Neither"
#
class Solution:
def isIpv4(self, chs):
ans = 0
for ch in chs:
if not ch.isdigit(): return False
ans = ans * 10 + int(ch)
if ans > 255:
return False
else:
return True
def isIpv6(self, chs):
for ch in chs:
if ch.islower():
if ch > "f": return False
elif ch.isupper():
if ch > "F": return False
return True
def solve(self, IP):
# write code here
if not IP:
return "Neither"
if "." in IP:
arr = IP.split(".")
if len(arr) != 4: return "Neither"
for item in arr:
if item == "" or (len(item) > 1 and item[0] == "0") or len(item) > 3 or not self.isIpv4(item): return "Neither"
return "IPv4"
else:
arr = IP.split(":")
if len(arr) != 8: return "Neither"
for item in arr:
if item == "" or len(item) > 4 or not self.isIpv6(item): return "Neither"
return "IPv6"
if __name__ == '__main__':
s = Solution()
print(s.solve("192.0.0.1"))
# print(s.solve("2001:0db8:85a3:0000:0:8A2E:0370:733a"))
|
[
"[email protected]"
] | |
d8f81028d7f2a386824631eaa7c1a4f7c435a895
|
510b4d4db394191f1e5a6058555c29903c24d8c8
|
/geomat/stein/fields.py
|
36429148fa889e8ab0f43283d62e9e37732fff29
|
[
"BSD-3-Clause"
] |
permissive
|
GeoMatDigital/django-geomat
|
8635735776b924d3ce4d8b2c64b2835d2a6b20d0
|
8c5bc4c9ba9759b58b52ddf339ccaec40ec5f6ea
|
refs/heads/develop
| 2021-09-10T07:19:04.212942 | 2019-10-31T15:56:12 | 2019-10-31T15:56:12 | 45,467,102 | 3 | 0 |
BSD-3-Clause
| 2021-09-07T23:33:48 | 2015-11-03T13:09:05 |
Python
|
UTF-8
|
Python
| false | false | 955 |
py
|
from django import forms
from django.contrib.postgres.fields import ArrayField
class ChoiceArrayField(ArrayField):
"""
A field that allows us to store an array of choices.
Uses Django 1.9's postgres ArrayField
and a MultipleChoiceField for its formfield.
Usage:
choices = ChoiceArrayField(models.CharField(max_length=...,
choices=(...,)),
default=[...])
"""
# Voodoo-voodoo from https://gist.github.com/danni/f55c4ce19598b2b345ef
def formfield(self, **kwargs):
defaults = {
'form_class': forms.MultipleChoiceField,
'choices': self.base_field.choices,
}
defaults.update(kwargs)
# Skip our parent's formfield implementation completely as we don't
# care for it.
# pylint:disable=bad-super-call
return super(ArrayField, self).formfield(**defaults)
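# Illustrative (hypothetical) usage; the model name and choices below are
# assumptions for demonstration, not part of this module:
#
#     from django.db import models
#
#     class Sample(models.Model):
#         COLOR_CHOICES = (('red', 'Red'), ('blue', 'Blue'))
#         colors = ChoiceArrayField(
#             models.CharField(max_length=4, choices=COLOR_CHOICES),
#             default=list)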
|
[
"[email protected]"
] | |
54c3c7e8fe81cb1aab1bd644c8e54b0d2a2a2f5a
|
d2c4934325f5ddd567963e7bd2bdc0673f92bc40
|
/tests/model_control/detailed/transf_Anscombe/model_control_one_enabled_Anscombe_PolyTrend_Seasonal_Second_LSTM.py
|
97fe0297c57bed7d7cbbb7625f8f3d5fea08b058
|
[
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
jmabry/pyaf
|
797acdd585842474ff4ae1d9db5606877252d9b8
|
afbc15a851a2445a7824bf255af612dc429265af
|
refs/heads/master
| 2020-03-20T02:14:12.597970 | 2018-12-17T22:08:11 | 2018-12-17T22:08:11 | 137,104,552 | 0 | 0 |
BSD-3-Clause
| 2018-12-17T22:08:12 | 2018-06-12T17:15:43 |
Python
|
UTF-8
|
Python
| false | false | 163 |
py
|
import pyaf.tests.model_control.test_ozone_custom_models_enabled as testmod
testmod.build_model( ['Anscombe'] , ['PolyTrend'] , ['Seasonal_Second'] , ['LSTM'] );
|
[
"[email protected]"
] | |
55811f3f06f95917f6dea10ad8ed29ded8e18031
|
f13c586b82224c07f28f7bb7d9dd503e64eb5cb2
|
/tests/optimize/test_lie_algebra_optimizer.py
|
b61bcd59908a9a80b3f48fa6fcbbafd06aab1df4
|
[
"Apache-2.0"
] |
permissive
|
therooler/pennylane
|
095f104e40254be2ed3050bc7be9ea9d2ee11ebd
|
fde1f24bd784d6ee2af5c980c2d5010b4c2bbe54
|
refs/heads/master
| 2023-04-29T13:32:43.115108 | 2023-04-18T09:41:42 | 2023-04-18T09:41:42 | 202,356,685 | 0 | 0 |
Apache-2.0
| 2019-08-14T13:30:39 | 2019-08-14T13:30:38 | null |
UTF-8
|
Python
| false | false | 10,915 |
py
|
# Copyright 2018-2021 Xanadu Quantum Technologies Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Unit tests for the ``LieAlgebraOptimizer``.
"""
import pytest
from scipy.sparse.linalg import expm
import numpy as np
import pennylane as qml
from pennylane.optimize import LieAlgebraOptimizer
def circuit_1():
"""Simple circuit."""
qml.Hadamard(wires=[0])
qml.Hadamard(wires=[1])
def circuit_2():
"""Simply parameterized circuit."""
qml.RX(0.1, wires=[0])
qml.RY(0.5, wires=[1])
qml.CNOT(wires=[0, 1])
qml.RY(0.6, wires=[0])
def circuit_3():
"""Three-qubit circuit."""
qml.RY(0.5, wires=[0])
qml.RY(0.6, wires=[1])
qml.RY(0.7, wires=[2])
qml.CNOT(wires=[0, 1])
qml.CNOT(wires=[1, 2])
qml.RX(-0.6, wires=[0])
qml.RX(-0.3, wires=[1])
qml.RX(-0.2, wires=[2])
hamiltonian_1 = qml.Hamiltonian(
coeffs=[-1.0] * 3,
observables=[qml.PauliX(0), qml.PauliZ(1), qml.PauliY(0) @ qml.PauliX(1)],
)
hamiltonian_2 = qml.Hamiltonian(
coeffs=[-0.2, 0.3, -0.15],
observables=[
qml.PauliY(1),
qml.PauliZ(0) @ qml.PauliZ(1),
qml.PauliX(0) @ qml.PauliX(1),
],
)
hamiltonian_3 = qml.Hamiltonian(
coeffs=[-2.0], observables=[qml.PauliY(0) @ qml.PauliY(1) @ qml.PauliY(2)]
)
@pytest.mark.parametrize(
"circuit,hamiltonian",
[
(circuit_1, hamiltonian_1),
(circuit_1, hamiltonian_2),
(circuit_2, hamiltonian_1),
(circuit_2, hamiltonian_2),
(circuit_3, hamiltonian_3),
],
)
def test_lie_algebra_omegas(circuit, hamiltonian):
"""Test that we calculate the Riemannian gradient coefficients Tr{[rho, H] P_j} correctly."""
# pylint: disable=no-member
nqubits = max([max(ps.wires) for ps in hamiltonian.ops]) + 1
wires = range(nqubits)
dev = qml.device("default.qubit", wires=nqubits)
@qml.qnode(dev)
def get_state():
circuit()
return qml.state()
@qml.qnode(dev)
def lie_circuit():
circuit()
return qml.expval(hamiltonian)
phi = get_state()
rho = np.outer(phi, phi.conj())
hamiltonian_np = qml.utils.sparse_hamiltonian(hamiltonian, wires).toarray()
lie_algebra_np = hamiltonian_np @ rho - rho @ hamiltonian_np
opt = LieAlgebraOptimizer(circuit=lie_circuit)
ops = opt.get_su_n_operators(None)[0]
omegas_np = []
for op in ops:
op = qml.math.expand_matrix(op.matrix(), op.wires, wires)
omegas_np.append(1j * np.trace(lie_algebra_np @ op))
omegas = opt.get_omegas()
assert np.allclose(omegas, omegas_np)
@pytest.mark.parametrize(
"circuit,hamiltonian",
[
(circuit_1, hamiltonian_1),
(circuit_1, hamiltonian_2),
(circuit_2, hamiltonian_1),
(circuit_2, hamiltonian_2),
(circuit_3, hamiltonian_3),
],
)
def test_lie_algebra_omegas_restricted(circuit, hamiltonian):
"""Test that we calculate the (restricted) Riemannian gradient coefficients correctly."""
# pylint: disable=no-member
nqubits = max([max(ps.wires) for ps in hamiltonian.ops]) + 1
wires = range(nqubits)
dev = qml.device("default.qubit", wires=nqubits)
@qml.qnode(dev)
def get_state():
circuit()
return qml.state()
@qml.qnode(dev)
def lie_circuit():
circuit()
return qml.expval(hamiltonian)
phi = get_state()
rho = np.outer(phi, phi.conj())
hamiltonian_np = qml.utils.sparse_hamiltonian(hamiltonian, wires).toarray()
lie_algebra_np = hamiltonian_np @ rho - rho @ hamiltonian_np
restriction = qml.Hamiltonian(
coeffs=[1.0] * 3,
observables=[qml.PauliX(0), qml.PauliY(1), qml.PauliY(0) @ qml.PauliY(1)],
)
opt = LieAlgebraOptimizer(circuit=lie_circuit, restriction=restriction)
ops = opt.get_su_n_operators(restriction)[0]
omegas_np = []
for op in ops:
op = qml.math.expand_matrix(op.matrix(), op.wires, wires)
omegas_np.append(1j * np.trace(lie_algebra_np @ op))
omegas = opt.get_omegas()
assert np.allclose(omegas, omegas_np)
@pytest.mark.parametrize(
"circuit,hamiltonian",
[
(circuit_1, hamiltonian_1),
(circuit_1, hamiltonian_2),
(circuit_2, hamiltonian_1),
(circuit_3, hamiltonian_3),
],
)
def test_lie_algebra_evolution(circuit, hamiltonian):
"""Test that the optimizer produces the correct unitary to append."""
# pylint: disable=no-member
nqubits = max([max(ps.wires) for ps in hamiltonian.ops]) + 1
wires = range(nqubits)
dev = qml.device("default.qubit", wires=nqubits)
@qml.qnode(dev)
def get_state():
circuit()
return qml.state()
@qml.qnode(dev)
def lie_circuit():
circuit()
return qml.expval(hamiltonian)
phi = get_state()
rho = np.outer(phi, phi.conj())
hamiltonian_np = qml.utils.sparse_hamiltonian(hamiltonian, wires).toarray()
lie_algebra_np = hamiltonian_np @ rho - rho @ hamiltonian_np
phi_exact = expm(-0.1 * lie_algebra_np * 2**nqubits) @ phi
rho_exact = np.outer(phi_exact, phi_exact.conj())
opt = LieAlgebraOptimizer(circuit=lie_circuit, stepsize=0.1, exact=True)
opt.step_and_cost()
cost_pl = opt.circuit()
cost_exact = np.trace(rho_exact @ hamiltonian_np)
assert np.allclose(cost_pl, cost_exact, atol=1e-4)
@pytest.mark.parametrize(
"circuit,hamiltonian",
[
(circuit_1, hamiltonian_1),
(circuit_1, hamiltonian_2),
(circuit_2, hamiltonian_1),
(circuit_2, hamiltonian_2),
(circuit_3, hamiltonian_3),
],
)
def test_lie_algebra_step(circuit, hamiltonian):
"""Test that we can take subsequent steps with the optimizer."""
nqubits = max([max(ps.wires) for ps in hamiltonian.ops]) + 1
dev = qml.device("default.qubit", wires=nqubits)
@qml.qnode(dev)
def lie_circuit():
circuit()
return qml.expval(hamiltonian)
opt = LieAlgebraOptimizer(circuit=lie_circuit)
opt.step()
opt.step()
@pytest.mark.parametrize(
"circuit,hamiltonian",
[
(circuit_1, hamiltonian_1),
(circuit_1, hamiltonian_2),
(circuit_2, hamiltonian_1),
(circuit_2, hamiltonian_2),
(circuit_3, hamiltonian_3),
],
)
def test_lie_algebra_step_trotterstep(circuit, hamiltonian):
"""Test that we can take subsequent steps with the optimizer."""
nqubits = max([max(ps.wires) for ps in hamiltonian.ops]) + 1
dev = qml.device("default.qubit", wires=nqubits)
@qml.qnode(dev)
def lie_circuit():
circuit()
return qml.expval(hamiltonian)
opt = LieAlgebraOptimizer(circuit=lie_circuit, trottersteps=3)
opt.step()
opt.step()
def test_lie_algebra_circuit_input_1_check():
"""Test that a type error is raise for non-QNode circuits."""
def circuit():
qml.RY(0.5, wires=0)
with pytest.raises(TypeError, match="circuit must be a QNode"):
LieAlgebraOptimizer(circuit=circuit, stepsize=0.001)
def test_lie_algebra_hamiltonian_input_1_check():
"""Test that a type error is raise for non-QNode circuits."""
@qml.qnode(qml.device("default.qubit", wires=3))
def circuit():
qml.RY(0.5, wires=0)
return qml.state()
with pytest.raises(
TypeError,
match="circuit must return the expectation value of a Hamiltonian",
):
LieAlgebraOptimizer(circuit=circuit, stepsize=0.001)
def test_lie_algebra_nqubits_check():
"""Test that we warn if the system is too big."""
@qml.qnode(qml.device("default.qubit", wires=5))
def circuit():
qml.RY(0.5, wires=0)
return qml.expval(qml.Hamiltonian(coeffs=[-1.0], observables=[qml.PauliX(0)]))
with pytest.warns(UserWarning, match="The exact Riemannian gradient is exponentially"):
LieAlgebraOptimizer(circuit=circuit, stepsize=0.001)
def test_lie_algebra_restriction_check():
"""Test that a type error is raise for non-QNode circuits."""
@qml.qnode(qml.device("default.qubit", wires=3))
def circuit():
qml.RY(0.5, wires=0)
return qml.expval(qml.Hamiltonian(coeffs=[-1.0], observables=[qml.PauliX(0)]))
restriction = "not_a_hamiltonian"
with pytest.raises(
TypeError,
match="restriction must be a Hamiltonian",
):
LieAlgebraOptimizer(circuit=circuit, restriction=restriction, stepsize=0.001)
def test_docstring_example():
"""Test the docstring example with Trotterized evolution."""
hamiltonian = qml.Hamiltonian(
coeffs=[-1.0] * 3,
observables=[qml.PauliX(0), qml.PauliZ(1), qml.PauliY(0) @ qml.PauliX(1)],
)
@qml.qnode(qml.device("default.qubit", wires=2))
def quant_fun():
qml.RX(0.1, wires=[0])
qml.RY(0.5, wires=[1])
qml.CNOT(wires=[0, 1])
qml.RY(0.6, wires=[0])
return qml.expval(hamiltonian)
opt = LieAlgebraOptimizer(circuit=quant_fun, stepsize=0.1)
for _ in range(12):
circuit, cost = opt.step_and_cost()
circuit()
assert np.isclose(cost, -2.236068, atol=1e-3)
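    # Note (illustrative): -2.236068 ≈ -sqrt(5); the Riemannian flow is
    # expected to drive the cost toward the Hamiltonian's minimum energy.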
def test_docstring_example_exact():
"""Test that the optimizer works with matrix exponential."""
hamiltonian = qml.Hamiltonian(
coeffs=[-1.0] * 3,
observables=[qml.PauliX(0), qml.PauliZ(1), qml.PauliY(0) @ qml.PauliX(1)],
)
@qml.qnode(qml.device("default.qubit", wires=2))
def quant_fun():
qml.RX(0.1, wires=[0])
qml.RY(0.5, wires=[1])
qml.CNOT(wires=[0, 1])
qml.RY(0.6, wires=[0])
return qml.expval(hamiltonian)
opt = LieAlgebraOptimizer(circuit=quant_fun, stepsize=0.1, exact=True)
for _ in range(12):
circuit, cost = opt.step_and_cost()
circuit()
assert np.isclose(cost, -2.236068, atol=1e-3)
def test_example_shots():
"""Test that the optimizer works with finite shots."""
hamiltonian = qml.Hamiltonian(
coeffs=[-1.0] * 3,
observables=[qml.PauliX(0), qml.PauliZ(1), qml.PauliY(0) @ qml.PauliX(1)],
)
@qml.qnode(qml.device("default.qubit", wires=2, shots=1000), diff_method=None)
def quant_fun():
qml.RX(0.1, wires=[0])
qml.RY(0.5, wires=[1])
qml.CNOT(wires=[0, 1])
qml.RY(0.6, wires=[0])
return qml.expval(hamiltonian)
opt = LieAlgebraOptimizer(circuit=quant_fun, stepsize=0.1, exact=False)
for _ in range(3):
opt.step_and_cost()
|
[
"[email protected]"
] | |
8f8b7378f3e164e8ce802728ec439babb5859ec9
|
45b9beebad2f297486c9c12da537a0e28cbcd597
|
/users/config.py
|
c7d8704328ab790aebaaafcefe3ccfb2dccb3bf3
|
[] |
no_license
|
thinkingserious/flask-microservices-users
|
27a00c3a0e5194a2ab8b7c244365cf343e8b6d57
|
943d0717db72600be590df3df9b8d21e8cf5c4a3
|
refs/heads/master
| 2021-09-08T08:49:48.978016 | 2018-03-08T22:30:19 | 2018-03-08T22:30:19 | 115,350,010 | 2 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 645 |
py
|
import os
class BaseConfig:
"""Base configuration"""
DEBUG = False
TESTING = False
SQLALCHEMY_TRACK_MODIFICATIONS = False
SECRET_KEY = 'my_precious'
class DevelopmentConfig(BaseConfig):
"""Development configuration"""
DEBUG = True
SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL')
class TestingConfig(BaseConfig):
"""Testing configuration"""
DEBUG = True
TESTING = True
SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_TEST_URL')
class ProductionConfig(BaseConfig):
"""Production configuration"""
DEBUG = False
SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL')
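# --- Usage sketch (assumed; not part of the original module) ---
# An app factory would typically select one of these classes by dotted path:
#   app.config.from_object('users.config.DevelopmentConfig')
# The dotted path is inferred from this file living at users/config.py.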
|
[
"[email protected]"
] | |
88c23711f8433fde27a7f539630b6a9d0120f461
|
a4753147801dbabfec45f6f9f47572cda77efb81
|
/debugging-constructs/ibmfl/evidencia/evidence_recorder.py
|
fa7058b58049d01df8a7237e81121f847aabfab1
|
[
"MIT"
] |
permissive
|
SEED-VT/FedDebug
|
e1ec1f798dab603bd208b286c4c094614bb8c71d
|
64ffa2ee2e906b1bd6b3dd6aabcf6fc3de862608
|
refs/heads/main
| 2023-05-23T09:40:51.881998 | 2023-02-13T21:52:25 | 2023-02-13T21:52:25 | 584,879,212 | 8 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,115 |
py
|
"""
Licensed Materials - Property of IBM
Restricted Materials of IBM
20221069
© Copyright IBM Corp. 2022 All Rights Reserved.
"""
"""
Abstract base class for providing evidence for accountability.
"""
from abc import ABC, abstractmethod
class AbstractEvidenceRecorder(ABC):
"""
Class that supports providing evidence of FL actions.
Concrete implementations should act in a black-box fashion
with only the methods below exposed to the caller
"""
def __init__(self, info):
"""
Initializes an `AbstractEvidenceRecorder` object with info.
:param info: info required for this recorder.
:type info: `dict`
"""
self.info = info
@abstractmethod
def add_claim(self, predicate: str, custom_string: str):
"""
Adds a new claim as evidence.
Throws: An exception on failure
        :predicate: a caller provided predicate string, non-empty
        :custom_string: a caller provided string, non-empty
"""
"""
We may need to:
1) enhance the above method parameters etc
2) provide for a module "registration" mechanism
3) consider logging-like usage
"""
|
[
"[email protected]"
] | |
94ba0c13938603187c3de37f00105a9894637186
|
a359c7be79fd15809b659ae745352757b052e5fa
|
/web/pgadmin/feature_tests/table_ddl_feature_test.py
|
9fb90d662ca4225e39d306f7b864db3be67ce747
|
[
"PostgreSQL"
] |
permissive
|
harshal-dhumal/pgadmin4
|
579cfd91a1659d1e27445accb542511e73c88e4f
|
1977a5fcda44b78b00d6eaac2e6a99df355d5105
|
refs/heads/master
| 2020-12-02T22:16:34.682407 | 2017-07-03T10:19:02 | 2017-07-03T10:19:02 | 96,105,663 | 1 | 0 | null | 2017-07-03T11:54:02 | 2017-07-03T11:54:02 | null |
UTF-8
|
Python
| false | false | 2,316 |
py
|
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2017, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
from regression.feature_utils.base_feature_test import BaseFeatureTest
from regression.python_test_utils import test_utils
class TableDdlFeatureTest(BaseFeatureTest):
""" This class test acceptance test scenarios """
scenarios = [
("Test table DDL generation", dict())
]
def before(self):
connection = test_utils.get_db_connection(self.server['db'],
self.server['username'],
self.server['db_password'],
self.server['host'],
self.server['port'])
test_utils.drop_database(connection, "acceptance_test_db")
test_utils.create_database(self.server, "acceptance_test_db")
self.page.add_server(self.server)
def runTest(self):
test_utils.create_table(self.server, "acceptance_test_db", "test_table")
self.page.toggle_open_server(self.server['name'])
self.page.toggle_open_tree_item('Databases')
self.page.toggle_open_tree_item('acceptance_test_db')
self.page.toggle_open_tree_item('Schemas')
self.page.toggle_open_tree_item('public')
self.page.toggle_open_tree_item('Tables')
self.page.select_tree_item('test_table')
self.page.click_tab("SQL")
self.page.find_by_xpath(
"//*[contains(@class,'CodeMirror-lines') and contains(.,'CREATE TABLE public.test_table')]")
def after(self):
self.page.remove_server(self.server)
connection = test_utils.get_db_connection(self.server['db'],
self.server['username'],
self.server['db_password'],
self.server['host'],
self.server['port'])
test_utils.drop_database(connection, "acceptance_test_db")
|
[
"[email protected]"
] | |
f197366b23568d36afd7a0adc83041610a363335
|
85eaa822b3a565163820a2c8f997c508c43b1d13
|
/Table/iris_table.py
|
ef34c72bb85f390990dfc039b6df43c383cf2c3b
|
[] |
no_license
|
woorud/GuiTest
|
5e59db21eeb640db734b114ff351f25bc12fcdce
|
50b35818b09220b73092a01e86dd9cee174fc3ae
|
refs/heads/master
| 2023-01-01T05:44:10.969120 | 2020-10-06T16:09:49 | 2020-10-06T16:09:49 | 293,092,869 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,197 |
py
|
import sys
from PyQt5.QtWidgets import QMainWindow, QApplication, QTableWidgetItem
from PyQt5.uic import loadUiType
import pymysql
import pandas as pd
form_class = loadUiType("iris_table.ui")[0]
class ViewerClass(QMainWindow, form_class):
def __init__(self, parent=None):
QMainWindow.__init__(self, parent)
self.setupUi(self)
dbConn = pymysql.connect(user='######', passwd='######~', host='######', db='iris', charset='utf8')
cursor = dbConn.cursor(pymysql.cursors.DictCursor)
sql = "select a.SL, a.SW, a.PL, a.PW, b.Species_name from dataset a " \
"left join flower b on a.species = b.species;"
cursor.execute(sql)
result = cursor.fetchall()
result = pd.DataFrame(result)
self.tableWidget.setColumnCount(len(result.columns))
self.tableWidget.setRowCount(len(result))
self.tableWidget.setHorizontalHeaderLabels(result.columns)
for i in range(len(result)):
for j in range(len(result.columns)):
self.tableWidget.setItem(i, j, QTableWidgetItem(str(result.iloc[i, j])))
app = QApplication(sys.argv)
myWindow = ViewerClass(None)
myWindow.show()
app.exec()
|
[
"[email protected]"
] | |
92d3bb59489918ddcbcf0506d9c0336019d219e1
|
bc91d344ed2ee3f4f93547ec16350f2713e5f704
|
/.history/CRUD/views_20190108014602.py
|
5ca799bd23b34134d8e78785ed5186cce9299464
|
[] |
no_license
|
SabitDeepto/Chitra
|
10ecf0c4a7588234f0a50adf038783c9ce8706d0
|
160e5d64c8e4ee56a95bb639386785590160ff07
|
refs/heads/master
| 2020-04-27T21:55:09.685341 | 2019-03-09T16:14:35 | 2019-03-09T16:14:35 | 174,716,372 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 253 |
py
|
from django.shortcuts import render
from urllib3 import request
# Create your views here.
def home(request):
    # The original history snapshot truncates this line ('executive = Exe');
    # it is commented out so the view remains runnable.
    # executive = Exe...
templates = 'index.html'
context = {
'name': 'deepto'
}
return render(request, templates, context)
|
[
"[email protected]"
] | |
a798a0e843c705439966420e50750750658888f2
|
c7e028d71b5dd72eb18b72c6733e7e98a969ade6
|
/src/algoritmia/problems/traversals/treetraversals.py
|
418311d170e2b266f36eec01d2d8af08272ec59a
|
[
"MIT"
] |
permissive
|
antoniosarosi/algoritmia
|
da075a7ac29cc09cbb31e46b82ae0b0ea8ee992f
|
22b7d61e34f54a3dee03bf9e3de7bb4dd7daa31b
|
refs/heads/master
| 2023-01-24T06:09:37.616107 | 2020-11-19T16:34:09 | 2020-11-19T16:34:09 | 314,302,653 | 8 | 1 | null | null | null | null |
WINDOWS-1258
|
Python
| false | false | 3,233 |
py
|
#coding: latin1
from algoritmia.datastructures.queues import Fifo #[]level
from algoritmia.datastructures.queues import Lifo #[]prepro
from collections import namedtuple
from abc import ABCMeta, abstractmethod
class ITreeTraverser(metaclass=ABCMeta): #[interface
@abstractmethod
def traverse(self, tree: "IRootedTree<T>", #?tree?¶tree?
visitor: "IRootedTree<T> -> S") -> "Iterable<S>": pass #?vis?»vis? #]interface
class LevelOrderTreeTraverser(ITreeTraverser): #[level
def __init__(self, createFifo=lambda: Fifo()):
self.createFifo = createFifo
def traverse(self, tree: "IRootedTree<T>", #?tree?¶tree?
visitor: "IRootedTree<T> -> S"=None) -> "Iterable<S>":#?vis?»vis?
visitor = visitor or (lambda subtree: subtree.root)
Q = self.createFifo()
Q.push(tree)
yield visitor(tree)
while len(Q) > 0:
t = Q.pop()
for child in t.subtrees():
Q.push(child)
yield visitor(child) #]level
class PreorderTreeTraverser(ITreeTraverser):#[pre
def __init__(self, createLifo=lambda: Lifo()):
self.createLifo = createLifo
def traverse(self, tree: "IRootedTree<T>", #?tree?¶tree?
visitor: "IRootedTree<T> -> S"=None) -> "Iterable<S>":#?vis?»vis?
visitor = visitor or (lambda subtree: subtree.root)
Q = self.createLifo()
Q.push(tree)
while len(Q) > 0:
t = Q.pop()
yield visitor(t)
for st in reversed(tuple(t.subtrees())):
Q.push(st) #]pre
class PostorderTreeTraverser(ITreeTraverser): #[post
def __init__(self, createLifo=lambda: Lifo()):
self.createLifo = createLifo
def traverse(self, tree: "IRootedTree<T>", #?tree?¶tree?
visitor: "IRootedTree<T> -> S"=None) -> "Iterable<S>":#?vis?»vis?
visitor = visitor or (lambda subtree: subtree.root)
Q = self.createLifo()
Q.push(tree)
while len(Q) > 0:
t = Q.pop()
if isinstance(t, _ReadyToVisitTree):
yield visitor(t.tree)
else:
Q.push(_ReadyToVisitTree(t))
for st in reversed(tuple(t.subtrees())):
Q.push(st)
_ReadyToVisitTree = namedtuple("_ReadyToVisitTree", "tree") #]post
class InorderTreeTraverser(object): #[in
def __init__(self, createLifo=lambda: Lifo()):
self.createLifo = createLifo
def traverse(self, tree: "IRootedTree<t>", #?tree?¶tree?
visitor: "IRootedTree<T> -> S"=None) -> "Iterable<S>":#?vis?»vis?
visitor = visitor or (lambda subtree: subtree.root)
Q = self.createLifo()
Q.push(tree)
while len(Q) > 0:
t = Q.pop()
if isinstance(t, _ReadyToVisitTree):
yield visitor(t.tree)
else:
st= tuple(t.subtrees())
if len(st) == 2: Q.push(st[1])
Q.push(_ReadyToVisitTree(t))
if len(st) == 2: Q.push(st[0]) #]in
#]in
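# --- Usage sketch (assumed; _DemoTree is a stand-in for IRootedTree) ---
# class _DemoTree:
#     def __init__(self, root, children=()):
#         self.root, self._children = root, children
#     def subtrees(self):
#         return iter(self._children)
#
# t = _DemoTree(1, (_DemoTree(2), _DemoTree(3)))
# print(list(LevelOrderTreeTraverser().traverse(t)))  # [1, 2, 3]
# print(list(PreorderTreeTraverser().traverse(t)))    # [1, 2, 3]
# print(list(PostorderTreeTraverser().traverse(t)))   # [2, 3, 1]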
|
[
"amarzal@localhost"
] |
amarzal@localhost
|
7d6c3094add2dc3f6c27c81424781b777d17f603
|
4cb79aeadba003db92f295931012f4b85f0a10fa
|
/purkinje_model/neuron2morph.py
|
29afa103e60a24f35f9080a4d79b8f0cb7727d71
|
[] |
no_license
|
ModelDBRepository/225089
|
77f64de167ac148336189c0e1c93cb94f55ec000
|
4d8cfd8d93cf74eda52df7a14b988eed691dc27c
|
refs/heads/master
| 2020-05-29T18:26:35.794854 | 2019-05-31T03:23:31 | 2019-05-31T03:23:31 | 189,299,613 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 652 |
py
|
#########################################################################
# This script is provided for
#
# Chen W and De Schutter E (2017) Parallel STEPS: Large Scale Stochastic Spatial Reaction-Diffusion Simulation with High Performance Computers. Front. Neuroinform. 11:13. doi: 10.3389/fninf.2017.00013
#
##########################################################################
import sys
import steps.utilities.morph_support as morph_support
import cPickle
HOC_FILE = sys.argv[1]
MORPH_FILE = sys.argv[2]
moprhdata = morph_support.hoc2morph(HOC_FILE)
morph_file = open(MORPH_FILE, 'w')
cPickle.dump(moprhdata, morph_file)
morph_file.close()
|
[
"[email protected]"
] | |
49771ad3463b7f6cc1c7859994b5db00ce8fe7aa
|
48d232cc6dcf57abf6fca9cbbef8943e189acb04
|
/longest-peak-ae.py
|
81f9a6e95a133066bda1afc53954f3e21569d6c1
|
[] |
no_license
|
csusb-005411285/CodeBreakersCode
|
dae796ba4262770e0a568e9c27597a041db0775c
|
8f218164e1b9e42c1a928d22ef5a76328abb66a2
|
refs/heads/master
| 2022-01-12T09:11:33.668338 | 2021-12-27T04:45:13 | 2021-12-27T04:45:13 | 232,490,141 | 1 | 1 | null | 2021-01-29T23:09:14 | 2020-01-08T06:02:11 |
Python
|
UTF-8
|
Python
| false | false | 2,198 |
py
|
# tc: o(n2), sc: o(n)
def longestPeak(array):
if len(array) < 3:
return 0
peaks = get_peaks(array)
path_to_from_peak = get_path_from_peak(array, peaks)
if not path_to_from_peak:
return 0
max_len = float('-inf')
for i in path_to_from_peak:
max_len = max(max_len, len(path_to_from_peak[i]))
return max_len
def get_path_from_peak(array, peaks):
path = {}
for i in peaks:
forward = i + 1
backward = i - 1
path[i] = [array[backward], array[i], array[forward]]
while backward >= 0 and array[backward - 1] < array[backward]:
            path[i].append(array[backward - 1])
backward -= 1
while forward < len(array) - 1 and array[forward + 1] < array[forward]:
path[i].append(array[forward + 1])
forward += 1
return path
# 2nd attempt
# tc: o(n), sc: o(n)
def longestPeak(array):
if len(array) < 3:
return 0
heights = {}
peaks = get_peaks(array)
if len(peaks) == 0:
return 0
heights = populate_default_height(heights, peaks)
heights = get_heights(heights, array)
return max(heights.values())
def get_heights(heights, array):
for index in heights.keys():
backward = index
forward = index
height_for_index = 0
while backward > 0:
if array[backward - 1] < array[backward]:
height_for_index += 1
backward -= 1
else:
break
heights[index] = height_for_index + 1
height_for_index = 0
while forward < len(array) - 1:
if array[forward + 1] < array[forward]:
height_for_index += 1
forward += 1
else:
break
heights[index] += height_for_index
return heights
def populate_default_height(heights, peaks):
for peak in peaks:
heights[peak] = 0
return heights
def get_peaks(array):
peaks = []
for i in range(1, len(array) - 1):
if array[i] > array[i - 1] and array[i] > array[i + 1]:
peaks.append(i)
return peaks
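# --- Usage sketch (not in the original file) ---
# print(longestPeak([1, 2, 3, 3, 4, 0, 10, 6, 5, -1, -3, 2, 3]))  # -> 6
# The peak at value 10 spans 0, 10, 6, 5, -1, -3 -> length 6.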
|
[
"[email protected]"
] | |
360dd301bf43ba71d73c73e7a4ae7f879c30f84f
|
048df2b4dc5ad153a36afad33831017800b9b9c7
|
/yukicoder/yuki_0083.py
|
53f365fdf409ffe0ed76ea7a7d1529237b73802a
|
[] |
no_license
|
fluffyowl/past-submissions
|
a73e8f5157c647634668c200cd977f4428c6ac7d
|
24706da1f79e5595b2f9f2583c736135ea055eb7
|
refs/heads/master
| 2022-02-21T06:32:43.156817 | 2019-09-16T00:17:50 | 2019-09-16T00:17:50 | 71,639,325 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 50 |
py
|
n=input();print '7'*(n%2)+'1'*(n/2-1)+'1'*(1-n%2)
|
[
"[email protected]"
] | |
fdbe466ecb4fc79c93bb6a16feee06d295ce8d0b
|
f121695e2dff353607fa47fb42482470e03bbf8a
|
/capitulo_08-Funcoes/magicos.py
|
908835a823b126a47b6d631c241eec625501b463
|
[] |
no_license
|
ranog/python_work
|
76cbcf784c86fae4482be5383223e4b0a34f4130
|
47c442a90dcf32d5aef70858693a772a3c76a7ac
|
refs/heads/master
| 2022-12-22T11:02:26.482059 | 2021-04-17T01:12:22 | 2021-04-17T01:12:22 | 233,634,221 | 2 | 1 | null | 2022-12-08T07:38:43 | 2020-01-13T15:58:46 |
Python
|
UTF-8
|
Python
| false | false | 916 |
py
|
#! /usr/bin/env python3
"""
NAME
    magicos.py - TRY IT YOURSELF.
SYNOPSIS
    chmod +x magicos.py
    ./magicos.py
    Mágicos famosos:
    - Harry Houdini
    - Fu-Manchu
    - Richiardi Jr
    - Jasper Maskelyne
    - Dai Vernon
    - David Blaine
    - Siegfried Fischbacher
    - David Copperfield
DESCRIPTION
    8.9 - Magicians: Create a list of magician names. Pass the list
    to a function called show_magicians() that displays the name of
    each magician in the list.
HISTORY
    20200611: João Paulo, October 2020.
    - 8.9 - Magicians (p. 187).
"""
def show_magicians(magicians):
for magic in magicians:
print("- " + magic)
magicians = ['Harry Houdini', 'Fu-Manchu', 'Richiardi Jr',
'Jasper Maskelyne', 'Dai Vernon', 'David Blaine',
'Siegfried Fischbacher', 'David Copperfield', ]
print("\nMágicos famosos: ")
show_magicians(magicians)
|
[
"[email protected]"
] | |
77fd6bb439e708106d8cb5ef3ab6313444780583
|
2455062787d67535da8be051ac5e361a097cf66f
|
/Producers/BSUB/TrigProd_amumu_a5_dR5/trigger_amumu_producer_cfg_TrigProd_amumu_a5_dR5_450.py
|
540db515812846a4d296cd0d53441e47d7f6a26e
|
[] |
no_license
|
kmtos/BBA-RecoLevel
|
6e153c08d5ef579a42800f6c11995ee55eb54846
|
367adaa745fbdb43e875e5ce837c613d288738ab
|
refs/heads/master
| 2021-01-10T08:33:45.509687 | 2015-12-04T09:20:14 | 2015-12-04T09:20:14 | 43,355,189 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,360 |
py
|
import FWCore.ParameterSet.Config as cms
process = cms.Process("PAT")
#process.load("BBA/Analyzer/bbaanalyzer_cfi")
process.load("FWCore.MessageLogger.MessageLogger_cfi")
process.load('Configuration.EventContent.EventContent_cff')
process.load("Configuration.Geometry.GeometryRecoDB_cff")
process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff")
process.load("PhysicsTools.PatAlgos.producersLayer1.patCandidates_cff")
process.load("PhysicsTools.PatAlgos.selectionLayer1.selectedPatCandidates_cff")
from Configuration.AlCa.GlobalTag import GlobalTag
process.GlobalTag = GlobalTag(process.GlobalTag, 'MCRUN2_71_V1::All', '')
process.load("Configuration.StandardSequences.MagneticField_cff")
####################
# Message Logger
####################
process.MessageLogger.cerr.FwkReport.reportEvery = cms.untracked.int32(100)
process.options = cms.untracked.PSet( wantSummary = cms.untracked.bool(True) )
process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32(-1) )
## switch to unscheduled mode
process.options.allowUnscheduled = cms.untracked.bool(True)
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(500)
)
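# Note: this second maxEvents PSet overrides the unlimited (-1) setting above.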
####################
# Input File List
####################
# Input source
process.source = cms.Source("PoolSource",
fileNames = cms.untracked.vstring('root://eoscms//eos/cms/store/user/ktos/RECO_Step3_amumu_a5/RECO_Step3_amumu_a5_450.root'),
secondaryFileNames = cms.untracked.vstring()
)
############################################################
# Defining matching in DeltaR, sorting by best DeltaR
############################################################
process.mOniaTrigMatch = cms.EDProducer("PATTriggerMatcherDRLessByR",
src = cms.InputTag( 'slimmedMuons' ),
matched = cms.InputTag( 'patTrigger' ), # selections of trigger objects
matchedCuts = cms.string( 'type( "TriggerMuon" ) && path( "HLT_Mu16_TkMu0_dEta18_Onia*")' ), # input does not yet have the 'saveTags' parameter in HLT
maxDPtRel = cms.double( 0.5 ), # no effect here
maxDeltaR = cms.double( 0.3 ), #### selection of matches
maxDeltaEta = cms.double( 0.2 ), # no effect here
resolveAmbiguities = cms.bool( True ),# definition of matcher output
resolveByMatchQuality = cms.bool( True )# definition of matcher output
)
# talk to output module
process.out = cms.OutputModule("PoolOutputModule",
fileName = cms.untracked.string("file:RECO_Step3_amumu_a5_TrigProd_450.root"),
outputCommands = process.MINIAODSIMEventContent.outputCommands
)
process.out.outputCommands += [ 'drop *_*_*_*',
'keep *_*slimmed*_*_*',
'keep *_pfTausEI_*_*',
'keep *_hpsPFTauProducer_*_*',
'keep *_hltTriggerSummaryAOD_*_*',
'keep *_TriggerResults_*_HLT',
'keep *_patTrigger*_*_*',
'keep *_prunedGenParticles_*_*',
'keep *_mOniaTrigMatch_*_*'
]
################################################################################
# Running the matching and setting the the trigger on
################################################################################
from PhysicsTools.PatAlgos.tools.trigTools import *
switchOnTrigger( process ) # This is optional and can be omitted.
switchOnTriggerMatching( process, triggerMatchers = [ 'mOniaTrigMatch'
])
process.outpath = cms.EndPath(process.out)
|
[
"[email protected]"
] | |
5e45d92c570a19aed882561fb8ce582ded4238ea
|
aeac131d9da991853a7eb0a68bc7be4f848b9ed6
|
/API_DB/adjacent.py
|
0c0f176ff137a3bd764efc066cb40a5b8bbd3aaa
|
[] |
no_license
|
xuqil/-Crawler
|
d5b10c137beedb9daa8a33facf8ed80f62e8e53f
|
c9703d8ee2a7ea66ae50d20e53247932987122e6
|
refs/heads/master
| 2020-04-04T11:25:34.729191 | 2018-12-15T11:49:16 | 2018-12-15T11:49:16 | 155,890,694 | 0 | 0 | null | 2018-12-06T11:43:28 | 2018-11-02T16:09:30 |
HTML
|
UTF-8
|
Python
| false | false | 1,708 |
py
|
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, ForeignKey
from sqlalchemy.types import Integer, Unicode
from sqlalchemy.orm import relationship, sessionmaker, joinedload, joinedload_all
Base = declarative_base()
Engine = create_engine('mysql+mysqlconnector://root:[email protected]:3306/test2', encoding='utf8')
DBSession = sessionmaker(bind=Engine)
session = DBSession()
class Node(Base):
__tablename__ = 'node'
id = Column(Integer, autoincrement=True, primary_key=True)
name = Column(Unicode(32), nullable=False, server_default='')
parent = Column(Integer, ForeignKey('node.id'), index=True, server_default=None)
children = relationship('Node', lazy='joined', cascade='all, delete-orphan')
parent_obj = relationship('Node', remote_side=[id])
def init_db():
Base.metadata.create_all(Engine)
# init_db()
# # n = session.query(Node).filter(Node.name == u'小猪').first()
# n = session.query(Node).filter(Node.name == u'小猪').options(joinedload('parent_obj')).first()
# print(n.id)
# n = session.query(Node).filter(Node.name == u'大直沽').options(joinedload('children').joinedload('children')).first()
# print(n.name)
# print(n.children[0].name)
# print(n.children[0].children[0].name)
# n = session.query(Node).filter(Node.name == u'大直沽').options(joinedload_all('children', 'children')).first()
# print(n.name)
# print(n.children[0].name)
# print(n.children[0].children[0].name)
# n = session.query(Node).filter(Node.name == u'小猪').first()
# session.delete(n)
# session.commit()
n = session.query(Node).filter(Node.name == u'等等').first()
n.children = []
session.commit()
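# Note: because `children` uses cascade='all, delete-orphan', assigning an
# empty list orphans the child rows, and the commit deletes them from `node`.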
|
[
"[email protected]"
] | |
d10a5e2e66da614421ca8064583cae2b09a27942
|
09aea7ebe2ce7214ac9f18741e85e49a3d8bcd5e
|
/testes.py
|
f37cee7c457681542087f83cf56ef77f62f49f01
|
[] |
no_license
|
bmsrangel/Projeto_Biblioteca
|
3555bf10058d450ad3d3b61bb20bd7427fe65a4d
|
4789c8070d194dd1ab8e1c2c0e7cc3102086d058
|
refs/heads/master
| 2020-04-27T15:12:24.472246 | 2019-03-07T23:51:35 | 2019-03-07T23:51:35 | 174,436,851 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,407 |
py
|
import unittest
from livro import Livro
from pessoa import Pessoa
from emprestimo import Emprestimo
from database import Database
class TestDatabase(unittest.TestCase):
def setUp(self):
self.db = Database()
self.db.conectar()
self.db.criar_tabelas()
self.db.desconectar()
def test_00_inclusao_pessoa(self):
self.db.conectar()
# self.db.inserir_pessoa('Bruno Rangel', '1234', '17/02/1990')
# self.db.inserir_pessoa('Edilania Silva', '5678', '25/10/1991')
self.assertEqual((2, 'Edilania Silva', '5678', '25/10/1991', 0.0), self.db.buscar_pessoa('5678'))
self.db.desconectar()
def test_01_alterar_multa(self):
self.db.conectar()
self.db.definir_multa('1234', 3.0)
self.assertEqual((1, 'Bruno Rangel', '1234', '17/02/1990', 3.0), self.db.buscar_pessoa('1234'))
self.db.desconectar()
def test_02_inclusao_livro(self):
self.db.conectar()
# self.db.inserir_livro('origem', 'dan brown', 'doubleday', 2017, 3)
self.assertEqual((1, 'Origem', 'Dan Brown', 'Doubleday', 2017, 4), self.db.buscar_livro('origem'))
self.db.desconectar()
def test_03_alterar_quantidade_livros(self):
self.db.conectar()
self.db.alterar_quantidade('origem', 4)
self.assertEqual((1, 'Origem', 'Dan Brown', 'Doubleday', 2017, 4), self.db.buscar_livro('origem'))
self.db.desconectar()
def test_04_novo_emprestimo(self):
self.db.conectar()
# self.db.novo_emprestimo('1234', 'origem')
self.assertEqual((1, '06/03/2019', 'E', 'Bruno Rangel', 'Origem', 'Dan Brown'), self.db.buscar_emprestimo('1234'))
self.db.desconectar()
def test_05_alterar_situacao_emprestimo(self):
self.db.conectar()
self.db.alterar_situacao_emprestimo(1, 'E')
self.assertEqual((1, '06/03/2019', 'E', 'Bruno Rangel', 'Origem', 'Dan Brown'), self.db.buscar_emprestimo('1234'))
self.db.desconectar()
class TestPessoa(unittest.TestCase):
def setUp(self):
self.pessoa = Pessoa('Lidia Gandra', '1011', '15/04/1991')
def test_00_cadastrarPessoa(self):
# self.pessoa.cadastrar()
# self.assertEqual(self.pessoa, Pessoa.consultar_pessoa('1011'))
self.assertTrue(self.pessoa == Pessoa.consultar_pessoa('1011'))
def test_01_multar(self):
self.pessoa.multar(2.5)
self.assertTrue(self.pessoa == Pessoa.consultar_pessoa('1011'))
def test_02_pagar_multa(self):
self.pessoa.pagar_multa()
self.assertTrue(self.pessoa == Pessoa.consultar_pessoa('1011'))
def test_03_pessoa_inexistente(self):
self.assertFalse(Pessoa.consultar_pessoa('1213'))
class TestLivro(unittest.TestCase):
def setUp(self):
self.livro = Livro('A Cabana', 'William Young', 'Sextante', 2007, 10)
def test_00_cadastrar_livro(self):
# self.livro.cadastrar_livro()
self.livro.quantidade = 8
self.assertTrue(self.livro == Livro.consultar_livro('a cabana'))
def test_01_alterar_quantidade_livro(self):
self.livro.alterar_quantidade(8)
self.assertTrue(self.livro == Livro.consultar_livro('a cabana'))
def test_02_livro_inexistente(self):
self.assertFalse(Livro.consultar_livro('madagascar'))
if __name__ == '__main__':
unittest.main(verbosity=3)
|
[
"[email protected]"
] | |
d34a0afb822ac7683ecc3dbd62212c93c518ce50
|
fbddd92b0eafcc79a24c05cbfe5e688174a5be27
|
/laraPython/day22/test3.py
|
1e09604b65b28a1a5c951ac379d68d7148892052
|
[] |
no_license
|
priyankakumbha/python
|
968902b82811c31020b6ad2d853fee8ef64ec1a5
|
ae428d56e4afffa2fc16a8e03e99eeb6bf560037
|
refs/heads/master
| 2022-08-26T13:31:26.874275 | 2020-05-22T12:47:22 | 2020-05-22T12:47:22 | 266,107,443 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 75 |
py
|
# Plain single- or double-quoted strings cannot span physical lines (the
# original raised a SyntaxError); triple-quoted strings can.
s1 = '''abc
xyz'''
s2 = """abc xyz
123"""
print(s1)
print(s2)
|
[
"[email protected]"
] | |
2f26d83ce23d45e2fed8f7f4851fecfb82a00b63
|
c7c969259d9600eaa152d6896b8c3278e019f8c1
|
/cluster/util.py
|
5d2baf066105077e0c19e50a6cd6507549c3fc92
|
[] |
no_license
|
jwintersinger/csc2515-project
|
4874d1fec5c3825cff7091ac6b9af147be88b9c5
|
ccd71032ae0617a2cc125c73b9b0af6e92a902c0
|
refs/heads/master
| 2016-09-11T12:20:29.766193 | 2015-04-17T03:23:08 | 2015-04-17T03:23:08 | 34,094,429 | 2 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 682 |
py
|
import multiprocessing
import numpy as np
def load_mutpairs(fnames, in_parallel=True, limit=None):
if limit:
fnames = fnames[:limit]
if in_parallel:
results = multiprocessing.Pool(8).map(load_single_mutpairs, fnames)
else:
    results = list(map(load_single_mutpairs, fnames))  # list() so results[0] works on Python 3
mutpairs = np.zeros(shape=(len(fnames), results[0].shape[0], 4), dtype=np.bool)
for i, fname in enumerate(fnames):
mutpairs[i,:,:] = results[i]
return mutpairs
def load_single_mutpairs(fname):
t = np.loadtxt(fname)
t = t.astype(np.bool, copy=False)
t = t.T
return t
def extract_score(fname):
fname = fname.rsplit('_', 1)[1]
fname = fname.rsplit('.', 2)[0]
return fname
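# --- Usage sketch (filenames hypothetical; the double extension matches
# extract_score's rsplit logic) ---
# mutpairs = load_mutpairs(['a_85.mutpairs.txt', 'b_92.mutpairs.txt'],
#                          in_parallel=False)
# print(mutpairs.shape)                      # (2, n_positions, 4)
# print(extract_score('a_85.mutpairs.txt'))  # '85'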
|
[
"[email protected]"
] | |
d46a62a0887148e9646d19c3afb237ce53409f3d
|
bd8ec52d55798ae62bbea1906847f56b37593911
|
/vtol/python/hw11/vtolController_lat.py
|
b9da0e6822e1f52d3f630250183268a61562076e
|
[] |
no_license
|
jaringson/Controls_EcEn483
|
2d4e4a65d84afb8f9ddc74925b85349d348a59d5
|
1ca4f24dad65ce92f1ab5310242adf08062e22d1
|
refs/heads/master
| 2021-08-29T22:14:18.007719 | 2017-12-15T05:06:42 | 2017-12-15T05:06:42 | 103,585,334 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,987 |
py
|
import vtolParamHW11_lat as P
import sys
sys.path.append('..') # add parent directory
import vtolParam as P0
import numpy as np
class vtolController_lat:
'''
This class inherits other controllers in order to organize multiple controllers.
'''
def __init__(self):
# Instantiates the SS_ctrl object
self.z_dot = 0.0 # derivative of z
self.theta_dot = 0.0 # derivative of theta
self.z_d1 = 0.0 # Position z delayed by 1 sample
self.theta_d1 = 0.0 # Angle theta delayed by 1 sample
self.K = P.K # state feedback gain
self.kr = P.kr # Input gain
        self.limit = P.tau_max # Maximum force
self.beta = P.beta # dirty derivative gain
self.Ts = P.Ts # sample rate of controller
def u(self, y_r, y):
# y_r is the referenced input
# y is the current state
z_r = y_r[0]
z = y[0]
theta = y[2]
# differentiate z and theta
self.differentiateZ(z)
self.differentiateTheta(theta)
# Construct the state
x = np.matrix([[z], [theta], [self.z_dot], [self.theta_dot]])
# Compute the state feedback controller
tau_tilde = -self.K*x + self.kr*z_r
tau_e = 0
tau = tau_e + tau_tilde
tau = self.saturate(tau)
return [tau]
def differentiateZ(self, z):
'''
differentiate z
'''
self.z_dot = self.beta*self.z_dot + (1-self.beta)*((z - self.z_d1) / self.Ts)
self.z_d1 = z
def differentiateTheta(self, theta):
'''
differentiate theta
'''
self.theta_dot = self.beta*self.theta_dot + (1-self.beta)*((theta - self.theta_d1) / self.Ts)
self.theta_d1 = theta
def saturate(self, u):
if abs(u) > self.limit:
u = self.limit*np.sign(u)
return u
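# --- Notes (illustrative; not from the original repo) ---
# The dirty-derivative update used in differentiateZ/differentiateTheta,
#     x_dot <- beta*x_dot + (1 - beta)*(x - x_d1)/Ts,
# is a low-pass-filtered differentiator: beta closer to 1 smooths more at the
# cost of added lag.
#
# Hypothetical usage (state ordering [z, h, theta] is assumed):
#   ctrl = vtolController_lat()
#   [tau] = ctrl.u([z_ref], [z, h, theta])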
|
[
"[email protected]"
] | |
79510f66671377a5b36333913b1852dbca847db4
|
8afb5afd38548c631f6f9536846039ef6cb297b9
|
/MY_REPOS/INTERVIEW-PREP-COMPLETE/notes-n-resources/Data-Structures-N-Algo/_DS-n-Algos/_Another-One/networking_flow/Minimum_cut.py
|
33daad3378607ec598d9c9fbdfc725885e05db27
|
[
"MIT"
] |
permissive
|
bgoonz/UsefulResourceRepo2.0
|
d87588ffd668bb498f7787b896cc7b20d83ce0ad
|
2cb4b45dd14a230aa0e800042e893f8dfb23beda
|
refs/heads/master
| 2023-03-17T01:22:05.254751 | 2022-08-11T03:18:22 | 2022-08-11T03:18:22 | 382,628,698 | 10 | 12 |
MIT
| 2022-10-10T14:13:54 | 2021-07-03T13:58:52 | null |
UTF-8
|
Python
| false | false | 1,583 |
py
|
# Minimum cut via the Ford-Fulkerson method (BFS augmenting paths, i.e. Edmonds-Karp).
def BFS(graph, s, t, parent):
    # Return True if BFS finds an augmenting path from s to t in the residual graph.
visited = [False] * len(graph)
queue = []
queue.append(s)
visited[s] = True
while queue:
u = queue.pop(0)
for ind in range(len(graph[u])):
if visited[ind] == False and graph[u][ind] > 0:
queue.append(ind)
visited[ind] = True
parent[ind] = u
return True if visited[t] else False
def mincut(graph, source, sink):
    # Filled by BFS to record the augmenting path (parent pointers).
parent = [-1] * (len(graph))
max_flow = 0
res = []
    temp = [i[:] for i in graph]  # Keep a copy of the original capacities.
while BFS(graph, source, sink, parent):
path_flow = float("Inf")
s = sink
while s != source:
# Find the minimum value in select path
path_flow = min(path_flow, graph[parent[s]][s])
s = parent[s]
max_flow += path_flow
v = sink
while v != source:
u = parent[v]
graph[u][v] -= path_flow
graph[v][u] += path_flow
v = parent[v]
for i in range(len(graph)):
for j in range(len(graph[0])):
if graph[i][j] == 0 and temp[i][j] > 0:
res.append((i, j))
return res
graph = [
[0, 16, 13, 0, 0, 0],
[0, 0, 10, 12, 0, 0],
[0, 4, 0, 0, 14, 0],
[0, 0, 9, 0, 0, 20],
[0, 0, 0, 7, 0, 4],
[0, 0, 0, 0, 0, 0],
]
source, sink = 0, 5
print(mincut(graph, source, sink))
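# For this classic network the augmenting paths saturate edges (1,3), (4,3)
# and (4,5), so the script prints [(1, 3), (4, 3), (4, 5)] -- a minimum s-t
# cut of capacity 12 + 7 + 4 = 23. (In general, saturated edges are a
# superset of the minimum cut; here they coincide.)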
|
[
"[email protected]"
] | |
7a395e238daa5075eff6cfc90de04a32bd859c45
|
bbeecb7cff56a96c580709b425823cde53f21621
|
/msw/spots/caribbean/anguilla.py
|
7c5f4514d34e7edf2d1f1006a88b39a1195afddd
|
[] |
no_license
|
hhubbell/python-msw
|
f8a2ef8628d545b3d57a5e54468222177dc47b37
|
5df38db1dc7b3239a6d00e0516f2942077f97099
|
refs/heads/master
| 2020-04-05T23:46:21.209888 | 2015-06-16T01:36:43 | 2015-06-16T01:36:43 | 37,476,303 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 47 |
py
|
WILDERNESS = 483
CUPECOY = 484
THE_LEDGE = 485
|
[
"[email protected]"
] | |
d4ec7a6a271ddb1d7d1065ab3dd6f881f89858a5
|
ed54290846b5c7f9556aacca09675550f0af4c48
|
/salt/salt/pillar/git_pillar.py
|
5cbd384421890ee5f50b6e574abab9fec6f4ed27
|
[
"Apache-2.0"
] |
permissive
|
smallyear/linuxLearn
|
87226ccd8745cd36955c7e40cafd741d47a04a6f
|
342e5020bf24b5fac732c4275a512087b47e578d
|
refs/heads/master
| 2022-03-20T06:02:25.329126 | 2019-08-01T08:39:59 | 2019-08-01T08:39:59 | 103,765,131 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 16,836 |
py
|
# -*- coding: utf-8 -*-
'''
Use a git repository as a Pillar source
---------------------------------------
.. note::
This external pillar has been rewritten for the :doc:`2015.8.0
</topics/releases/2015.8.0>` release. The old method of configuring this
external pillar will be maintained for a couple releases, allowing time for
configurations to be updated to reflect the new usage.
This external pillar allows for a Pillar top file and Pillar SLS files to be
sourced from a git repository.
However, since git_pillar does not have an equivalent to the
:conf_master:`pillar_roots` parameter, configuration is slightly different. The
Pillar top file must still contain the relevant environment, like so:
.. code-block:: yaml
base:
'*':
- foo
The branch/tag which maps to that environment must then be specified along with
the repo's URL. Configuration details can be found below.
.. _git-pillar-pre-2015-8-0:
Configuring git_pillar for Salt releases before 2015.8.0
========================================================
For Salt releases earlier than :doc:`2015.8.0 </topics/releases/2015.8.0>`,
GitPython is the only supported provider for git_pillar. Individual
repositories can be configured under the :conf_master:`ext_pillar`
configuration parameter like so:
.. code-block:: yaml
ext_pillar:
- git: master https://gitserver/git-pillar.git root=subdirectory
The repository is specified in the format ``<branch> <repo_url>``, with an
optional ``root`` parameter (added in the :doc:`2014.7.0
</topics/releases/2014.7.0>` release) which allows the pillar SLS files to be
served up from a subdirectory (similar to :conf_master:`gitfs_root` in gitfs).
To use more than one branch from the same repo, multiple lines must be
specified under :conf_master:`ext_pillar`:
.. code-block:: yaml
ext_pillar:
- git: master https://gitserver/git-pillar.git
- git: dev https://gitserver/git-pillar.git
To remap a specific branch to a specific Pillar environment, use the format
``<branch>:<env>``:
.. code-block:: yaml
ext_pillar:
- git: develop:dev https://gitserver/git-pillar.git
- git: master:prod https://gitserver/git-pillar.git
In this case, the ``develop`` branch would need its own ``top.sls`` with a
``dev`` section in it, like this:
.. code-block:: yaml
dev:
'*':
- bar
The ``master`` branch would need its own ``top.sls`` with a ``prod`` section in
it:
.. code-block:: yaml
prod:
'*':
- bar
If ``__env__`` is specified as the branch name, then git_pillar will use the
branch specified by :conf_master:`gitfs_base`:
.. code-block:: yaml
ext_pillar:
- git: __env__ https://gitserver/git-pillar.git root=pillar
The corresponding Pillar top file would look like this:
.. code-block:: yaml
{{env}}:
'*':
- bar
.. _git-pillar-2015-8-0-and-later:
Configuring git_pillar for Salt releases 2015.8.0 and later
===========================================================
.. note::
In version 2015.8.0, the method of configuring git external pillars has
changed, and now more closely resembles that of the :ref:`Git Fileserver
Backend <tutorial-gitfs>`. If Salt detects the old configuration schema, it
will use the pre-2015.8.0 code to compile the external pillar. A warning
will also be logged.
Beginning with Salt version 2015.8.0, pygit2_ is now supported in addition to
GitPython_ (Dulwich_ will not be supported for the foreseeable future). The
requirements for GitPython_ and pygit2_ are the same as for gitfs, as described
:ref:`here <gitfs-dependencies>`.
.. important::
git_pillar has its own set of global configuration parameters. While it may
seem intuitive to use the global gitfs configuration parameters
(:conf_master:`gitfs_base`, etc.) to manage git_pillar, this will not work.
The main difference for this is the fact that the different components
which use Salt's git backend code do not all function identically. For
instance, in git_pillar it is necessary to specify which branch/tag to be
used for git_pillar remotes. This is the reverse behavior from gitfs, where
branches/tags make up your environments.
See :ref:`here <git_pillar-config-opts>` for documentation on the
git_pillar configuration options and their usage.
Here is an example git_pillar configuration:
.. code-block:: yaml
ext_pillar:
- git:
# Use 'prod' instead of the branch name 'production' as the environment
- production https://gitserver/git-pillar.git:
- env: prod
# Use 'dev' instead of the branch name 'develop' as the environment
- develop https://gitserver/git-pillar.git:
- env: dev
# No per-remote config parameters (and no trailing colon), 'qa' will
# be used as the environment
- qa https://gitserver/git-pillar.git
# SSH key authentication
- master git@other-git-server:pillardata-ssh.git:
# Pillar SLS files will be read from the 'pillar' subdirectory in
# this repository
- root: pillar
- privkey: /path/to/key
- pubkey: /path/to/key.pub
- passphrase: CorrectHorseBatteryStaple
# HTTPS authentication
- master https://other-git-server/pillardata-https.git:
- user: git
- password: CorrectHorseBatteryStaple
The main difference between this and the old way of configuring git_pillar is
that multiple remotes can be configured under one ``git`` section under
:conf_master:`ext_pillar`. More than one ``git`` section can be used, but it is
not necessary. Remotes will be evaluated sequentially.
Per-remote configuration parameters are supported (similar to :ref:`gitfs
<gitfs-per-remote-config>`), and global versions of the git_pillar
configuration parameters can also be set.
With the addition of pygit2_ support, git_pillar can now interact with
authenticated remotes. Authentication works just like in gitfs (as outlined in
the :ref:`Git Fileserver Backend Walkthrough <gitfs-authentication>`), only
with the global authentication parameter names prefixed with ``git_pillar``
instead of ``gitfs`` (e.g. :conf_master:`git_pillar_pubkey`,
:conf_master:`git_pillar_privkey`, :conf_master:`git_pillar_passphrase`, etc.).
.. _GitPython: https://github.com/gitpython-developers/GitPython
.. _pygit2: https://github.com/libgit2/pygit2
.. _Dulwich: https://www.samba.org/~jelmer/dulwich/
'''
from __future__ import absolute_import
# Import python libs
import copy
import logging
import hashlib
import os
# Import salt libs
import salt.utils.gitfs
import salt.utils.dictupdate
from salt.exceptions import FileserverConfigError
from salt.pillar import Pillar
# Import third party libs
import salt.ext.six as six
# pylint: disable=import-error
try:
import git
HAS_GITPYTHON = True
except ImportError:
HAS_GITPYTHON = False
# pylint: enable=import-error
PER_REMOTE_OVERRIDES = ('env', 'root', 'ssl_verify')
# Set up logging
log = logging.getLogger(__name__)
# Define the module's virtual name
__virtualname__ = 'git'
def __virtual__():
'''
Only load if gitpython is available
'''
git_ext_pillars = [x for x in __opts__['ext_pillar'] if 'git' in x]
if not git_ext_pillars:
# No git external pillars were configured
return False
for ext_pillar in git_ext_pillars:
if isinstance(ext_pillar['git'], six.string_types):
# Verification of legacy git pillar configuration
if not HAS_GITPYTHON:
log.error(
'Git-based ext_pillar is enabled in configuration but '
'could not be loaded, is GitPython installed?'
)
return False
if not git.__version__ > '0.3.0':
return False
return __virtualname__
else:
# Verification of new git pillar configuration
try:
salt.utils.gitfs.GitPillar(__opts__)
# Initialization of the GitPillar object did not fail, so we
# know we have valid configuration syntax and that a valid
# provider was detected.
return __virtualname__
except FileserverConfigError:
pass
return False
def ext_pillar(minion_id, repo, pillar_dirs):
'''
Checkout the ext_pillar sources and compile the resulting pillar SLS
'''
if isinstance(repo, six.string_types):
return _legacy_git_pillar(minion_id, repo, pillar_dirs)
else:
opts = copy.deepcopy(__opts__)
opts['pillar_roots'] = {}
pillar = salt.utils.gitfs.GitPillar(opts)
pillar.init_remotes(repo, PER_REMOTE_OVERRIDES)
pillar.checkout()
ret = {}
merge_strategy = __opts__.get(
'pillar_source_merging_strategy',
'smart'
)
merge_lists = __opts__.get(
'pillar_merge_lists',
False
)
for pillar_dir, env in six.iteritems(pillar.pillar_dirs):
log.debug(
'git_pillar is processing pillar SLS from {0} for pillar '
'env \'{1}\''.format(pillar_dir, env)
)
all_dirs = [d for (d, e) in six.iteritems(pillar.pillar_dirs)
if env == e]
# Ensure that the current pillar_dir is first in the list, so that
# the pillar top.sls is sourced from the correct location.
pillar_roots = [pillar_dir]
pillar_roots.extend([x for x in all_dirs if x != pillar_dir])
opts['pillar_roots'] = {env: pillar_roots}
local_pillar = Pillar(opts, __grains__, minion_id, env)
ret = salt.utils.dictupdate.merge(
ret,
local_pillar.compile_pillar(ext=False),
strategy=merge_strategy,
merge_lists=merge_lists
)
return ret
# Legacy git_pillar code
class _LegacyGitPillar(object):
'''
Deal with the remote git repository for Pillar
'''
def __init__(self, branch, repo_location, opts):
'''
Try to initialize the Git repo object
'''
self.branch = self.map_branch(branch, opts)
self.rp_location = repo_location
self.opts = opts
self._envs = set()
self.working_dir = ''
self.repo = None
hash_type = getattr(hashlib, opts.get('hash_type', 'md5'))
hash_str = '{0} {1}'.format(self.branch, self.rp_location)
repo_hash = hash_type(hash_str).hexdigest()
rp_ = os.path.join(self.opts['cachedir'], 'pillar_gitfs', repo_hash)
if not os.path.isdir(rp_):
os.makedirs(rp_)
try:
self.repo = git.Repo.init(rp_)
except (git.exc.NoSuchPathError,
git.exc.InvalidGitRepositoryError) as exc:
log.error('GitPython exception caught while '
'initializing the repo: {0}. Maybe '
'git is not available.'.format(exc))
# Git directory we are working on
# Should be the same as self.repo.working_dir
self.working_dir = rp_
if isinstance(self.repo, git.Repo):
if not self.repo.remotes:
try:
self.repo.create_remote('origin', self.rp_location)
# ignore git ssl verification if requested
if self.opts.get('pillar_gitfs_ssl_verify', True):
self.repo.git.config('http.sslVerify', 'true')
else:
self.repo.git.config('http.sslVerify', 'false')
except os.error:
# This exception occurs when two processes are
# trying to write to the git config at once, go
# ahead and pass over it since this is the only
# write.
# This should place a lock down.
pass
else:
if self.repo.remotes.origin.url != self.rp_location:
self.repo.remotes.origin.config_writer.set(
'url', self.rp_location)
def map_branch(self, branch, opts=None):
opts = __opts__ if opts is None else opts
if branch == '__env__':
branch = opts.get('environment') or 'base'
if branch == 'base':
branch = opts.get('gitfs_base') or 'master'
elif ':' in branch:
branch = branch.split(':', 1)[0]
return branch
def update(self):
'''
Ensure you are following the latest changes on the remote
Return boolean whether it worked
'''
try:
log.debug('Updating fileserver for git_pillar module')
self.repo.git.fetch()
except git.exc.GitCommandError as exc:
log.error('Unable to fetch the latest changes from remote '
'{0}: {1}'.format(self.rp_location, exc))
return False
try:
self.repo.git.checkout('origin/{0}'.format(self.branch))
except git.exc.GitCommandError as exc:
log.error('Unable to checkout branch '
'{0}: {1}'.format(self.branch, exc))
return False
return True
def envs(self):
'''
Return a list of refs that can be used as environments
'''
if isinstance(self.repo, git.Repo):
remote = self.repo.remote()
for ref in self.repo.refs:
parted = ref.name.partition('/')
short = parted[2] if parted[2] else parted[0]
if isinstance(ref, git.Head):
if short == 'master':
short = 'base'
if ref not in remote.stale_refs:
self._envs.add(short)
elif isinstance(ref, git.Tag):
self._envs.add(short)
return list(self._envs)
def _legacy_git_pillar(minion_id, repo_string, pillar_dirs):
'''
Support pre-Beryllium config schema
'''
if pillar_dirs is None:
return
# split the branch, repo name and optional extra (key=val) parameters.
options = repo_string.strip().split()
branch_env = options[0]
repo_location = options[1]
root = ''
for extraopt in options[2:]:
# Support multiple key=val attributes as custom parameters.
DELIM = '='
if DELIM not in extraopt:
log.error('Incorrectly formatted extra parameter. '
'Missing \'{0}\': {1}'.format(DELIM, extraopt))
key, val = _extract_key_val(extraopt, DELIM)
if key == 'root':
root = val
else:
log.warning('Unrecognized extra parameter: {0}'.format(key))
# environment is "different" from the branch
cfg_branch, _, environment = branch_env.partition(':')
gitpil = _LegacyGitPillar(cfg_branch, repo_location, __opts__)
branch = gitpil.branch
if environment == '':
if branch == 'master':
environment = 'base'
else:
environment = branch
# normpath is needed to remove appended '/' if root is empty string.
pillar_dir = os.path.normpath(os.path.join(gitpil.working_dir, root))
pillar_dirs.setdefault(pillar_dir, {})
if cfg_branch == '__env__' and branch not in ['master', 'base']:
gitpil.update()
elif pillar_dirs[pillar_dir].get(branch, False):
return {} # we've already seen this combo
pillar_dirs[pillar_dir].setdefault(branch, True)
# Don't recurse forever-- the Pillar object will re-call the ext_pillar
# function
if __opts__['pillar_roots'].get(branch, []) == [pillar_dir]:
return {}
opts = copy.deepcopy(__opts__)
opts['pillar_roots'][environment] = [pillar_dir]
pil = Pillar(opts, __grains__, minion_id, branch)
return pil.compile_pillar(ext=False)
def _update(branch, repo_location):
'''
Ensure you are following the latest changes on the remote
return boolean whether it worked
'''
gitpil = _LegacyGitPillar(branch, repo_location, __opts__)
return gitpil.update()
def _envs(branch, repo_location):
'''
Return a list of refs that can be used as environments
'''
gitpil = _LegacyGitPillar(branch, repo_location, __opts__)
return gitpil.envs()
def _extract_key_val(kv, delimiter='='):
'''Extract key and value from key=val string.
Example:
>>> _extract_key_val('foo=bar')
('foo', 'bar')
'''
pieces = kv.split(delimiter)
key = pieces[0]
val = delimiter.join(pieces[1:])
return key, val
|
[
"[email protected]"
] | |
65f119cbba27c8105c81c41132cf967b03783924
|
bdda458001808a029b171c09286f022a1384d180
|
/prove/provascript.py
|
b5d257652e5cc5bda09a9bc81170655522f2767a
|
[] |
no_license
|
bianchimro/crm-django
|
4189f5c0c31f03d23a2b644a14403d63b8efdf0a
|
d8e4d18174cb050fd7a22d53fe8bb152e6e43120
|
refs/heads/master
| 2021-04-27T15:15:28.219887 | 2018-02-22T16:51:00 | 2018-02-22T16:51:00 | 122,466,604 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 321 |
py
|
import sys
from libreria import helpers
def provafunzione(a, b=10, c=6):
print(a)
print(b)
provafunzione(10)
provafunzione(10, b=2)
provafunzione(10, b=2, c=10)
provafunzione(10, c=10)
def main(msg):
print(msg)
x = helpers.somma(1, 2)
print(x)
if __name__ == '__main__':
main(sys.argv[1])
|
[
"[email protected]"
] | |
40d0f3711fc75767e77a31c5fdf441b71a49d137
|
89b7b6375226e5224321e8e467b1047830bd2073
|
/easy/palindromeLinkedList.py
|
ecedabb47a8c88feecda91c0c12e7b75e6f9ff09
|
[] |
no_license
|
xiao-bo/leetcode
|
9cf5ec1dd86faa699f51b3a616929da4ebdb3053
|
671383b9ee745ed84fbb6d76a91d8be353710096
|
refs/heads/master
| 2023-03-18T19:10:28.990610 | 2023-03-11T10:11:43 | 2023-03-11T10:11:43 | 154,341,943 | 0 | 0 | null | 2023-03-11T10:11:44 | 2018-10-23T14:24:15 |
Python
|
UTF-8
|
Python
| false | false | 2,993 |
py
|
# Definition for singly-linked list.
class ListNode(object):
def __init__(self, x):
self.val = x
self.next = None
class Solution(object):
def isPalindrome(self, head):
"""
:type head: ListNode
:rtype: bool
"""
# Definition for singly-linked list.
# O(n) and space is O(n)
# Runtime 350 ms Beats 30.14% Memory 17 MB Beats 13.53%
nums = []
current = head
while current:
nums.append(current.val)
current = current.next
for i in range(0, int(len(nums)/2)):
if nums[i] == nums[len(nums)-1-i]:
continue
else:
return False
return True
## first method
## Runtime: 56 ms, faster than 98.70% of Python online submissions for Palindrome Linked List.
## Memory Usage: 32.3 MB, less than 6.90% of Python online submissions for Palindrome Linked List.
## time complexity is O(n)
## space complexity is O(n)
'''
num = []
while head:
num.append(head.val)
head = head.next
print(num)
flag = True
length = len(num)
for x in range(0,length):
print("nums[{}] = {} nums[{}] = {}".format(x,num[x],length-x-1,num[length-x-1]))
if num[x] == num[length-x-1]:
continue
else:
flag = False
break
return flag
'''
## second method
## Runtime: 76 ms, faster than 34.63% of Python online submissions for Palindrome Linked List.
## Memory Usage: 30.9 MB, less than 34.48% of Python online submissions for Palindrome Linked List.
slow = fast = head
## get midpoint
while fast and fast.next:
fast = fast.next.next
slow = slow.next
print(" slow = {}".format(slow.val))
slow = self.reverse(slow)
self.printAllnode(slow)
while head and slow:
if head.val != slow.val:
return False
else:
head = head.next
slow = slow.next
return True
def reverse(self,head):
prev = None
next = None
while head:
next = head.next
head.next = prev
prev = head
head = next
return prev
def printAllnode(self,head):
while head:
print(head.val)
head = head.next
def main():
a = Solution()
node1 = ListNode(1)
node2 = ListNode(3)
node3 = ListNode(1)
node4 = ListNode(3)
node5 = ListNode(1)
node1.next = node2
node2.next = node3
node3.next = node4
node4.next = node5
ans = a.isPalindrome(node1)
#node1 = a.reverse(node1)
print(ans)
#a.printAllnode(node1)
if __name__ == '__main__':
main()
|
[
"[email protected]"
] | |
bd9c12207e291abf0c6e8140048b593d0520e4c7
|
9b20743ec6cd28d749a4323dcbadb1a0cffb281b
|
/07_Machine_Learning_Mastery_with_Python/05/correlation_matrix_generic.py
|
bef6f583b5a4eab07926d46ad4579350536cf8f3
|
[] |
no_license
|
jggrimesdc-zz/MachineLearningExercises
|
6e1c7e1f95399e69bba95cdfe17c4f8d8c90d178
|
ee265f1c6029c91daff172b3e7c1a96177646bc5
|
refs/heads/master
| 2023-03-07T19:30:26.691659 | 2021-02-19T08:00:49 | 2021-02-19T08:00:49 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 440 |
py
|
# Correlation Matrix Plot (generic)
from matplotlib import pyplot
from pandas import read_csv
filename = 'pima-indians-diabetes.data.csv'
names = ['preg', 'plas', 'pres', 'skin', 'test', 'mass', 'pedi', 'age', 'class']
data = read_csv(filename, names=names)
correlations = data.corr()
# plot correlation matrix
fig = pyplot.figure()
ax = fig.add_subplot(111)
cax = ax.matshow(correlations, vmin=-1, vmax=1)
fig.colorbar(cax)
pyplot.show()
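# Optional extension (assumed; not in the original listing): label the axes
# with the column names so the matrix is readable.
# ticks = range(len(names))
# ax.set_xticks(ticks)
# ax.set_yticks(ticks)
# ax.set_xticklabels(names)
# ax.set_yticklabels(names)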
|
[
"[email protected]"
] | |
0d293c66679798c3156d7d4d81887dd17635b22c
|
e21f7d14e564d7fb921277a329ff078e86ad86a2
|
/2018/Day 03/day3_part1.py
|
9608667c4f614f00c804d40b776d5d7de8749d61
|
[] |
no_license
|
MrGallo/advent-of-code-solutions
|
31456a0718303cca6790cf1227831bcb14649e27
|
28e0331e663443ffa2638188437cc7e46d09f465
|
refs/heads/master
| 2022-07-07T08:49:30.460166 | 2020-12-17T17:22:24 | 2020-12-17T17:22:24 | 160,988,019 | 0 | 1 | null | 2022-06-21T22:26:19 | 2018-12-08T23:34:51 |
Python
|
UTF-8
|
Python
| false | false | 523 |
py
|
import numpy as np
with open('input.txt') as f:
claims = []
for claim in f:
c_id = int(claim[1:claim.index(" ")])
at_i = claim.index("@")
colon_i = claim.index(":")
x, y = [int(n) for n in claim[at_i+2:colon_i].split(",")]
w, h = [int(n) for n in claim[colon_i+2:].split("x")]
claims.append((c_id, x, y, w, h))
grid = np.zeros((1000, 1000), dtype=int)
# Place claims on grid
for c_id, x, y, w, h in claims:
grid[y:y+h, x:x+w] += 1
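# Cells with value > 1 lie inside two or more claims; sum() counts them.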
print((grid > 1).sum())
|
[
"[email protected]"
] | |
d83b6bb921f87ceb9e99f7fb3acba053b8c15752
|
cd876d32aa66112892dc9550837ad843e3e03afd
|
/env_carzone/Lib/site-packages/django/core/management/__init__.py
|
6e57539c88e529768febe95c6f6521919faf1d0d
|
[
"BSD-3-Clause"
] |
permissive
|
viplavdube/Car-Yard-App
|
7665b7e6e54f3b0e4a4da563151f85d65c225cef
|
65381a50f828e80f31d25d4f35e497f51c2d224d
|
refs/heads/master
| 2023-04-19T03:49:18.991604 | 2021-04-27T17:51:10 | 2021-04-27T17:51:10 | 349,094,392 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 16,801 |
py
|
import functools
import os
import pkgutil
import sys
from argparse import _SubParsersAction
from collections import defaultdict
from difflib import get_close_matches
from importlib import import_module
import django
from django.apps import apps
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.management.base import (
BaseCommand,
CommandError,
CommandParser,
handle_default_options,
)
from django.core.management.color import color_style
from django.utils import autoreload
def find_commands(management_dir):
"""
Given a path to a management directory, return a list of all the command
names that are available.
"""
command_dir = os.path.join(management_dir, "commands")
return [
name
for _, name, is_pkg in pkgutil.iter_modules([command_dir])
if not is_pkg and not name.startswith("_")
]
def load_command_class(app_name, name):
"""
Given a command name and an application name, return the Command
class instance. Allow all errors raised by the import process
(ImportError, AttributeError) to propagate.
"""
module = import_module("%s.management.commands.%s" % (app_name, name))
return module.Command()
@functools.lru_cache(maxsize=None)
def get_commands():
"""
Return a dictionary mapping command names to their callback applications.
Look for a management.commands package in django.core, and in each
installed application -- if a commands package exists, register all
commands in that package.
Core commands are always included. If a settings module has been
specified, also include user-defined commands.
The dictionary is in the format {command_name: app_name}. Key-value
pairs from this dictionary can then be used in calls to
load_command_class(app_name, command_name)
If a specific version of a command must be loaded (e.g., with the
startapp command), the instantiated module can be placed in the
dictionary in place of the application name.
The dictionary is cached on the first call and reused on subsequent
calls.
"""
commands = {name: "django.core" for name in find_commands(__path__[0])}
if not settings.configured:
return commands
for app_config in reversed(list(apps.get_app_configs())):
path = os.path.join(app_config.path, "management")
commands.update({name: app_config.name for name in find_commands(path)})
return commands
def call_command(command_name, *args, **options):
"""
Call the given command, with the given options and args/kwargs.
This is the primary API you should use for calling specific commands.
`command_name` may be a string or a command object. Using a string is
preferred unless the command object is required for further processing or
testing.
Some examples:
call_command('migrate')
call_command('shell', plain=True)
call_command('sqlmigrate', 'myapp')
from django.core.management.commands import flush
cmd = flush.Command()
call_command(cmd, verbosity=0, interactive=False)
# Do something with cmd ...
"""
if isinstance(command_name, BaseCommand):
# Command object passed in.
command = command_name
command_name = command.__class__.__module__.split(".")[-1]
else:
# Load the command object by name.
try:
app_name = get_commands()[command_name]
except KeyError:
raise CommandError("Unknown command: %r" % command_name)
if isinstance(app_name, BaseCommand):
# If the command is already loaded, use it directly.
command = app_name
else:
command = load_command_class(app_name, command_name)
# Simulate argument parsing to get the option defaults (see #10080 for details).
parser = command.create_parser("", command_name)
# Use the `dest` option name from the parser option
opt_mapping = {
min(s_opt.option_strings).lstrip("-").replace("-", "_"): s_opt.dest
for s_opt in parser._actions
if s_opt.option_strings
}
arg_options = {opt_mapping.get(key, key): value for key, value in options.items()}
parse_args = [str(a) for a in args]
def get_actions(parser):
# Parser actions and actions from sub-parser choices.
for opt in parser._actions:
if isinstance(opt, _SubParsersAction):
for sub_opt in opt.choices.values():
yield from get_actions(sub_opt)
else:
yield opt
parser_actions = list(get_actions(parser))
mutually_exclusive_required_options = {
opt
for group in parser._mutually_exclusive_groups
for opt in group._group_actions
if group.required
}
# Any required arguments which are passed in via **options must be passed
# to parse_args().
parse_args += [
"{}={}".format(min(opt.option_strings), arg_options[opt.dest])
for opt in parser_actions
if (
opt.dest in options
and (opt.required or opt in mutually_exclusive_required_options)
)
]
defaults = parser.parse_args(args=parse_args)
defaults = dict(defaults._get_kwargs(), **arg_options)
# Raise an error if any unknown options were passed.
stealth_options = set(command.base_stealth_options + command.stealth_options)
dest_parameters = {action.dest for action in parser_actions}
valid_options = (dest_parameters | stealth_options).union(opt_mapping)
unknown_options = set(options) - valid_options
if unknown_options:
raise TypeError(
"Unknown option(s) for %s command: %s. "
"Valid options are: %s."
% (
command_name,
", ".join(sorted(unknown_options)),
", ".join(sorted(valid_options)),
)
)
# Move positional args out of options to mimic legacy optparse
args = defaults.pop("args", ())
if "skip_checks" not in options:
defaults["skip_checks"] = True
return command.execute(*args, **defaults)
class ManagementUtility:
"""
Encapsulate the logic of the django-admin and manage.py utilities.
"""
def __init__(self, argv=None):
self.argv = argv or sys.argv[:]
self.prog_name = os.path.basename(self.argv[0])
if self.prog_name == "__main__.py":
self.prog_name = "python -m django"
self.settings_exception = None
def main_help_text(self, commands_only=False):
"""Return the script's main help text, as a string."""
if commands_only:
usage = sorted(get_commands())
else:
usage = [
"",
"Type '%s help <subcommand>' for help on a specific subcommand."
% self.prog_name,
"",
"Available subcommands:",
]
commands_dict = defaultdict(lambda: [])
for name, app in get_commands().items():
if app == "django.core":
app = "django"
else:
app = app.rpartition(".")[-1]
commands_dict[app].append(name)
style = color_style()
for app in sorted(commands_dict):
usage.append("")
usage.append(style.NOTICE("[%s]" % app))
for name in sorted(commands_dict[app]):
usage.append(" %s" % name)
# Output an extra note if settings are not properly configured
if self.settings_exception is not None:
usage.append(
style.NOTICE(
"Note that only Django core commands are listed "
"as settings are not properly configured (error: %s)."
% self.settings_exception
)
)
return "\n".join(usage)
def fetch_command(self, subcommand):
"""
Try to fetch the given subcommand, printing a message with the
appropriate command called from the command line (usually
"django-admin" or "manage.py") if it can't be found.
"""
# Get commands outside of try block to prevent swallowing exceptions
commands = get_commands()
try:
app_name = commands[subcommand]
except KeyError:
if os.environ.get("DJANGO_SETTINGS_MODULE"):
# If `subcommand` is missing due to misconfigured settings, the
# following line will retrigger an ImproperlyConfigured exception
# (get_commands() swallows the original one) so the user is
# informed about it.
settings.INSTALLED_APPS
else:
sys.stderr.write("No Django settings specified.\n")
possible_matches = get_close_matches(subcommand, commands)
sys.stderr.write("Unknown command: %r" % subcommand)
if possible_matches:
sys.stderr.write(". Did you mean %s?" % possible_matches[0])
sys.stderr.write("\nType '%s help' for usage.\n" % self.prog_name)
sys.exit(1)
if isinstance(app_name, BaseCommand):
# If the command is already loaded, use it directly.
klass = app_name
else:
klass = load_command_class(app_name, subcommand)
return klass
def autocomplete(self):
"""
Output completion suggestions for BASH.
        The output of this function is passed to BASH's `COMPREPLY` variable and
        treated as completion suggestions. `COMPREPLY` expects a space
separated string as the result.
The `COMP_WORDS` and `COMP_CWORD` BASH environment variables are used
to get information about the cli input. Please refer to the BASH
        man-page for more information about these variables.
Subcommand options are saved as pairs. A pair consists of
the long option string (e.g. '--exclude') and a boolean
value indicating if the option requires arguments. When printing to
stdout, an equal sign is appended to options which require arguments.
Note: If debugging this function, it is recommended to write the debug
output in a separate file. Otherwise the debug output will be treated
and formatted as potential completion suggestions.
"""
# Don't complete if user hasn't sourced bash_completion file.
if "DJANGO_AUTO_COMPLETE" not in os.environ:
return
cwords = os.environ["COMP_WORDS"].split()[1:]
cword = int(os.environ["COMP_CWORD"])
try:
curr = cwords[cword - 1]
except IndexError:
curr = ""
subcommands = [*get_commands(), "help"]
options = [("--help", False)]
# subcommand
if cword == 1:
print(" ".join(sorted(filter(lambda x: x.startswith(curr), subcommands))))
# subcommand options
# special case: the 'help' subcommand has no options
elif cwords[0] in subcommands and cwords[0] != "help":
subcommand_cls = self.fetch_command(cwords[0])
# special case: add the names of installed apps to options
if cwords[0] in ("dumpdata", "sqlmigrate", "sqlsequencereset", "test"):
try:
app_configs = apps.get_app_configs()
# Get the last part of the dotted path as the app name.
options.extend((app_config.label, 0) for app_config in app_configs)
except ImportError:
# Fail silently if DJANGO_SETTINGS_MODULE isn't set. The
# user will find out once they execute the command.
pass
parser = subcommand_cls.create_parser("", cwords[0])
options.extend(
(min(s_opt.option_strings), s_opt.nargs != 0)
for s_opt in parser._actions
if s_opt.option_strings
)
# filter out previously specified options from available options
prev_opts = {x.split("=")[0] for x in cwords[1 : cword - 1]}
options = (opt for opt in options if opt[0] not in prev_opts)
# filter options by current input
options = sorted((k, v) for k, v in options if k.startswith(curr))
for opt_label, require_arg in options:
# append '=' to options which require args
if require_arg:
opt_label += "="
print(opt_label)
# Exit code of the bash completion function is never passed back to
# the user, so it's safe to always exit with 0.
# For more details see #25420.
sys.exit(0)
def execute(self):
"""
Given the command-line arguments, figure out which subcommand is being
run, create a parser appropriate to that command, and run it.
"""
try:
subcommand = self.argv[1]
except IndexError:
subcommand = "help" # Display help if no arguments were given.
# Preprocess options to extract --settings and --pythonpath.
# These options could affect the commands that are available, so they
# must be processed early.
parser = CommandParser(
usage="%(prog)s subcommand [options] [args]",
add_help=False,
allow_abbrev=False,
)
parser.add_argument("--settings")
parser.add_argument("--pythonpath")
parser.add_argument("args", nargs="*") # catch-all
try:
options, args = parser.parse_known_args(self.argv[2:])
handle_default_options(options)
except CommandError:
pass # Ignore any option errors at this point.
try:
settings.INSTALLED_APPS
except ImproperlyConfigured as exc:
self.settings_exception = exc
except ImportError as exc:
self.settings_exception = exc
if settings.configured:
# Start the auto-reloading dev server even if the code is broken.
# The hardcoded condition is a code smell but we can't rely on a
# flag on the command class because we haven't located it yet.
if subcommand == "runserver" and "--noreload" not in self.argv:
try:
autoreload.check_errors(django.setup)()
except Exception:
# The exception will be raised later in the child process
# started by the autoreloader. Pretend it didn't happen by
# loading an empty list of applications.
apps.all_models = defaultdict(dict)
apps.app_configs = {}
apps.apps_ready = apps.models_ready = apps.ready = True
# Remove options not compatible with the built-in runserver
# (e.g. options for the contrib.staticfiles' runserver).
# Changes here require manually testing as described in
# #27522.
_parser = self.fetch_command("runserver").create_parser(
"django", "runserver"
)
_options, _args = _parser.parse_known_args(self.argv[2:])
for _arg in _args:
self.argv.remove(_arg)
# In all other cases, django.setup() is required to succeed.
else:
django.setup()
self.autocomplete()
if subcommand == "help":
if "--commands" in args:
sys.stdout.write(self.main_help_text(commands_only=True) + "\n")
elif not options.args:
sys.stdout.write(self.main_help_text() + "\n")
else:
self.fetch_command(options.args[0]).print_help(
self.prog_name, options.args[0]
)
# Special-cases: We want 'django-admin --version' and
# 'django-admin --help' to work, for backwards compatibility.
elif subcommand == "version" or self.argv[1:] == ["--version"]:
sys.stdout.write(django.get_version() + "\n")
elif self.argv[1:] in (["--help"], ["-h"]):
sys.stdout.write(self.main_help_text() + "\n")
else:
self.fetch_command(subcommand).run_from_argv(self.argv)
def execute_from_command_line(argv=None):
"""Run a ManagementUtility."""
utility = ManagementUtility(argv)
utility.execute()
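# Usage sketch (not part of this module): a minimal manage.py that drives
# execute_from_command_line; "mysite.settings" is a placeholder module name.
#
#     import os
#     import sys
#     from django.core.management import execute_from_command_line
#
#     if __name__ == "__main__":
#         os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mysite.settings")
#         execute_from_command_line(sys.argv)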
|
[
"[email protected]"
] | |
db71f979debd03e7f83d2826f8d2638ade23dd94
|
32b0f6aa3f52db84099205a3f6b59464469617c8
|
/aa/unit/unit.py
|
9bb67ee8fe20a7ea6515a6293b06f499bf44489c
|
[
"MIT"
] |
permissive
|
projectweekend/aa
|
9eed3b75153cee74990640e9a40475172d5175b3
|
5c6da28121306b1125b2734d5a96677b3e3786e0
|
refs/heads/master
| 2022-01-29T11:34:22.467002 | 2018-09-04T02:33:43 | 2018-09-04T02:33:43 | 126,966,057 | 0 | 0 |
MIT
| 2022-01-21T19:18:28 | 2018-03-27T09:56:16 |
Python
|
UTF-8
|
Python
| false | false | 2,248 |
py
|
from random import randint
from .info import *
class Bonus:
def __init__(self, targets, boosted_attribute, boost_value):
self.targets = targets
self.boosted_attribute = boosted_attribute
self.boost_value = boost_value
class Roll:
def __init__(self, unit):
self._unit = unit
def _roll(self):
return randint(1, 6)
def attack(self):
roll = self._roll()
if roll <= self._unit.attack:
return roll, 1
return roll, 0
def defense(self):
roll = self._roll()
if roll <= self._unit.defense:
return roll, 1
return roll, 0
class Rank:
def __init__(self, unit):
self._unit = unit
@property
def attack(self):
return self._unit.attack + self._unit.cost + self.bonus
@property
def defense(self):
return self._unit.defense + self._unit.cost + self.bonus
@property
def bonus(self):
return 1.5 if self._unit.bonuses_granted else 0
class Unit:
def __init__(self, name, attack, defense, cost, movement, type,
bonuses_granted, active_bonus=None):
self.name = name
self._attack = attack
self._defense = defense
self.cost = cost
self.movement = movement
self.type = type
self.bonuses_granted = bonuses_granted
self.active_bonus = active_bonus
self.roll = Roll(unit=self)
self.rank = Rank(unit=self)
def __repr__(self):
return self.name
@classmethod
def build_by_name(cls, name):
unit_args = UNIT_INFO[name.title()]
kwargs = dict(unit_args)
kwargs[BONUSES] = [Bonus(**e) for e in unit_args[BONUSES]]
return cls(**kwargs)
@property
def attack(self):
if self.active_bonus is None:
return self._attack
if self.active_bonus.boosted_attribute != ATTACK:
return self._attack
return self.active_bonus.boost_value
@property
def defense(self):
if self.active_bonus is None:
return self._defense
if self.active_bonus.boosted_attribute != DEFENSE:
return self._defense
return self.active_bonus.boost_value
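if __name__ == "__main__":
    # Smoke-test sketch (not in the original module): build a Unit by hand with
    # made-up stats, since the real values live in UNIT_INFO inside .info.
    infantry = Unit(name="Infantry", attack=1, defense=2, cost=3,
                    movement=1, type="land", bonuses_granted=[])
    roll, hit = infantry.roll.attack()
    print("{} rolled {} -> {} hit(s)".format(infantry, roll, hit))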
|
[
"[email protected]"
] | |
a57790abe4d9ce8e9cc97d8ff297729b1c3ab13c
|
b1892cfaba853689e6db9bff2cc930e7472e4a67
|
/src/Calculating_Synthetic_Networks_Organize_Runs.py
|
c4b2538e7428c0ee41c0da5948e6d6850c392a69
|
[] |
no_license
|
jeffalstott/technologyspace
|
0cb01655cec4d8c18c5496ecb1b621eeac4a3648
|
34185de439d58830392aaeadc61c2c59ccd84afa
|
refs/heads/master
| 2020-12-09T13:56:54.386249 | 2018-04-30T13:56:43 | 2018-04-30T13:56:43 | 46,704,128 | 3 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,648 |
py
|
# coding: utf-8
# In[2]:
import pandas as pd
from pylab import *
# In[3]:
# class_system = 'IPC'
# n_controls = 1000
# target_year = 2010
# In[4]:
# n_years = 'cumulative'
if n_years is None or n_years == 'all' or n_years == 'cumulative':
n_years_label = ''
else:
n_years_label = '%i_years_'%n_years
# In[5]:
# occurrence_entities = {'Firm': ('occurrences_organized.h5', 'entity_classes_Firm'),
# 'Inventor': ('occurrences_organized.h5', 'entity_classes_Inventor'),
# 'Country': ('occurrences_organized.h5', 'entity_classes_Country'),
# 'PID': ('classifications_organized.h5', 'patent_classes'),
# }
# entity_types = list(occurrence_entities.keys())
# In[6]:
cooccurrence_base_file_name = 'synthetic_control_cooccurrence_'+n_years_label+'%s_preserve_years_%s'
citations_base_file_name = 'synthetic_control_citations_'+n_years_label+'%s'
# In[7]:
# data_directory = '../data/'
citations_controls_directory = data_directory+'Class_Relatedness_Networks/citations/controls/%s/'%class_system
coocurrence_controls_directory = data_directory+'Class_Relatedness_Networks/cooccurrence/controls/%s/'%class_system
# In[8]:
import gc
from time import time
# In[9]:
def organize_runs(df_name,
file_name,
controls_directory=citations_controls_directory,
n_controls=n_controls,
target_year=target_year,
controls=None,
multiple_metrics=True,
target_metric=None
):
t = time()
for randomization_id in range(n_controls):
if not randomization_id%100:
print(randomization_id)
print("%.0f seconds"%(time()-t))
t = time()
f = '%s_%i.h5'%(file_name, randomization_id)
try:
if multiple_metrics:
x = pd.read_hdf(controls_directory+f, df_name).ix[:,target_year]
else:
x = pd.read_hdf(controls_directory+f, df_name).ix[target_year]
        except Exception:
print("Data not loading for %s. Continuing."%f)
continue
if controls is None:
controls = pd.Panel4D(labels=x.items, items=arange(n_controls),
major_axis=x.major_axis, minor_axis=x.minor_axis)
if multiple_metrics:
controls.ix[x.items, randomization_id] = x.values
else:
controls.ix[target_metric, randomization_id] = x
gc.collect()
return controls
# In[10]:
controls = organize_runs('synthetic_citations_%s'%class_system,
citations_base_file_name%class_system,
citations_controls_directory,
controls=None
)
# In[18]:
for entity in entity_types:
controls = organize_runs('synthetic_cooccurrence_%s_%s'%(entity, class_system),
cooccurrence_base_file_name%(entity, class_system),
coocurrence_controls_directory,
controls=controls,
multiple_metrics=False,
target_metric='Class_CoOccurrence_Count_%s'%entity)
# In[21]:
store = pd.HDFStore(data_directory+'Class_Relatedness_Networks/class_relatedness_networks_controls_organized_%s.h5'%class_system,
mode='a', table=True)
store.put('/controls_%s'%class_system, controls, 'table', append=False)
store.close()
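# Read-back sketch (not in the original notebook): reopen the organized
# controls to confirm the write; assumes the same data_directory and
# class_system as above and a pandas version that still ships Panel4D.
controls_check = pd.read_hdf(
    data_directory+'Class_Relatedness_Networks/class_relatedness_networks_controls_organized_%s.h5'%class_system,
    'controls_%s'%class_system)
print(controls_check.shape)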
|
[
"[email protected]"
] | |
e85cf3dcfa4ce7b1ff0c56d2b706b9c987245f76
|
0d8d40eba3eb0b54e6e7346c5e8160d922b0953f
|
/alalala_17403/urls.py
|
cc63fdcba3efd3f3eabe93788f5d84dcb48ae98d
|
[] |
no_license
|
crowdbotics-apps/alalala-17403
|
e96a2e1197965e43105873efd82c37e9649b9d4f
|
bae7b1f3b1fd7a12a6a8ac34073565b5f3ddcfa2
|
refs/heads/master
| 2023-05-19T02:29:20.073969 | 2020-05-26T09:36:07 | 2020-05-26T09:36:07 | 267,001,767 | 0 | 0 | null | 2021-06-12T13:03:48 | 2020-05-26T09:34:37 |
Python
|
UTF-8
|
Python
| false | false | 2,046 |
py
|
"""alalala_17403 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from allauth.account.views import confirm_email
from rest_framework import permissions
from drf_yasg.views import get_schema_view
from drf_yasg import openapi
urlpatterns = [
path("", include("home.urls")),
path("accounts/", include("allauth.urls")),
path("api/v1/", include("home.api.v1.urls")),
path("admin/", admin.site.urls),
path("users/", include("users.urls", namespace="users")),
path("rest-auth/", include("rest_auth.urls")),
# Override email confirm to use allauth's HTML view instead of rest_auth's API view
path("rest-auth/registration/account-confirm-email/<str:key>/", confirm_email),
path("rest-auth/registration/", include("rest_auth.registration.urls")),
path("api/v1/", include("dating.api.v1.urls")),
path("dating/", include("dating.urls")),
path("home/", include("home.urls")),
]
admin.site.site_header = "alalala"
admin.site.site_title = "alalala Admin Portal"
admin.site.index_title = "alalala Admin"
# swagger
schema_view = get_schema_view(
openapi.Info(
title="alalala API",
default_version="v1",
description="API documentation for alalala App",
),
public=True,
permission_classes=(permissions.IsAuthenticated,),
)
urlpatterns += [
path("api-docs/", schema_view.with_ui("swagger", cache_timeout=0), name="api_docs")
]
|
[
"[email protected]"
] | |
80f631cf3d47346c28c011a1d97a2ee7e6ad5594
|
7cad80770718e655766dd038bc654ebe1ad2ab3e
|
/network/fedoracoin.py
|
4daa642227ed53cbb62e9be465e0740e0ab9d2c5
|
[] |
no_license
|
kindanew/clove_bounty
|
d0e1f917dff5b3a23eae6a05d6449af068fb4dc9
|
e707eebde301ac3728a5835d10d927aa83d27ab5
|
refs/heads/master
| 2021-04-29T00:29:37.698092 | 2018-02-24T02:34:33 | 2018-02-24T02:34:33 | 121,830,043 | 0 | 0 | null | 2018-02-17T05:34:54 | 2018-02-17T05:26:51 |
Python
|
UTF-8
|
Python
| false | false | 822 |
py
|
from clove.network.bitcoin import Bitcoin
class FedoraCoin(Bitcoin):
"""
Class with all the necessary FedoraCoin (TIPS) network information based on
https://github.com/fedoracoin-dev/fedoracoin/blob/master-0.9/src/chainparams.cpp
(date of access: 02/17/2018)
"""
name = 'fedoracoin'
symbols = ('TIPS', )
    seeds = ('seed.fedoracoin.net', '45.55.250.196', 'tips1.netcraft.ch', 'tips2.netcraft.ch')
port = 44890
class FedoraCoinTestNet(FedoraCoin):
"""
Class with all the necessary FedoraCoin (TIPS) testing network information based on
https://github.com/fedoracoin-dev/fedoracoin/blob/master-0.9/src/chainparams.cpp
(date of access: 02/17/2018)
"""
name = 'test-fedoracoin'
seeds = ('115.29.37.248', 'testnet-dnsseed.fedoracoin.com')
port = 19336
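if __name__ == '__main__':
    # Smoke-test sketch (not in the original module): the network parameters
    # are plain class attributes, so they can be inspected without
    # instantiating (clove's Bitcoin base class may expect constructor args).
    print(FedoraCoin.name, FedoraCoin.symbols, FedoraCoin.port)
    print(FedoraCoinTestNet.name, FedoraCoinTestNet.port)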
|
[
"[email protected]"
] | |
d7e4f73db84e320d839e02f46066f13f6057272f
|
09e57dd1374713f06b70d7b37a580130d9bbab0d
|
/benchmark/startQiskit_noisy2986.py
|
e015914e151e5d9651cefff1550ef4347ec47356
|
[
"BSD-3-Clause"
] |
permissive
|
UCLA-SEAL/QDiff
|
ad53650034897abb5941e74539e3aee8edb600ab
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
refs/heads/main
| 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,210 |
py
|
# qubit number=4
# total number=39
import cirq
import qiskit
from qiskit.providers.aer import QasmSimulator
from qiskit.test.mock import FakeVigo
from qiskit import QuantumCircuit, QuantumRegister, ClassicalRegister
from qiskit import BasicAer, execute, transpile
from pprint import pprint
from math import log2
import numpy as np
import networkx as nx
def bitwise_xor(s: str, t: str) -> str:
length = len(s)
res = []
for i in range(length):
res.append(str(int(s[i]) ^ int(t[i])))
return ''.join(res[::-1])
def bitwise_dot(s: str, t: str) -> str:
length = len(s)
res = 0
for i in range(length):
res += int(s[i]) * int(t[i])
return str(res % 2)
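# Example: bitwise_dot("111", "101") sums 1*1 + 1*0 + 1*1 = 2, and 2 % 2 == 0,
# so the returned parity is "0".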
def build_oracle(n: int, f) -> QuantumCircuit:
# implement the oracle O_f
# NOTE: use multi_control_toffoli_gate ('noancilla' mode)
# https://qiskit.org/documentation/_modules/qiskit/aqua/circuits/gates/multi_control_toffoli_gate.html
# https://quantumcomputing.stackexchange.com/questions/3943/how-do-you-implement-the-toffoli-gate-using-only-single-qubit-and-cnot-gates
# https://quantumcomputing.stackexchange.com/questions/2177/how-can-i-implement-an-n-bit-toffoli-gate
controls = QuantumRegister(n, "ofc")
target = QuantumRegister(1, "oft")
oracle = QuantumCircuit(controls, target, name="Of")
for i in range(2 ** n):
rep = np.binary_repr(i, n)
if f(rep) == "1":
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
oracle.mct(controls, target[0], None, mode='noancilla')
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
# oracle.barrier()
return oracle
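# Example: build_oracle(2, lambda rep: "1" if rep == "11" else "0") returns an
# oracle that flips the target only when both controls are 1 -- i.e. a plain
# Toffoli, since no X gates are wrapped around the mct call for input "11".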
def make_circuit(n: int, f) -> QuantumCircuit:
# circuit begin
input_qubit = QuantumRegister(n,"qc")
classical = ClassicalRegister(n, "qm")
prog = QuantumCircuit(input_qubit, classical)
prog.h(input_qubit[3]) # number=31
prog.cz(input_qubit[0],input_qubit[3]) # number=32
prog.h(input_qubit[3]) # number=33
prog.h(input_qubit[3]) # number=30
prog.x(input_qubit[3]) # number=11
prog.h(input_qubit[3]) # number=13
prog.cz(input_qubit[0],input_qubit[3]) # number=14
prog.h(input_qubit[1]) # number=18
prog.cz(input_qubit[3],input_qubit[1]) # number=19
prog.z(input_qubit[3]) # number=25
prog.cx(input_qubit[0],input_qubit[3]) # number=36
prog.x(input_qubit[3]) # number=37
prog.cx(input_qubit[0],input_qubit[3]) # number=38
prog.h(input_qubit[1]) # number=20
prog.rx(-3.141592653589793,input_qubit[3]) # number=26
prog.h(input_qubit[3]) # number=15
prog.h(input_qubit[1]) # number=2
prog.h(input_qubit[2]) # number=3
prog.h(input_qubit[2]) # number=17
prog.h(input_qubit[3]) # number=4
prog.h(input_qubit[0]) # number=5
oracle = build_oracle(n-1, f)
prog.append(oracle.to_gate(),[input_qubit[i] for i in range(n-1)]+[input_qubit[n-1]])
prog.h(input_qubit[1]) # number=6
prog.h(input_qubit[2]) # number=7
prog.h(input_qubit[3]) # number=8
prog.h(input_qubit[0]) # number=9
prog.h(input_qubit[0]) # number=27
prog.cz(input_qubit[1],input_qubit[0]) # number=28
prog.h(input_qubit[0]) # number=29
prog.cx(input_qubit[1],input_qubit[0]) # number=22
prog.cx(input_qubit[2],input_qubit[1]) # number=34
prog.x(input_qubit[1]) # number=23
prog.x(input_qubit[1]) # number=24
# circuit end
for i in range(n):
prog.measure(input_qubit[i], classical[i])
return prog
if __name__ == '__main__':
a = "111"
b = "0"
f = lambda rep: bitwise_xor(bitwise_dot(a, rep), b)
prog = make_circuit(4,f)
backend = FakeVigo()
    sample_shot = 8000
info = execute(prog, backend=backend, shots=sample_shot).result().get_counts()
backend = FakeVigo()
circuit1 = transpile(prog,backend,optimization_level=2)
writefile = open("../data/startQiskit_noisy2986.csv","w")
print(info,file=writefile)
print("results end", file=writefile)
print(circuit1.__len__(),file=writefile)
print(circuit1,file=writefile)
writefile.close()
|
[
"[email protected]"
] | |
848a1c59e318fe3cedaa0d7702194ef1a1d6b211
|
a8062308fb3bf6c8952257504a50c3e97d801294
|
/problems/N538_Convert_BST_To_Greater_Tree.py
|
0cf76aad40516b7710da549a39a22762f8d82918
|
[] |
no_license
|
wan-catherine/Leetcode
|
650d697a873ad23c0b64d08ad525bf9fcdb62b1b
|
238995bd23c8a6c40c6035890e94baa2473d4bbc
|
refs/heads/master
| 2023-09-01T00:56:27.677230 | 2023-08-31T00:49:31 | 2023-08-31T00:49:31 | 143,770,000 | 5 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 936 |
py
|
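# Definition for a binary tree node (assumed by this solution; LeetCode
# supplies it at runtime):
# class TreeNode(object):
#     def __init__(self, x):
#         self.val = x
#         self.left = None
#         self.right = None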
class Solution(object):
def convertBST(self, root):
"""
:type root: TreeNode
:rtype: TreeNode
"""
if not root:
return None
stack = [root]
prev = 0
visited = set()
visited.add(root)
visited.add(None)
while stack:
node = stack[-1]
if node.left in visited and node.right in visited:
node.val += prev
prev = node.val
stack.pop()
if node.right and node.right not in visited:
stack.append(node.right)
visited.add(node.right)
continue
if node.left and node.left not in visited:
node.val += prev
prev = node.val
stack.pop()
stack.append(node.left)
visited.add(node.left)
continue
return root
|
[
"[email protected]"
] | |
6521b280d000c57c00c0dacef579bcfd16457b95
|
aea8fea216234fd48269e4a1830b345c52d85de2
|
/fhir/resources/communicationrequest.py
|
114666d1c01e00de50e3e79b77fcd3abdd84ed86
|
[
"BSD-3-Clause"
] |
permissive
|
mmabey/fhir.resources
|
67fce95c6b35bfdc3cbbc8036e02c962a6a7340c
|
cc73718e9762c04726cd7de240c8f2dd5313cbe1
|
refs/heads/master
| 2023-04-12T15:50:30.104992 | 2020-04-11T17:21:36 | 2020-04-11T17:21:36 | 269,712,884 | 0 | 0 |
NOASSERTION
| 2020-06-05T17:03:04 | 2020-06-05T17:03:04 | null |
UTF-8
|
Python
| false | false | 13,537 |
py
|
# -*- coding: utf-8 -*-
"""
Profile: http://hl7.org/fhir/StructureDefinition/CommunicationRequest
Release: R4
Version: 4.0.1
Build ID: 9346c8cc45
Last updated: 2019-11-01T09:29:23.356+11:00
"""
import sys
from . import backboneelement, domainresource
class CommunicationRequest(domainresource.DomainResource):
""" A request for information to be sent to a receiver.
A request to convey information; e.g. the CDS system proposes that an alert
be sent to a responsible provider, the CDS system proposes that the public
health agency be notified about a reportable condition.
"""
resource_type = "CommunicationRequest"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.about = None
""" Resources that pertain to this communication request.
List of `FHIRReference` items referencing `['Resource']` (represented as `dict` in JSON). """
self.authoredOn = None
""" When request transitioned to being actionable.
Type `FHIRDate` (represented as `str` in JSON). """
self.basedOn = None
""" Fulfills plan or proposal.
List of `FHIRReference` items referencing `['Resource']` (represented as `dict` in JSON). """
self.category = None
""" Message category.
List of `CodeableConcept` items (represented as `dict` in JSON). """
self.doNotPerform = None
""" True if request is prohibiting action.
Type `bool`. """
self.encounter = None
""" Encounter created as part of.
Type `FHIRReference` referencing `['Encounter']` (represented as `dict` in JSON). """
self.groupIdentifier = None
""" Composite request this is part of.
Type `Identifier` (represented as `dict` in JSON). """
self.identifier = None
""" Unique identifier.
List of `Identifier` items (represented as `dict` in JSON). """
self.medium = None
""" A channel of communication.
List of `CodeableConcept` items (represented as `dict` in JSON). """
self.note = None
""" Comments made about communication request.
List of `Annotation` items (represented as `dict` in JSON). """
self.occurrenceDateTime = None
""" When scheduled.
Type `FHIRDate` (represented as `str` in JSON). """
self.occurrencePeriod = None
""" When scheduled.
Type `Period` (represented as `dict` in JSON). """
self.payload = None
""" Message payload.
List of `CommunicationRequestPayload` items (represented as `dict` in JSON). """
self.priority = None
""" routine | urgent | asap | stat.
Type `str`. """
self.reasonCode = None
""" Why is communication needed?.
List of `CodeableConcept` items (represented as `dict` in JSON). """
self.reasonReference = None
""" Why is communication needed?.
List of `FHIRReference` items referencing `['Condition', 'Observation', 'DiagnosticReport', 'DocumentReference']` (represented as `dict` in JSON). """
self.recipient = None
""" Message recipient.
List of `FHIRReference` items referencing `['Device', 'Organization', 'Patient', 'Practitioner', 'PractitionerRole', 'RelatedPerson', 'Group', 'CareTeam', 'HealthcareService']` (represented as `dict` in JSON). """
self.replaces = None
""" Request(s) replaced by this request.
List of `FHIRReference` items referencing `['CommunicationRequest']` (represented as `dict` in JSON). """
self.requester = None
""" Who/what is requesting service.
Type `FHIRReference` referencing `['Practitioner', 'PractitionerRole', 'Organization', 'Patient', 'RelatedPerson', 'Device']` (represented as `dict` in JSON). """
self.sender = None
""" Message sender.
Type `FHIRReference` referencing `['Device', 'Organization', 'Patient', 'Practitioner', 'PractitionerRole', 'RelatedPerson', 'HealthcareService']` (represented as `dict` in JSON). """
self.status = None
""" draft | active | on-hold | revoked | completed | entered-in-error |
unknown.
Type `str`. """
self.statusReason = None
""" Reason for current status.
Type `CodeableConcept` (represented as `dict` in JSON). """
self.subject = None
""" Focus of message.
Type `FHIRReference` referencing `['Patient', 'Group']` (represented as `dict` in JSON). """
super(CommunicationRequest, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(CommunicationRequest, self).elementProperties()
js.extend(
[
(
"about",
"about",
fhirreference.FHIRReference,
"Reference",
True,
None,
False,
),
(
"authoredOn",
"authoredOn",
fhirdate.FHIRDate,
"dateTime",
False,
None,
False,
),
(
"basedOn",
"basedOn",
fhirreference.FHIRReference,
"Reference",
True,
None,
False,
),
(
"category",
"category",
codeableconcept.CodeableConcept,
"CodeableConcept",
True,
None,
False,
),
("doNotPerform", "doNotPerform", bool, "boolean", False, None, False),
(
"encounter",
"encounter",
fhirreference.FHIRReference,
"Reference",
False,
None,
False,
),
(
"groupIdentifier",
"groupIdentifier",
identifier.Identifier,
"Identifier",
False,
None,
False,
),
(
"identifier",
"identifier",
identifier.Identifier,
"Identifier",
True,
None,
False,
),
(
"medium",
"medium",
codeableconcept.CodeableConcept,
"CodeableConcept",
True,
None,
False,
),
(
"note",
"note",
annotation.Annotation,
"Annotation",
True,
None,
False,
),
(
"occurrenceDateTime",
"occurrenceDateTime",
fhirdate.FHIRDate,
"dateTime",
False,
"occurrence",
False,
),
(
"occurrencePeriod",
"occurrencePeriod",
period.Period,
"Period",
False,
"occurrence",
False,
),
(
"payload",
"payload",
CommunicationRequestPayload,
"CommunicationRequestPayload",
True,
None,
False,
),
("priority", "priority", str, "code", False, None, False),
(
"reasonCode",
"reasonCode",
codeableconcept.CodeableConcept,
"CodeableConcept",
True,
None,
False,
),
(
"reasonReference",
"reasonReference",
fhirreference.FHIRReference,
"Reference",
True,
None,
False,
),
(
"recipient",
"recipient",
fhirreference.FHIRReference,
"Reference",
True,
None,
False,
),
(
"replaces",
"replaces",
fhirreference.FHIRReference,
"Reference",
True,
None,
False,
),
(
"requester",
"requester",
fhirreference.FHIRReference,
"Reference",
False,
None,
False,
),
(
"sender",
"sender",
fhirreference.FHIRReference,
"Reference",
False,
None,
False,
),
("status", "status", str, "code", False, None, True),
(
"statusReason",
"statusReason",
codeableconcept.CodeableConcept,
"CodeableConcept",
False,
None,
False,
),
(
"subject",
"subject",
fhirreference.FHIRReference,
"Reference",
False,
None,
False,
),
]
)
return js
class CommunicationRequestPayload(backboneelement.BackboneElement):
""" Message payload.
Text, attachment(s), or resource(s) to be communicated to the recipient.
"""
resource_type = "CommunicationRequestPayload"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.contentAttachment = None
""" Message part content.
Type `Attachment` (represented as `dict` in JSON). """
self.contentReference = None
""" Message part content.
Type `FHIRReference` referencing `['Resource']` (represented as `dict` in JSON). """
self.contentString = None
""" Message part content.
Type `str`. """
super(CommunicationRequestPayload, self).__init__(
jsondict=jsondict, strict=strict
)
def elementProperties(self):
js = super(CommunicationRequestPayload, self).elementProperties()
js.extend(
[
(
"contentAttachment",
"contentAttachment",
attachment.Attachment,
"Attachment",
False,
"content",
True,
),
(
"contentReference",
"contentReference",
fhirreference.FHIRReference,
"Reference",
False,
"content",
True,
),
(
"contentString",
"contentString",
str,
"string",
False,
"content",
True,
),
]
)
return js
try:
from . import annotation
except ImportError:
annotation = sys.modules[__package__ + ".annotation"]
try:
from . import attachment
except ImportError:
attachment = sys.modules[__package__ + ".attachment"]
try:
from . import codeableconcept
except ImportError:
codeableconcept = sys.modules[__package__ + ".codeableconcept"]
try:
from . import fhirdate
except ImportError:
fhirdate = sys.modules[__package__ + ".fhirdate"]
try:
from . import fhirreference
except ImportError:
fhirreference = sys.modules[__package__ + ".fhirreference"]
try:
from . import identifier
except ImportError:
identifier = sys.modules[__package__ + ".identifier"]
try:
from . import period
except ImportError:
period = sys.modules[__package__ + ".period"]
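# Usage sketch (not part of the generated file): constructing the resource from
# a minimal JSON dict; per elementProperties above, `status` is the only
# required top-level field.
#
#     req = CommunicationRequest({"resourceType": "CommunicationRequest",
#                                 "status": "active"})
#     req.as_json()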
|
[
"[email protected]"
] | |
5976b48f4a882bac42ead1b05645367c90e2f149
|
69bcc45028038351a7f891025df1f8e7d4b855f1
|
/unsupervised_learning/0x02-hmm/4-viterbi.py
|
d1b2dad6af9348540262ec79bcaa71f9e3174c85
|
[] |
no_license
|
linkjavier/holbertonschool-machine_learning
|
6db799844821d450fed2a33a8819cb8df0fef911
|
c7b6ea4c37b7c5dc41e63cdb8142b3cdfb3e1d23
|
refs/heads/main
| 2023-08-17T21:00:24.182003 | 2021-09-09T05:47:06 | 2021-09-09T05:47:06 | 304,503,773 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,642 |
py
|
#!/usr/bin/env python3
""" The Viretbi Algorithm """
import numpy as np
def viterbi(Observation, Emission, Transition, Initial):
""" Function that calculates the most likely sequence
of hidden states for a hidden markov model
"""
if not isinstance(Observation, np.ndarray) or len(Observation.shape) != 1:
return (None, None)
if not isinstance(Emission, np.ndarray) or len(Emission.shape) != 2:
return None, None
if not isinstance(Transition, np.ndarray) or len(
Transition.shape) != 2 or \
Transition.shape[0] != Transition.shape[1]:
return None, None
if not isinstance(Initial, np.ndarray) or len(Initial.shape) != 2:
return None, None
    if Emission.shape[0] != Transition.shape[0] or \
            Emission.shape[0] != Initial.shape[0]:
        return None, None
if Initial.shape[1] != 1:
return None, None
T = Observation.shape[0]
N, _ = Emission.shape
zeroMatrix = np.zeros([N, T])
f = np.empty([N, T], dtype=int)
zeroMatrix[:, 0] = np.multiply(Initial.T, Emission[:, Observation[0]])
for t in range(1, T):
for i in range(N):
zeroMatrix[i, t] = np.max(
zeroMatrix[:, t - 1] * Transition[:, i]) *\
Emission[i, Observation[t]]
f[i, t] = np.argmax(zeroMatrix[:, t - 1] * Transition[:, i])
path = np.zeros(T)
path[T - 1] = np.argmax(zeroMatrix[:, T - 1])
for i in range(T - 2, -1, -1):
path[i] = f[int(path[i + 1]), i + 1]
P = np.max(zeroMatrix[:, T - 1:], axis=0)[0]
path = [int(i) for i in path]
return (path, P)
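if __name__ == '__main__':
    # Smoke-test sketch with a made-up 2-state, 2-symbol HMM (not part of the
    # original exercise): decode a short observation sequence.
    Emission = np.array([[0.9, 0.1],
                         [0.2, 0.8]])
    Transition = np.array([[0.7, 0.3],
                           [0.4, 0.6]])
    Initial = np.array([[0.6], [0.4]])
    Observation = np.array([0, 1, 1, 0])
    path, P = viterbi(Observation, Emission, Transition, Initial)
    print(path, P)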
|
[
"[email protected]"
] | |
d574c0c5bff1863e1788e44fe024613dc1f96569
|
1f21f836d8eaf9e68ef102612787912011dafa4b
|
/bc_study/two_stock_datafeed.py
|
e104c8f48858dec4b210c9d1ffb5b05f877e576b
|
[] |
no_license
|
abcijkxyz/think_in_backtrader
|
2461b20dc24bff11c1cd2693a74da1a1c28b7bd1
|
1645a7527d24929a900755186c18efb65b3a1672
|
refs/heads/main
| 2023-03-09T06:46:05.943303 | 2021-02-28T10:26:52 | 2021-02-28T10:26:52 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,741 |
py
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
'''
@File : two_stock_datafeed.py
@Time : 2020/11/27 13:33:27
@Author : Jeffrey Wang
@Version : 1.0
@Contact : [email protected]
@Desc : Example of reading two stock DataFeeds at the same time.
        Reads daily bars for two stocks and prints the close price each day.
'''
import backtrader as bt
import bc_study.tushare_csv_datafeed as ts_df
# Demo strategy: logs each stock's close price every day
class DemoStrategy(bt.Strategy):
def log(self, txt, dt=None):
''' Logging function for this strategy'''
dt = dt or self.datas[0].datetime.date(0)
print('%s, %s' % (dt.isoformat(), txt))
def __init__(self):
        # Keep references to each DataFeed's close price line
self.dataclose_A = self.datas[0].close
self.dataclose_B = self.datas[1].close
def next(self):
self.log('A Close={0}'.format(self.dataclose_A[0]))
self.log('B Close={0}'.format(self.dataclose_B[0]))
# Run the backtest
def engine_run():
    # Initialize the engine
    cerebro = bt.Cerebro()
    # Add the strategy to the Cerebro engine
    cerebro.addstrategy(DemoStrategy)
    # Set the starting cash
    cerebro.broker.setcash(200000.0)
    # Load data for the two stocks
data1 = ts_df.get_csv_daily_data(stock_id="600016.SH", start="20190101", end="20190105")
cerebro.adddata(data1)
data2 = ts_df.get_csv_daily_data(stock_id="000001.SZ", start="20190101", end="20190105")
cerebro.adddata(data2)
    print('Starting portfolio value: %.2f' % cerebro.broker.getvalue())
    # Kick off the backtest run
    result = cerebro.run()
    print("Backtest run returned = {0}".format(result))
    print('Final portfolio value: %.2f' % cerebro.broker.getvalue())
if __name__ == '__main__':
engine_run()
|
[
"[email protected]"
] |