Dataset schema (29 columns; each row below is one source file):

| Column | Dtype | Length / values |
|---|---|---|
| blob_id | string | length 40 to 40 |
| directory_id | string | length 40 to 40 |
| path | string | length 3 to 616 |
| content_id | string | length 40 to 40 |
| detected_licenses | sequence | length 0 to 112 |
| license_type | string | 2 classes |
| repo_name | string | length 5 to 115 |
| snapshot_id | string | length 40 to 40 |
| revision_id | string | length 40 to 40 |
| branch_name | string | 777 classes |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 to 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 to 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 to 2023-09-06 01:08:06 |
| github_id | int64 (nullable) | 4.92k to 681M |
| star_events_count | int64 | 0 to 209k |
| fork_events_count | int64 | 0 to 110k |
| gha_license_id | string | 22 classes |
| gha_event_created_at | timestamp[us] (nullable) | 2012-06-04 01:52:49 to 2023-09-14 21:59:50 |
| gha_created_at | timestamp[us] (nullable) | 2008-05-22 07:58:19 to 2023-08-21 12:35:19 |
| gha_language | string | 149 classes |
| src_encoding | string | 26 classes |
| language | string | 1 class |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 3 to 10.2M |
| extension | string | 188 classes |
| content | string | length 3 to 10.2M |
| authors | sequence | length 1 to 1 |
| author_id | string | length 1 to 132 |
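A minimal sketch of how a dataset with this schema could be read with the Hugging Face `datasets` library. The dataset path below is a placeholder (the actual dataset identifier is not given in this dump), and streaming is used because `content` can be up to 10.2 MB per row.

```python
from datasets import load_dataset

# Placeholder path: substitute the real dataset identifier.
ds = load_dataset("org/code-files-dump", split="train", streaming=True)

for row in ds.take(3):
    # Each row carries repo/file metadata plus the full source text in `content`.
    print(row["repo_name"], row["path"], row["license_type"], row["length_bytes"])
    print(row["content"][:200])
```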
659225b6d0ed14ffec7d992922f69f21209fb358 | 1f468bc6edd2619c4fcc213f44fc64db129eaa51 | /tf_agents/bandits/policies/linalg.py | 8d54bd8cbfb1f5f4c655288016339d79ba97f24e | [
"Apache-2.0"
] | permissive | sanket-kamthe/agents | 8ae89da7d581d6d9165ad87ad10d10375e7f4d68 | 5876d78cacaec1cf5994f93ad0aa48f347f7fff0 | refs/heads/master | 2021-06-21T03:09:46.953822 | 2021-02-05T22:19:15 | 2021-02-05T22:19:15 | 181,042,465 | 2 | 0 | Apache-2.0 | 2021-02-05T22:19:16 | 2019-04-12T16:12:06 | Python | UTF-8 | Python | false | false | 7,331 | py | # coding=utf-8
# Copyright 2020 The TF-Agents Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility code for linear algebra functions."""
from __future__ import absolute_import
from __future__ import division
# Using Type Annotations.
from __future__ import print_function
import tensorflow as tf # pylint: disable=g-explicit-tensorflow-version-import
from tf_agents.typing import types
from tf_agents.utils import common
def _cg_check_shapes(a_mat, b):
if a_mat.shape[0] != a_mat.shape[1] or a_mat.shape.rank != 2:
raise ValueError('`a_mat` must be rank 2 square matrix; '
'got shape {}.'.format(a_mat.shape))
if a_mat.shape[1] != b.shape[0]:
raise ValueError('The dims of `a_mat` and `b` are not compatible; '
'got shapes {} and {}.'.format(a_mat.shape, b.shape))
@common.function
def conjugate_gradient(a_mat: types.Tensor,
b: types.Tensor,
tol: float = 1e-10) -> types.Float:
"""Returns `x` such that `A * x = b`.
Implements the Conjugate Gradient method.
https://en.wikipedia.org/wiki/Conjugate_gradient_method
Args:
a_mat: a Symmetric Positive Definite matrix, represented as a `Tensor` of
shape `[n, n]`.
b: a `Tensor` of shape `[n, 1]`.
tol: (float) desired tolerance on the residual.
Returns:
x: `Tensor` `x` of shape `[n, 1]` such that `A * x = b`.
Raises:
ValueError: if `a_mat` is not square or `a_mat` and `b` have incompatible
shapes.
"""
_cg_check_shapes(a_mat, b)
n = tf.shape(b)[0]
x = tf.zeros_like(b)
r = b - tf.matmul(a_mat, x)
p = r
rs_old = tf.reduce_sum(r * r)
rs_new = rs_old
def body_fn(i, x, p, r, rs_old, rs_new):
"""One iteration of CG."""
a_x_p = tf.matmul(a_mat, p)
alpha = rs_old / tf.reduce_sum(p * a_x_p)
x = x + alpha * p
r = r - alpha * a_x_p
rs_new = tf.reduce_sum(r * r)
p = r + (rs_new / rs_old) * p
rs_old = rs_new
i = i + 1
return i, x, p, r, rs_old, rs_new
def while_exit_cond(i, x, p, r, rs_old, rs_new):
"""Exit the loop when n is reached or when the residual becomes small."""
del x # unused
del p # unused
del r # unused
del rs_old # unused
i_cond = tf.less(i, n)
residual_cond = tf.greater(tf.sqrt(rs_new), tol)
return tf.logical_and(i_cond, residual_cond)
_, x, _, _, _, _ = tf.while_loop(
while_exit_cond,
body_fn,
[tf.constant(0), x, p, r, rs_old, rs_new],
parallel_iterations=1)
return x
@common.function
def conjugate_gradient_solve(a_mat: types.Tensor,
b_mat: types.Tensor,
tol: float = 1e-10) -> types.Tensor:
"""Returns `X` such that `A * X = B`.
Uses Conjugate Gradient to solve many linear systems of equations with the
same matrix `a_mat` and multiple right hand sides provided as columns in
the matrix `b_mat`.
Args:
a_mat: a Symmetric Positive Definite matrix, represented as a `Tensor` of
shape `[n, n]`.
b_mat: a `Tensor` of shape `[n, k]`.
tol: (float) desired tolerance on the residual.
Returns:
X: `Tensor` `X` of shape `[n, k]` such that `A * X = B`.
Raises:
ValueError: if `a_mat` is not square or `a_mat` and `b_mat` have
incompatible shapes.
"""
# Allows for flexible shape handling. If the shape is statically known, it
# will use the first part. If the shape is not statically known, tf.shape()
# will be used.
n = tf.compat.dimension_value(b_mat.shape[0]) or tf.shape(b_mat)[0]
k = tf.compat.dimension_value(b_mat.shape[1]) or tf.shape(b_mat)[1]
x = tf.zeros_like(b_mat)
def body_fn(i, x):
"""Solve one linear system of equations with the `i`-th column of b_mat."""
b_vec = tf.slice(b_mat, begin=[0, i], size=[n, 1])
x_sol = conjugate_gradient(a_mat, b_vec, tol)
indices = tf.concat([tf.reshape(tf.range(n, dtype=tf.int32), [n, 1]),
i * tf.ones([n, 1], dtype=tf.int32)], axis=-1)
x = tf.tensor_scatter_nd_update(
tensor=x, indices=indices, updates=tf.squeeze(x_sol, 1))
x.set_shape(b_mat.shape)
i = i + 1
return i, x
_, x = tf.while_loop(
lambda i, _: i < k,
body_fn,
loop_vars=[tf.constant(0), x],
parallel_iterations=10)
return x
def _check_shapes(a_inv: types.Tensor, u: types.Tensor):
if a_inv.shape[0] != a_inv.shape[1] or a_inv.shape.rank != 2:
raise ValueError('`a_inv` must be rank 2 square matrix; '
'got shape {}.'.format(a_inv.shape))
if u.shape.rank != 2:
raise ValueError('`u` must be rank 2 matrix; '
'got shape {}.'.format(u.shape))
if a_inv.shape[1] != u.shape[1]:
raise ValueError('`a_inv` and `u` must have shapes [m, m] and [n, m]; '
'got shapes {} and {}.'.format(a_inv.shape, u.shape))
def simplified_woodbury_update(a_inv: types.Float,
u: types.Float) -> types.Float:
"""Returns `w` such that `inverse(a + u.T.dot(u)) = a_inv + w`.
Makes use of the Woodbury matrix identity. See
https://en.wikipedia.org/wiki/Woodbury_matrix_identity.
  **NOTE**: This implementation assumes that `a_inv` is symmetric. Since it is
  too expensive to verify symmetry, the function silently returns a wrong
  answer when `a` is not symmetric.
Args:
a_inv: an invertible SYMMETRIC `Tensor` of shape `[m, m]`.
u: a `Tensor` of shape `[n, m]`.
Returns:
A `Tensor` `w` of shape `[m, m]` such that
`inverse(a + u.T.dot(u)) = a_inv + w`.
Raises:
ValueError: if `a_inv` is not square or `a_inv` and `u` have incompatible
shapes.
"""
_check_shapes(a_inv, u)
u_x_a_inv = tf.matmul(u, a_inv)
capacitance = (
tf.eye(tf.shape(u)[0], dtype=u.dtype) +
tf.matmul(u_x_a_inv, u, transpose_b=True))
return -1. * tf.matmul(
u_x_a_inv, tf.linalg.solve(capacitance, u_x_a_inv), transpose_a=True)
def update_inverse(a_inv: types.Float, x: types.Float) -> types.Float:
"""Updates the inverse using the Woodbury matrix identity.
Given a matrix `A` of size d-by-d and a matrix `X` of size k-by-d, this
function computes the inverse of B = A + X^T X, assuming that the inverse of
A is available.
Reference:
https://en.wikipedia.org/wiki/Woodbury_matrix_identity
Args:
a_inv: a `Tensor` of shape [`d`, `d`]. This is the current inverse of `A`.
x: a `Tensor` of shape [`k`, `d`].
Returns:
The update that needs to be added to 'a_inv' to compute the inverse.
If `x` is empty, a zero matrix is returned.
"""
batch_size = tf.shape(x)[0]
def true_fn():
return tf.zeros_like(a_inv)
def false_fn():
return simplified_woodbury_update(a_inv, x)
a_inv_update = tf.cond(tf.equal(batch_size, 0), true_fn, false_fn)
return a_inv_update
| [
"[email protected]"
] | |
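A minimal usage sketch for the `conjugate_gradient_solve` and `update_inverse` helpers in the `linalg.py` file above. It builds a random symmetric positive-definite matrix and cross-checks the results against `tf.linalg`; the sizes and import path are illustrative assumptions, not part of the original module.

```python
import tensorflow as tf
from tf_agents.bandits.policies import linalg  # module shown above

n, k = 8, 3
u = tf.random.normal([n, n])
a_mat = tf.matmul(u, u, transpose_b=True) + 0.5 * tf.eye(n)  # SPD by construction
b_mat = tf.random.normal([n, k])

# Conjugate-gradient solve vs. the dense reference solver.
x_cg = linalg.conjugate_gradient_solve(a_mat, b_mat)
x_ref = tf.linalg.solve(a_mat, b_mat)
print('CG error:', tf.reduce_max(tf.abs(x_cg - x_ref)).numpy())

# Woodbury-style update: inverse(a + x^T x) should equal a_inv + update.
a_inv = tf.linalg.inv(a_mat)
x_new = tf.random.normal([4, n])
woodbury = a_inv + linalg.update_inverse(a_inv, x_new)
direct = tf.linalg.inv(a_mat + tf.matmul(x_new, x_new, transpose_a=True))
print('Woodbury error:', tf.reduce_max(tf.abs(woodbury - direct)).numpy())
```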
49de6098f4d8b05a00fbb57f00b1af5d4ea13289 | 3bdff0b728d8754a9a424c5eae79f1bc417d8728 | /pi_db/client/entity/UserEntity.py | 7ccab845eb7258395a5196a071d26e75d43dc305 | [] | no_license | wangshiyu/pi | 92e8ac753f8de878d080a7dd0eecd8aa2a276d18 | e6c42f2c24407e5857bde4ba777ef78e88cb9263 | refs/heads/master | 2023-02-12T12:05:27.525452 | 2021-01-12T13:57:30 | 2021-01-12T13:57:30 | 291,059,595 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 577 | py | # encoding: utf-8
# !/usr/bin/python3
from sqlalchemy import Column, Integer, String
from pi_db.client.entity.ClientBaseEntity import ClientBaseEntity
from sqlalchemy.ext.declarative import declarative_base
DeclarativeBase = declarative_base()
"""
UserEntity object
User table
"""
class UserEntity(DeclarativeBase, ClientBaseEntity):
    # Table name:
    __tablename__ = 'tb_user'
    # Table structure:
    name = Column(String(12))  # name
    password = Column(String(32))  # password
    privilegeLevel = Column(Integer, name='privilege_level', default=0)  # privilege level
| [
"[email protected]"
] | |
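A sketch of how the `UserEntity` model above might be exercised against an in-memory SQLite database. `ClientBaseEntity` comes from the same project and is assumed here to contribute the primary-key and bookkeeping columns; if it does not, a key column would need to be added before `create_all` succeeds.

```python
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from pi_db.client.entity.UserEntity import UserEntity, DeclarativeBase

engine = create_engine('sqlite:///:memory:')   # throwaway database
DeclarativeBase.metadata.create_all(engine)    # creates tb_user

session = sessionmaker(bind=engine)()
session.add(UserEntity(name='alice', password='<hashed>', privilegeLevel=1))
session.commit()

print(session.query(UserEntity).filter_by(name='alice').count())  # 1
```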
9227a7b8cf65dae2dea286079ee8966be6e1d118 | c6abe311e9bc57fbe2454983c30bdd46e1feaf0d | /python/8-practiceProblemI.py | 9c2bce2f10885c7434ac9e8dbce3714c04cb5138 | [] | no_license | muon012/practiceProblems | 5966241d0af6c396bc52b2adac6d180e734a322d | 8dde7810212ed79745ab06320c5d96ce179db1bd | refs/heads/master | 2020-04-02T15:43:45.496026 | 2019-07-10T18:55:45 | 2019-07-10T18:55:45 | 154,581,118 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,089 | py | # Hackerrank Challenge
if __name__ == '__main__':
# py_students = [['Harry', 37.21], ['Berry', 37.21], ['Tina', 37.2], ['Akriti', 41], ['Harsh', 39]]
py_students = []
print("For this program, you will enter the name of the student followed by its grade. All values are separated by"
" commas. The program will output all the students who have the second lowest grades.")
for _ in range(int(input("How many students will you enter in this program? "))):
name = input()
        score = float(input())  # grades may be non-integer, e.g. 37.21
student = [name, score]
py_students.append(student)
l = len(py_students)
for i in range(l):
for j in range(l - i - 1):
if py_students[j][1] > py_students[j + 1][1]:
py_students[j], py_students[j + 1] = py_students[j + 1], py_students[j]
second_lowest = 0
for k in range(l):
if py_students[k][1] > py_students[0][1]:
second_lowest = py_students[k][1]
break
second_lowest_students = []
for student in py_students:
if student[1] == second_lowest:
second_lowest_students.append(student)
for student in sorted(second_lowest_students):
print(student[0])
| [
"[email protected]"
] | |
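The script above builds its own bubble sort and a second pass to find the target grade; the same "second lowest grade" logic can be written more compactly by sorting the distinct scores once. A self-contained sketch on a fixed list (no interactive input):

```python
def second_lowest_names(students):
    """students: list of [name, grade] pairs."""
    scores = sorted({grade for _, grade in students})
    if len(scores) < 2:
        return []
    second = scores[1]
    return sorted(name for name, grade in students if grade == second)

print(second_lowest_names([['Harry', 37.21], ['Berry', 37.21], ['Tina', 37.2],
                           ['Akriti', 41], ['Harsh', 39]]))
# ['Berry', 'Harry']
```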
c3e6459704cdacd14fbef32550033a2c7b5cdc7c | 0e5291f09c5117504447cc8df683ca1506b70560 | /test/test_ip_address_interface.py | 0743121b18b16fb3ace5cfe5aad58f0e6bd725fc | [
"MIT"
] | permissive | nrfta/python-netbox-client | abd0192b79aab912325485bf4e17777a21953c9b | 68ba6dd4d7306513dc1ad38f3ac59122ba4f70a8 | refs/heads/master | 2022-11-13T16:29:02.264187 | 2020-07-05T18:06:42 | 2020-07-05T18:06:42 | 277,121,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 879 | py | # coding: utf-8
"""
NetBox API
API to access NetBox # noqa: E501
OpenAPI spec version: 2.8
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import netbox_client
from netbox_client.models.ip_address_interface import IPAddressInterface # noqa: E501
from netbox_client.rest import ApiException
class TestIPAddressInterface(unittest.TestCase):
"""IPAddressInterface unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testIPAddressInterface(self):
"""Test IPAddressInterface"""
# FIXME: construct object with mandatory attributes with example values
# model = netbox_client.models.ip_address_interface.IPAddressInterface() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
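The generated test above leaves a FIXME where the model should be constructed. A possible way to fill it in, assuming the swagger-generated model accepts no-argument construction (common for generated clients, but not verified here):

```python
import unittest
from netbox_client.models.ip_address_interface import IPAddressInterface

class TestIPAddressInterfaceExample(unittest.TestCase):
    def test_construct(self):
        # Assumption: all attributes default to None in the generated model.
        model = IPAddressInterface()
        self.assertIsInstance(model, IPAddressInterface)

if __name__ == '__main__':
    unittest.main()
```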
8bfdf81e80831a58f4e585c3016eb416e2c55ff4 | 71ac98ddc5a58a033631683757b5a7509f5dfc5f | /0x05-python-exceptions/3-safe_print_division.py | a1183b7378a83d70ab8cfc7619d3a5136fd69857 | [] | no_license | Virteip/holbertonschool-higher_level_programming | 8193a22eb0de4b800ec2408207735f242dfda6f5 | 616948631871acbd8a5b6887f79ee1801f3b5516 | refs/heads/master | 2020-09-29T01:17:15.120539 | 2020-05-15T02:25:22 | 2020-05-15T02:25:22 | 226,911,855 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 184 | py | #!/usr/bin/python3
def safe_print_division(a, b):
try:
r = a / b
except:
r = None
finally:
print("Inside result: {0}".format(r))
return r
| [
"[email protected]"
] | |
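Expected behaviour of `safe_print_division` above: the `finally` block always prints the intermediate result, and a failed division returns `None`.

```python
>>> safe_print_division(10, 2)
Inside result: 5.0
5.0
>>> safe_print_division(10, 0)
Inside result: None
```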
339357bd4950057f0ff663546f4b6a87a621cca9 | e21c70d5b03633b4e0a89dfccb0cb8ccd88612d0 | /venv/lib/python3.5/site-packages/engineio/socket.py | 65cff1c7b929b32fb0d80ee35a24b800f90379bd | [
"MIT"
] | permissive | LavanyaRamkumar/Networking-app_Dynamic-Quiz | 4d5540088b1e2724626dda8df0fd83442391b40f | 4de8329845712864d3cc8e8b81cfce5a1207224d | refs/heads/master | 2023-02-09T12:08:19.913354 | 2019-10-26T04:23:54 | 2019-10-26T04:23:54 | 173,337,916 | 1 | 1 | MIT | 2023-02-02T04:48:55 | 2019-03-01T16:56:13 | Python | UTF-8 | Python | false | false | 9,102 | py | import six
import sys
import time
from . import exceptions
from . import packet
from . import payload
class Socket(object):
"""An Engine.IO socket."""
upgrade_protocols = ['websocket']
def __init__(self, server, sid):
self.server = server
self.sid = sid
self.queue = self.create_queue()
self.last_ping = time.time()
self.connected = False
self.upgraded = False
self.closing = False
self.closed = False
def create_queue(self):
return getattr(self.server._async['queue'],
self.server._async['queue_class'])()
def poll(self):
"""Wait for packets to send to the client."""
try:
packets = [self.queue.get(timeout=self.server.ping_timeout)]
self.queue.task_done()
except self.server._async['queue'].Empty:
raise exceptions.QueueEmpty()
if packets == [None]:
return []
while True:
try:
packets.append(self.queue.get(block=False))
self.queue.task_done()
except self.server._async['queue'].Empty:
break
return packets
def receive(self, pkt):
"""Receive packet from the client."""
packet_name = packet.packet_names[pkt.packet_type] \
if pkt.packet_type < len(packet.packet_names) else 'UNKNOWN'
self.server.logger.info('%s: Received packet %s data %s',
self.sid, packet_name,
pkt.data if not isinstance(pkt.data, bytes)
else '<binary>')
if pkt.packet_type == packet.PING:
self.last_ping = time.time()
self.send(packet.Packet(packet.PONG, pkt.data))
elif pkt.packet_type == packet.MESSAGE:
self.server._trigger_event('message', self.sid, pkt.data,
run_async=self.server.async_handlers)
elif pkt.packet_type == packet.UPGRADE:
self.send(packet.Packet(packet.NOOP))
elif pkt.packet_type == packet.CLOSE:
self.close(wait=False, abort=True)
else:
raise exceptions.UnknownPacketError()
def send(self, pkt):
"""Send a packet to the client."""
if self.closed:
raise exceptions.SocketIsClosedError()
if time.time() - self.last_ping > self.server.ping_timeout:
self.server.logger.info('%s: Client is gone, closing socket',
self.sid)
self.close(wait=False, abort=True)
return
self.queue.put(pkt)
self.server.logger.info('%s: Sending packet %s data %s',
self.sid, packet.packet_names[pkt.packet_type],
pkt.data if not isinstance(pkt.data, bytes)
else '<binary>')
def handle_get_request(self, environ, start_response):
"""Handle a long-polling GET request from the client."""
connections = [
s.strip()
for s in environ.get('HTTP_CONNECTION', '').lower().split(',')]
transport = environ.get('HTTP_UPGRADE', '').lower()
if 'upgrade' in connections and transport in self.upgrade_protocols:
self.server.logger.info('%s: Received request to upgrade to %s',
self.sid, transport)
return getattr(self, '_upgrade_' + transport)(environ,
start_response)
try:
packets = self.poll()
except exceptions.QueueEmpty:
exc = sys.exc_info()
self.close(wait=False)
six.reraise(*exc)
return packets
def handle_post_request(self, environ):
"""Handle a long-polling POST request from the client."""
length = int(environ.get('CONTENT_LENGTH', '0'))
if length > self.server.max_http_buffer_size:
raise exceptions.ContentTooLongError()
else:
body = environ['wsgi.input'].read(length)
p = payload.Payload(encoded_payload=body)
for pkt in p.packets:
self.receive(pkt)
def close(self, wait=True, abort=False):
"""Close the socket connection."""
if not self.closed and not self.closing:
self.closing = True
self.server._trigger_event('disconnect', self.sid, run_async=False)
if not abort:
self.send(packet.Packet(packet.CLOSE))
self.closed = True
if wait:
self.queue.join()
def _upgrade_websocket(self, environ, start_response):
"""Upgrade the connection from polling to websocket."""
if self.upgraded:
raise IOError('Socket has been upgraded already')
if self.server._async['websocket'] is None or \
self.server._async['websocket_class'] is None:
# the selected async mode does not support websocket
return self.server._bad_request()
websocket_class = getattr(self.server._async['websocket'],
self.server._async['websocket_class'])
ws = websocket_class(self._websocket_handler)
return ws(environ, start_response)
def _websocket_handler(self, ws):
"""Engine.IO handler for websocket transport."""
# try to set a socket timeout matching the configured ping interval
for attr in ['_sock', 'socket']: # pragma: no cover
if hasattr(ws, attr) and hasattr(getattr(ws, attr), 'settimeout'):
getattr(ws, attr).settimeout(self.server.ping_timeout)
if self.connected:
# the socket was already connected, so this is an upgrade
self.queue.join() # flush the queue first
pkt = ws.wait()
if pkt != packet.Packet(packet.PING,
data=six.text_type('probe')).encode(
always_bytes=False):
self.server.logger.info(
'%s: Failed websocket upgrade, no PING packet', self.sid)
return []
ws.send(packet.Packet(
packet.PONG,
data=six.text_type('probe')).encode(always_bytes=False))
self.send(packet.Packet(packet.NOOP))
pkt = ws.wait()
decoded_pkt = packet.Packet(encoded_packet=pkt)
if decoded_pkt.packet_type != packet.UPGRADE:
self.upgraded = False
self.server.logger.info(
('%s: Failed websocket upgrade, expected UPGRADE packet, '
'received %s instead.'),
self.sid, pkt)
return []
self.upgraded = True
else:
self.connected = True
self.upgraded = True
# start separate writer thread
def writer():
while True:
packets = None
try:
packets = self.poll()
except exceptions.QueueEmpty:
break
if not packets:
# empty packet list returned -> connection closed
break
try:
for pkt in packets:
ws.send(pkt.encode(always_bytes=False))
except:
break
writer_task = self.server.start_background_task(writer)
self.server.logger.info(
'%s: Upgrade to websocket successful', self.sid)
while True:
p = None
try:
p = ws.wait()
except Exception as e:
# if the socket is already closed, we can assume this is a
# downstream error of that
if not self.closed: # pragma: no cover
self.server.logger.info(
'%s: Unexpected error "%s", closing connection',
self.sid, str(e))
break
if p is None:
# connection closed by client
break
if isinstance(p, six.text_type): # pragma: no cover
p = p.encode('utf-8')
pkt = packet.Packet(encoded_packet=p)
try:
self.receive(pkt)
except exceptions.UnknownPacketError:
pass
except exceptions.SocketIsClosedError:
self.server.logger.info('Receive error -- socket is closed')
break
except: # pragma: no cover
# if we get an unexpected exception we log the error and exit
# the connection properly
self.server.logger.exception('Unknown receive error')
break
self.queue.put(None) # unlock the writer task so that it can exit
writer_task.join()
self.close(wait=True, abort=True)
return []
| [
"[email protected]"
] | |
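The WebSocket upgrade in `_websocket_handler` above hinges on a fixed probe exchange: PING 'probe' from the client, PONG 'probe' plus a polling NOOP from the server, then an UPGRADE packet from the client. A small sketch with the same `packet` module shows what those frames look like once encoded:

```python
from engineio import packet

probe = packet.Packet(packet.PING, data='probe').encode(always_bytes=False)
pong = packet.Packet(packet.PONG, data='probe').encode(always_bytes=False)
noop = packet.Packet(packet.NOOP).encode(always_bytes=False)
upgrade = packet.Packet(packet.UPGRADE).encode(always_bytes=False)

print(probe, pong, noop, upgrade)  # 2probe 3probe 6 5
```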
4613d4637b8e5446f2b84a1ba6f4097ec0a3d330 | f8666599b83d34c861651861cc7db5b3c434fc87 | /plotly/validators/scatter3d/error_y/__init__.py | a9450d059bf50c8b989357d05ae9e897fc206983 | [
"MIT"
] | permissive | mode/plotly.py | 8b66806e88c9f1820d478bab726f0bea81884432 | c5a9ac386a40df2816e6c13264dadf14299401e4 | refs/heads/master | 2022-08-26T00:07:35.376636 | 2018-09-26T19:08:54 | 2018-09-26T19:19:31 | 60,372,968 | 1 | 1 | MIT | 2019-11-13T23:03:22 | 2016-06-03T19:34:55 | Python | UTF-8 | Python | false | false | 618 | py | from ._width import WidthValidator
from ._visible import VisibleValidator
from ._valueminus import ValueminusValidator
from ._value import ValueValidator
from ._type import TypeValidator
from ._tracerefminus import TracerefminusValidator
from ._traceref import TracerefValidator
from ._thickness import ThicknessValidator
from ._symmetric import SymmetricValidator
from ._copy_zstyle import CopyZstyleValidator
from ._color import ColorValidator
from ._arraysrc import ArraysrcValidator
from ._arrayminussrc import ArrayminussrcValidator
from ._arrayminus import ArrayminusValidator
from ._array import ArrayValidator
| [
"[email protected]"
] | |
5956a7e2faaa079368dd7896181b368f16ece831 | 1079e654ce950f50bc4ed27c0974f78589428573 | /tests/__init__.py | 23ad5cb638accd087a54d8728cf2d5068b0f93ee | [] | no_license | uranusjr/python-web-stack | d2aaa64a4ba6e5d447db1444ab08813e507961e1 | 971c0190f28abfbd8c577e2b149e2d1a604d3f13 | refs/heads/master | 2020-04-27T11:17:21.922742 | 2013-11-20T10:40:16 | 2013-11-20T10:40:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 953 | py | #!/usr/bin/env python
# -*- coding: utf-8
import os.path
import shutil
import tempfile
from nose.plugins.skip import SkipTest
from pywebstack import utils
__all__ = [
'ALL_FORMULAE_NAMES', 'PROJECT_NAME', 'TEMP_DIR',
'MockedArguments',
'skipped', 'create_tempdir', 'cleanup_tempdir'
]
ALL_FORMULAE_NAMES = (
'django', 'Django'
)
PROJECT_NAME = 'omega_directive'
TEMP_DIR = os.path.join(tempfile.gettempdir(), 'pywebstack_test')
class MockedArguments(dict):
def __getattr__(self, key):
try:
return self[key]
except KeyError:
raise AttributeError(key)
def __setattr__(self, key, value):
self[key] = value
def create_tempdir():
utils.mkdir_p(TEMP_DIR)
def cleanup_tempdir():
shutil.rmtree(TEMP_DIR)
def skipped(f):
"""Decorator for instructing nose to skip the decorated test"""
def _():
raise SkipTest
_.__name__ = f.__name__
return _
| [
"[email protected]"
] | |
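A sketch of how the helpers above are intended to be used from a nose-style test module, assuming `pywebstack` itself is importable:

```python
from tests import (MockedArguments, skipped, create_tempdir, cleanup_tempdir,
                   PROJECT_NAME)

def setup_module():
    create_tempdir()        # scratch directory for the whole module

def teardown_module():
    cleanup_tempdir()

def test_mocked_arguments_behave_like_attributes():
    args = MockedArguments(name=PROJECT_NAME, force=True)
    assert args.name == PROJECT_NAME and args.force

@skipped
def test_not_implemented_yet():
    raise AssertionError('nose skips this body via SkipTest')
```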
c2ec06b06c3e618131391854042d37d0ab13a261 | 426aed70aa6925105f10c7fcb7b611b277bf8b84 | /benchmarks/benchmarks/model_acc/bench_sage_ns.py | 6f99d94ad32d6ce29bf95b128eed28951ecb9b7f | [
"Apache-2.0"
] | permissive | hengruizhang98/dgl | 0ce7201ca7380482440f031cb8ced6ca0e8c8dc1 | 195f99362d883f8b6d131b70a7868a537e55b786 | refs/heads/master | 2023-06-10T22:21:45.835646 | 2021-04-13T12:29:43 | 2021-04-13T12:29:43 | 336,804,001 | 3 | 0 | Apache-2.0 | 2021-02-07T14:16:20 | 2021-02-07T14:16:20 | null | UTF-8 | Python | false | false | 7,162 | py | import dgl
import torch as th
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import torch.multiprocessing as mp
from torch.utils.data import DataLoader
import dgl.nn.pytorch as dglnn
import time
from .. import utils
class SAGE(nn.Module):
def __init__(self,
in_feats,
n_hidden,
n_classes,
n_layers,
activation,
dropout):
super().__init__()
self.n_layers = n_layers
self.n_hidden = n_hidden
self.n_classes = n_classes
self.layers = nn.ModuleList()
self.layers.append(dglnn.SAGEConv(in_feats, n_hidden, 'mean'))
for i in range(1, n_layers - 1):
self.layers.append(dglnn.SAGEConv(n_hidden, n_hidden, 'mean'))
self.layers.append(dglnn.SAGEConv(n_hidden, n_classes, 'mean'))
self.dropout = nn.Dropout(dropout)
self.activation = activation
def forward(self, blocks, x):
h = x
for l, (layer, block) in enumerate(zip(self.layers, blocks)):
h = layer(block, h)
if l != len(self.layers) - 1:
h = self.activation(h)
h = self.dropout(h)
return h
def inference(self, g, x, batch_size, device):
"""
Inference with the GraphSAGE model on full neighbors (i.e. without neighbor sampling).
g : the entire graph.
x : the input of entire node set.
The inference code is written in a fashion that it could handle any number of nodes and
layers.
"""
# During inference with sampling, multi-layer blocks are very inefficient because
# lots of computations in the first few layers are repeated.
# Therefore, we compute the representation of all nodes layer by layer. The nodes
# on each layer are of course splitted in batches.
# TODO: can we standardize this?
for l, layer in enumerate(self.layers):
y = th.zeros(g.number_of_nodes(), self.n_hidden if l !=
len(self.layers) - 1 else self.n_classes)
sampler = dgl.dataloading.MultiLayerFullNeighborSampler(1)
dataloader = dgl.dataloading.NodeDataLoader(
g,
th.arange(g.number_of_nodes()),
sampler,
batch_size=batch_size,
shuffle=True,
drop_last=False,
num_workers=4)
for input_nodes, output_nodes, blocks in dataloader:
block = blocks[0]
block = block.int().to(device)
h = x[input_nodes].to(device)
h = layer(block, h)
if l != len(self.layers) - 1:
h = self.activation(h)
h = self.dropout(h)
y[output_nodes] = h.cpu()
x = y
return y
def compute_acc(pred, labels):
"""
Compute the accuracy of prediction given the labels.
"""
labels = labels.long()
return (th.argmax(pred, dim=1) == labels).float().sum() / len(pred)
def evaluate(model, g, inputs, labels, val_nid, batch_size, device):
"""
Evaluate the model on the validation set specified by ``val_nid``.
g : The entire graph.
inputs : The features of all the nodes.
labels : The labels of all the nodes.
val_nid : the node Ids for validation.
batch_size : Number of nodes to compute at the same time.
device : The GPU device to evaluate on.
"""
model.eval()
with th.no_grad():
pred = model.inference(g, inputs, batch_size, device)
model.train()
return compute_acc(pred[val_nid], labels[val_nid])
def load_subtensor(g, seeds, input_nodes, device):
"""
Copys features and labels of a set of nodes onto GPU.
"""
batch_inputs = g.ndata['features'][input_nodes].to(device)
batch_labels = g.ndata['labels'][seeds].to(device)
return batch_inputs, batch_labels
@utils.benchmark('acc', 600)
@utils.parametrize('data', ['ogbn-products', "reddit"])
def track_acc(data):
data = utils.process_data(data)
device = utils.get_bench_device()
g = data[0]
g.ndata['features'] = g.ndata['feat']
g.ndata['labels'] = g.ndata['label']
in_feats = g.ndata['features'].shape[1]
n_classes = data.num_classes
# Create csr/coo/csc formats before launching training processes with multi-gpu.
# This avoids creating certain formats in each sub-process, which saves momory and CPU.
g.create_formats_()
num_epochs = 20
num_hidden = 16
num_layers = 2
fan_out = '5,10'
batch_size = 1024
lr = 0.003
dropout = 0.5
num_workers = 4
train_nid = th.nonzero(g.ndata['train_mask'], as_tuple=True)[0]
# Create PyTorch DataLoader for constructing blocks
sampler = dgl.dataloading.MultiLayerNeighborSampler(
[int(fanout) for fanout in fan_out.split(',')])
dataloader = dgl.dataloading.NodeDataLoader(
g,
train_nid,
sampler,
batch_size=batch_size,
shuffle=True,
drop_last=False,
num_workers=num_workers)
# Define model and optimizer
model = SAGE(in_feats, num_hidden, n_classes, num_layers, F.relu, dropout)
model = model.to(device)
loss_fcn = nn.CrossEntropyLoss()
loss_fcn = loss_fcn.to(device)
optimizer = optim.Adam(model.parameters(), lr=lr)
# dry run one epoch
for step, (input_nodes, seeds, blocks) in enumerate(dataloader):
# Load the input features as well as output labels
#batch_inputs, batch_labels = load_subtensor(g, seeds, input_nodes, device)
blocks = [block.int().to(device) for block in blocks]
batch_inputs = blocks[0].srcdata['features']
batch_labels = blocks[-1].dstdata['labels']
# Compute loss and prediction
batch_pred = model(blocks, batch_inputs)
loss = loss_fcn(batch_pred, batch_labels)
optimizer.zero_grad()
loss.backward()
optimizer.step()
# Training loop
for epoch in range(num_epochs):
# Loop over the dataloader to sample the computation dependency graph as a list of
# blocks.
for step, (input_nodes, seeds, blocks) in enumerate(dataloader):
# Load the input features as well as output labels
#batch_inputs, batch_labels = load_subtensor(g, seeds, input_nodes, device)
blocks = [block.int().to(device) for block in blocks]
batch_inputs = blocks[0].srcdata['features']
batch_labels = blocks[-1].dstdata['labels']
# Compute loss and prediction
batch_pred = model(blocks, batch_inputs)
loss = loss_fcn(batch_pred, batch_labels)
optimizer.zero_grad()
loss.backward()
optimizer.step()
test_g = g
test_nid = th.nonzero(
~(test_g.ndata['train_mask'] | test_g.ndata['val_mask']), as_tuple=True)[0]
test_acc = evaluate(
model, test_g, test_g.ndata['features'], test_g.ndata['labels'], test_nid, batch_size, device)
return test_acc.item()
| [
"[email protected]"
] | |
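A stripped-down sketch of the same neighbour-sampling pipeline on a synthetic graph, outside the benchmark harness; graph size, feature width and fan-outs are arbitrary choices for illustration.

```python
import dgl
import torch as th

g = dgl.rand_graph(1000, 20000)                  # random toy graph
g.ndata['features'] = th.randn(1000, 16)
g.ndata['labels'] = th.randint(0, 4, (1000,))

sampler = dgl.dataloading.MultiLayerNeighborSampler([5, 10])
dataloader = dgl.dataloading.NodeDataLoader(
    g, th.arange(1000), sampler,
    batch_size=128, shuffle=True, drop_last=False, num_workers=0)

for input_nodes, output_nodes, blocks in dataloader:
    # `blocks` holds one bipartite graph per sampled layer (two here).
    print(len(input_nodes), len(output_nodes), [b.num_edges() for b in blocks])
    break
```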
daba2bcf498cc73ac6ef73cd4a09cb9bed329969 | 53edf6b0f4262ee76bb4e3b943394cfeafe54865 | /linear_theory/Non_linear_stuff/plot_nonlinear_from_data.py | 97e9a4d14b244d725d021a167e009d6a2dcf67e6 | [] | no_license | Yoshi2112/hybrid | f86265a2d35cb0a402ba6ab5f718717d8eeb740c | 85f3051be9368bced41af7d73b4ede9c3e15ff16 | refs/heads/master | 2023-07-07T21:47:59.791167 | 2023-06-27T23:09:23 | 2023-06-27T23:09:23 | 82,878,960 | 0 | 1 | null | 2020-04-16T18:03:59 | 2017-02-23T03:14:49 | Python | UTF-8 | Python | false | false | 37,835 | py | # -*- coding: utf-8 -*-
"""
Created on Sun Jan 2 22:05:39 2022
@author: Yoshi
"""
import os, sys, warnings, pdb, emd
sys.path.append('../../')
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.colors as colors
import matplotlib.dates as mdates
import matplotlib as mpl
import matplotlib.cm as cm
sys.path.append('D://Google Drive//Uni//PhD 2017//Data//Scripts//')
import crres_file_readers as cfr
import rbsp_file_readers as rfr
import rbsp_fields_loader as rfl
import analysis_scripts as ascr
import fast_scripts as fscr
import nonlinear_scripts as nls
import extract_parameters_from_data as epd
sys.path.append('..//new_general_DR_solver//')
from matplotlib.lines import Line2D
from multiapprox_dispersion_solver import create_species_array, get_dispersion_relation, get_cold_growth_rates
#%% Constants
PCHARGE = 1.602e-19
ECHARGE =-1.602e-19
PMASS = 1.673e-27
EMASS = 9.110e-31
EPS0 = 8.854e-12
MU0 = 4e-7*np.pi
RE = 6.371e6
SPLIGHT = 3e8
KB = 1.380649e-23
B_SURF = 3.12e-5
#%% FIELD ANALYSIS
def get_mag_data(time_start, time_end, probe, low_cut=None, high_cut=None):
'''
Load and process data
'''
if probe != 'crres':
ti, pc1_mags, pc1_elec, HM_mags, HM_elec, dt, e_flag, gfreq =\
rfl.load_decomposed_fields(_rbsp_path, time_start, time_end, probe,
pad=600, LP_B0=1.0, LP_HM=30.0, ex_threshold=5.0,
get_gyfreqs=True)
else:
ti, B0, HM_mags, pc1_mags, \
E0, HM_elec, pc1_elec, S, B, E = cfr.get_crres_fields(_crres_path, time_start, time_end,
pad=600, output_raw_B=True, Pc5_LP=30.0, B0_LP=1.0,
Pc5_HP=None, dEx_LP=None, interpolate_nan=True)
dt = 1/32.
# Bandpass selected component and return
    dat = pc1_mags
    if low_cut is not None:
        dat = ascr.clw_high_pass(dat, low_cut*1000., dt, filt_order=4)
    if high_cut is not None:
        # Low-pass the already high-passed series so both cuts form a band-pass
        dat = ascr.clw_low_pass(dat, high_cut*1000., dt, filt_order=4)
#pc1_res = 5.0
_xpow, _xtime, _xfreq = fscr.autopower_spectra(ti, pc1_mags[:, 0], time_start,
time_end, dt, overlap=0.95, df=pc1_res,
window_data=True)
_ypow, _xtime, _xfreq = fscr.autopower_spectra(ti, pc1_mags[:, 1], time_start,
time_end, dt, overlap=0.95, df=pc1_res,
window_data=True)
_zpow, _xtime, _xfreq = fscr.autopower_spectra(ti, pc1_mags[:, 2], time_start,
time_end, dt, overlap=0.95, df=pc1_res,
window_data=True)
_pow = np.array([_xpow, _ypow, _zpow])
return ti, dat, HM_mags, dt, _xtime, _xfreq, _pow, gfreq
def load_EMIC_IMFs_and_dynspec(imf_start, imf_end, IA_filter=None):
'''
Loads IMFs and dynamic spectra
'''
_ti, _dat, _HM_mags, _dt, _xtime, _xfreq, _pow, gfreq = get_mag_data(_time_start, _time_end,
_probe, low_cut=_band_start, high_cut=_band_end)
sample_rate = 1./_dt
# Calculate IMFs, get instantaneous phase, frequency, amplitude
print(f'Sifting IMFs and performing HHT between {imf_start} and {imf_end}')
imfs, IPs, IFs, IAs = [], [], [], []
for ii, lbl in zip(range(3), ['x', 'y', 'z']):
imf = emd.sift.sift(_dat[:, ii], sift_thresh=1e-2)
IP, IF, IA = emd.spectra.frequency_transform(imf, sample_rate, 'hilbert')
print(f'{imf.shape[1]} IMFs found for B{lbl}')
imfs.append(imf)
IPs.append(IP)
IFs.append(IF)
IAs.append(IA)
# =============================================================================
# if IA_filter is not None:
# for ii in range(3):
# for ii in range(IAs[ii].shape[0]):
# if
# =============================================================================
# Snip IMFs to time
st, en = ascr.boundary_idx64(_ti, imf_start, imf_end)
for ii in range(3):
_imf_t = _ti[st:en]
IAs[ii] = IAs[ii][st:en]
IFs[ii] = IFs[ii][st:en]
IPs[ii] = IPs[ii][st:en]
return _ti, _dat, _HM_mags, _imf_t, IAs, IFs, IPs, _xtime, _xfreq, _pow, gfreq
def get_pc1_peaks(sfreq, spower, band_start, band_end, npeaks=None):
'''
Returns integrated spower spectrum between band_start and band_end as well
as index locations of the most prominent npeaks
'''
fst, fen = ascr.boundary_idx64(sfreq, band_start, band_end)
pc1_int_power = spower[fst:fen, :].sum(axis=0)
peak_idx = ascr.find_peaks(pc1_int_power, npeaks=npeaks, sortby='prom')
return pc1_int_power, peak_idx
#%% DATA INTERPOLATION AND LOADING
def HOPE_interpolate_to_time(new_time, HOPE_time, HOPE_dens, HOPE_temp, HOPE_anis):
'''
edens_time :: WAVES electron density time to interpolate data_array to (length M)
data_time :: Current HOPE sample times of length N
    data_array :: Data arrays consisting of ni, Ti, Ai in a 3xN ndarray
'''
new_dens = np.zeros((3, new_time.shape[0]), dtype=np.float64)
new_temp = np.zeros((3, new_time.shape[0]), dtype=np.float64)
new_anis = np.zeros((3, new_time.shape[0]), dtype=np.float64)
xi = new_time.astype(np.int64)
xp = HOPE_time.astype(np.int64)
for ii in range(3):
new_dens[ii, :] = np.interp(xi, xp, HOPE_dens[ii, :])
new_temp[ii, :] = np.interp(xi, xp, HOPE_temp[ii, :])
new_anis[ii, :] = np.interp(xi, xp, HOPE_anis[ii, :])
return new_dens, new_temp, new_anis
def SPICE_interpolate_to_time(new_time, SPICE_times, SPICE_dens, SPICE_PerpPres, SPICE_ParaPres):
'''
edens_time :: WAVES electron density time to interpolate data_array to (length M)
data_times :: Current RBSPICE data sample times of length 3xN
    data_array :: Data arrays consisting of ni, Ti, Ai in a 3xN ndarray
'''
new_dens = np.zeros((3, new_time.shape[0]), dtype=np.float64)
new_Pper = np.zeros((3, new_time.shape[0]), dtype=np.float64)
new_Ppar = np.zeros((3, new_time.shape[0]), dtype=np.float64)
xi = new_time.astype(np.int64)
for ii in range(3):
# Skip if bad
if (SPICE_times[ii] is None) or (SPICE_dens[ii] is None):
continue
xp = SPICE_times[ii].astype(np.int64)
new_dens[ii, :] = np.interp(xi, xp, SPICE_dens[ii, :])
new_Pper[ii, :] = np.interp(xi, xp, SPICE_PerpPres[ii, :])
new_Ppar[ii, :] = np.interp(xi, xp, SPICE_ParaPres[ii, :])
return new_dens, new_Pper, new_Ppar
def interpolate_B(new_time, b_time, b_array, dt, LP_filter=True):
'''
Does second LP filter based on the Nyquist of the new sample rate
Different to original filter, which is just to get rid of EMIC signal
'''
# Filter at Nyquist frequency to prevent aliasing
if LP_filter == True:
nyq = 1.0 / (2.0 * dt)
for ii in range(3):
b_array[:, ii] = ascr.clw_low_pass(b_array[:, ii].copy(), nyq, 1./64., filt_order=4)
xp = b_time.astype(np.int64)
yp = np.sqrt(b_array[:, 0] ** 2 + b_array[:, 1] ** 2 + b_array[:, 2] ** 2)
xi = new_time.astype(np.int64)
yi = np.interp(xi, xp, yp)
return yi
def interpolate_ne(new_time, den_time, den_array):
return np.interp(new_time.astype(np.int64), den_time.astype(np.int64), den_array)
def load_and_interpolate_plasma_params(time_start, time_end, probe, nsec=None,
rbsp_path='G://DATA//RBSP//', HM_filter_mhz=None,
time_array=None, check_interp=False):
'''
Same copy+paste as other versions, without the SPICE stuff
'''
print('Loading and interpolating satellite data')
# Ephemeris data
# Load ephemeris data
eph_params = ['L', 'CD_MLAT', 'CD_MLT']
eph_times, eph_dict = rfr.retrieve_RBSP_ephemeris_data(rbsp_path, probe, time_start, time_end,
eph_params, padding=[60, 60])
# Cold (total?) electron plasma density
den_times, edens, dens_err = rfr.retrieve_RBSP_electron_density_data(rbsp_path, time_start, time_end,
probe, pad=30)
# Magnetic magnitude
mag_times, raw_mags = rfl.load_magnetic_field(rbsp_path, time_start, time_end, probe, return_raw=True, pad=3600)
# Filter out EMIC waves (background plus HM)
if HM_filter_mhz is not None:
filt_mags = np.zeros(raw_mags.shape)
for ii in range(3):
filt_mags[:, ii] = ascr.clw_low_pass(raw_mags[:, ii], HM_filter_mhz, 1./64., filt_order=4)
else:
filt_mags = raw_mags
# HOPE data
itime, etime, pdict, perr = rfr.retrieve_RBSP_hope_moment_data( rbsp_path, time_start, time_end, padding=30, probe=probe)
hope_dens = np.array([pdict['Dens_p_30'], pdict['Dens_he_30'], pdict['Dens_o_30']])
hope_tperp = np.array([pdict['Tperp_p_30'], pdict['Tperp_he_30'], pdict['Tperp_o_30']])
hope_tpar = np.array([pdict['Tpar_p_30'], pdict['Tpar_he_30'], pdict['Tpar_o_30']])
hope_anis = np.array([pdict['Tperp_Tpar_p_30'], pdict['Tperp_Tpar_he_30'], pdict['Tperp_Tpar_o_30']]) - 1.
# Interpolation step
if nsec is None:
# This should let me set my own timebase by feeding in an array
if time_array is None:
time_array = den_times.copy()
iedens = edens.copy()
else:
iedens = interpolate_ne(time_array, den_times, edens)
else:
time_array = np.arange(time_start, time_end, np.timedelta64(nsec, 's'), dtype='datetime64[us]')
iedens = interpolate_ne(time_array, den_times, edens)
ihope_dens , ihope_tpar , ihope_anis = HOPE_interpolate_to_time(time_array, itime, hope_dens, hope_tpar, hope_anis)
ihope_dens , ihope_tperp, ihope_anis = HOPE_interpolate_to_time(time_array, itime, hope_dens, hope_tperp, hope_anis)
Bi = interpolate_B(time_array, mag_times, filt_mags, nsec, LP_filter=False)
iL = interpolate_ne(time_array, eph_times, eph_dict['L'])
if check_interp:
plt.ioff()
# Cold dens + Magnetic field
fig1, axes1 = plt.subplots(2)
axes1[0].plot(den_times, edens, c='b')
axes1[0].plot(time_array, iedens, c='r', lw=0.75)
axes1[0].set_ylabel('$n_e$')
B_total = np.sqrt(raw_mags[:, 0]**2+raw_mags[:, 1]**2+raw_mags[:, 2]**2)
axes1[1].plot(mag_times, B_total, c='b')
axes1[1].plot(time_array, Bi, c='r', lw=0.75)
axes1[1].set_ylabel('B')
for ax in axes1:
ax.set_xlim(time_start, time_end)
# HOPE parameters (dens, temp, anis)
fig2, axes2 = plt.subplots(3)
for xx, clr in zip(range(3), ['r', 'g', 'b']):
axes2[0].plot(itime, hope_dens[xx], c=clr, ls='-')
axes2[0].plot(time_array, ihope_dens[xx], c=clr, ls='--')
axes2[0].set_ylabel('$n_i (cc)$')
axes2[1].plot(itime, hope_tpar[xx], c=clr, ls='-')
axes2[1].plot(time_array, ihope_tpar[xx], c=clr, ls='--')
axes2[1].set_ylabel('$T_{\perp, i} (keV)$')
axes2[2].plot(itime, hope_anis[xx], c=clr, ls='-')
axes2[2].plot(time_array, ihope_anis[xx], c=clr, ls='--')
axes2[2].set_ylabel('$A_i$')
for ax in axes2:
ax.set_xlim(time_start, time_end)
plt.show()
# Subtract energetic components from total electron density (assuming each is singly charged)
cold_dens = iedens - ihope_dens.sum(axis=0)
# Original DTs just for reference
den_dt = ( den_times[1] - den_times[0]) / np.timedelta64(1, 's')
mag_dt = ( mag_times[1] - mag_times[0]) / np.timedelta64(1, 's')
hope_dt = ( itime[1] - itime[0]) / np.timedelta64(1, 's')
new_dt = ( time_array[1] - time_array[0]) / np.timedelta64(1, 's')
print('Original sample periods:')
print(f'Cold Plasma Density: {den_dt} s')
print(f'Magnetic field: {mag_dt} s ')
print(f'HOPE Particle data: {hope_dt} s')
print('')
print(f'New sample period: {new_dt} s')
return time_array, Bi*1e-9, iedens*1e6, cold_dens*1e6, ihope_dens*1e6, ihope_tpar, ihope_tperp, ihope_anis, iL
def load_CRRES_data(time_start, time_end, crres_path='G://DATA//CRRES//', nsec=None):
'''
Since no moment data exists for CRRES, this loads only the cold electron density and
magnetic field (with option to low-pass filter) and interpolates them to
the same timebase (linear or cubic? Just do linear for now).
If nsec is none, interpolates B to ne. Else, interpolates both to nsec.
CRRES density cadence bounces between 8-9 seconds (terrible for FFT, alright for interp)
den_dict params: ['VTCW', 'YRDOY', 'TIMESTRING', 'FCE_KHZ', 'FUHR_KHZ', 'FPE_KHZ', 'NE_CM3', 'ID', 'M']
TODO: Fix this. B_arr shape and nyq in mHz
'''
# Load data
times, B0, HM, dB, E0, HMe, dE, S, B, E = cfr.get_crres_fields(crres_path,
time_start, time_end, pad=1800, E_ratio=5.0, rotation_method='vector',
output_raw_B=True, interpolate_nan=None, B0_LP=1.0,
Pc1_LP=5000, Pc1_HP=100, Pc5_LP=30, Pc5_HP=None, dEx_LP=None)
den_times, den_dict = cfr.get_crres_density(crres_path, time_start, time_end, pad=600)
edens = den_dict['NE_CM3']
# Interpolate B only
if nsec is None:
# Low-pass total field to avoid aliasing (Assume 8.5 second cadence)
B_dt = 1.0 / 32.0
nyq = 1.0 / (2.0 * 8.5)
for ii in range(3):
B[:, ii] = ascr.clw_low_pass(B[:, ii].copy(), nyq, B_dt, filt_order=4)
# Take magnitude and interpolate
B_mag = np.sqrt(B[:, 0] ** 2 + B[:, 1] ** 2 + B[:, 2] ** 2)
B_interp = np.interp(den_times.astype(np.int64), times.astype(np.int64), B_mag)
return den_times, B_interp, edens
else:
# Define new time array
ntime = np.arange(time_start, time_end, np.timedelta64(nsec, 's'), dtype='datetime64[us]')
# Low-pass total field to avoid aliasing (Assume 8.5 second cadence)
B_dt = 1.0 / 32.0
nyq = 1.0 / (2.0 * nsec)
for ii in range(3):
B[ii] = ascr.clw_low_pass(B[ii].copy(), nyq, B_dt, filt_order=4)
# Take magnitude and interpolate
B_mag = np.sqrt(B[0] ** 2 + B[1] ** 2 + B[2] ** 2)
B_interp = np.interp(ntime.astype(np.int64), times.astype(np.int64), B_mag)
# Also interpolate density
ne_interp = np.interp(ntime.astype(np.int64), den_times.astype(np.int64), edens)
return ntime, B_interp, ne_interp
#%% PLOTTING ROUTINES
def add_custom_legend(_ax, _labels, _linestyles, _alpha, _color):
'''
TODO: Add some catches for 'if None...' for things like alpha and linestyle
'''
legend_elements = []
for label, style, alpha, clr in zip(_labels, _linestyles, _alpha, _color):
legend_elements.append(Line2D([0], [0], color=clr, lw=1,
label=label, linestyle=style, alpha=alpha))
new_legend = _ax.legend(handles=legend_elements, loc='upper left')
return new_legend
def calculate_all_NL_amplitudes():
# Import cutoff-derived composition information
cutoff_filename = 'D://Google Drive//Uni//PhD 2017//Josh PhD Share Folder//Thesis//Data_Plots//20130725_RBSP-A//pearl_times.txt'
cutoff_dict = epd.read_cutoff_file(cutoff_filename)
#plot_amplitudes_from_data(_time_start, _time_end, probe=_probe, pad=600)
time_start = _time_start
time_end = _time_end
probe = 'a'
pad = 0
plot_start = time_start - np.timedelta64(int(pad), 's')
plot_end = time_end + np.timedelta64(int(pad), 's')
time, mag, edens, cold_dens, hope_dens, hope_tpar, hope_tperp, hope_anis, L_vals =\
load_and_interpolate_plasma_params(
plot_start, plot_end, probe, nsec=None,
rbsp_path='E://DATA//RBSP//', HM_filter_mhz=50.0,
time_array=None, check_interp=False)
    # load_EMIC_IMFs_and_dynspec() returns 11 values, including the gyrofrequencies
    mag_time, pc1_mags, HM_mags, imf_time, IA, IF, IP, stime, sfreq, spower, gyfreqs = \
load_EMIC_IMFs_and_dynspec(plot_start, plot_end)
# Specify color values for time
time0 = time[ 0].astype(np.int64)
time1 = time[-1].astype(np.int64)
norm = mpl.colors.LogNorm(vmin=time0, vmax=time1, clip=False)
mapper = cm.ScalarMappable(norm=norm, cmap=cm.jet)
lpad = 20
fig, axes = plt.subplots(nrows=4, ncols=2, figsize=(8.27, 11.69),
gridspec_kw={'width_ratios':[1, 0.01],
'height_ratios':[1, 1, 0.5, 2]
})
# Spectra/IP
im0 = axes[0, 0].pcolormesh(stime, sfreq, spower.sum(axis=0).T, cmap='jet',
norm=colors.LogNorm(vmin=1e-4, vmax=1e1))
axes[0, 0].set_ylim(0, fmax)
axes[0, 0].set_ylabel('$f$\n(Hz)', rotation=0, labelpad=lpad, fontsize=12)
fig.colorbar(im0, cax=axes[0, 1], extend='both').set_label(
r'$\frac{nT^2}{Hz}$', fontsize=16, rotation=0, labelpad=5)
axes[0, 0].plot(imf_time, IF[0][:, 0], c='k', lw=0.75)
axes[0, 0].plot(imf_time, IF[1][:, 0], c='k', lw=0.75, alpha=0.8)
#axes[0, 0].plot(imf_time, IF[2][:, 0], c='k', lw=0.75, alpha=0.6)
#axes[0, 0].axvline(this_time, color='white', ls='-' , alpha=0.7)
axes[0, 0].set_xlim(plot_start, plot_end)
axes[0, 0].set_xticklabels([])
axes[0, 0].axhline(_band_start , color='white', ls='--')
axes[0, 0].axhline(_band_end , color='white', ls='--')
# mag_time, pc1_mags, IA, IF, IP, stime, sfreq, spower
# Timeseries for comparison
axes[1, 0].plot(mag_time, pc1_mags[:, 0], c='b', label='$\delta B_\\nu$')
axes[1, 0].plot(mag_time, pc1_mags[:, 1], c='r', label='$\delta B_\phi$', alpha=0.5)
#axes[1, 0].plot(mag_time, pc1_mags[:, 2], c='k', label='$\delta B_\mu$', alpha=0.25)
axes[1, 0].set_ylabel('nT', rotation=0, labelpad=lpad)
axes[1, 0].set_xlim(plot_start, plot_end)
axes[1, 0].set_xlabel('Time [UT]')
axes[1, 0].set_xlim(plot_start, plot_end)
# CALCULATE PLASMA PARAMETERS AND AMPLITUDES FOR ALL TIMES
# Plot all on same graph, use colorbar to discern time
# Maybe do for cutoff times/packet times
for ii in range(0, time.shape[0], 4):
this_time = time[ii]
clr = mapper.to_rgba(time[ii].astype(np.int64))
print('Doing time:', this_time)
# Get oxygen concentration from cutoffs
cutoff = np.interp(this_time.astype(np.int64),
cutoff_dict['CUTOFF_TIME'].astype(np.int64),
cutoff_dict['CUTOFF_NORM'])
o_frac = epd.calculate_o_from_he_and_cutoff(cutoff, he_frac)
h_frac = 1. - he_frac - o_frac
# Cold plasma params, SI units
B0 = mag[ii]
name = np.array(['H' , 'He' , 'O' ])
mass = np.array([1.0 , 4.0 , 16.0 ]) * PMASS
charge = np.array([1.0 , 1.0 , 1.0 ]) * PCHARGE
density = np.array([h_frac, he_frac, o_frac ]) * edens[ii]
ani = np.array([0.0 , 0.0 , 0.0 ])
tpar = np.array([0.0 , 0.0 , 0.0 ])
tper = (ani + 1) * tpar
Species, PP = create_species_array(B0, name, mass, charge, density, tper, ani)
# Frequencies to evaluate, calculate wavenumber (cold approximation)
f_min = 0.07*PP['pcyc_rad'] / (2*np.pi)
f_max = 0.24*PP['pcyc_rad'] / (2*np.pi)
Nf = 10000
f_vals = np.linspace(f_min, f_max, Nf)
w_vals = 2*np.pi*f_vals
k_vals = nls.get_k_cold(w_vals, Species)
# Define hot proton parameters (velocities normalized c)
# Remember: temperatures originally in eV
nh = hope_dens[0][ii]
wph2 = nh * PCHARGE ** 2 / (PMASS * EPS0)
Vth_para = np.sqrt(KB * hope_tpar[0][ii]*(PCHARGE/KB) / PMASS) / SPLIGHT
Vth_perp = np.sqrt(KB * hope_tperp[0][ii]*(PCHARGE/KB) / PMASS) / SPLIGHT
Q = 0.5
# Curvature parameters (this has the most wiggle room)
L = 4.7#L_vals[ii]
a = 4.5 / (L*RE)**2
a = a*(SPLIGHT**2/PP['pcyc_rad']**2)
Vg, Vp, Vr = nls.get_velocities(w_vals, Species, PP, normalize=True)
s0, s1, s2 = nls.get_inhomogeneity_terms(w_vals, Species, PP, Vth_perp, normalize_vel=True)
# Normalize input parameters
wph = np.sqrt(wph2) / PP['pcyc_rad']
w = w_vals / PP['pcyc_rad']
# DO THE ACTUAL CALCULATION (All hands off from here, using existing code/proforma)
tau = 1.00
B_th = nls.get_threshold_amplitude(w, wph, Q, s2, a, Vp, Vr, Vth_para, Vth_perp)
B_opt = nls.get_optimum_amplitude(w, wph, Q, tau, s0, s1, Vg, Vr, Vth_para, Vth_perp)
T_tr = nls.get_nonlinear_trapping_period(k_vals, Vth_perp*SPLIGHT, B_opt*PP['B0'])
T_N = tau*T_tr*PP['pcyc_rad']
# Filter zeros and infs:
B_th[B_th == np.inf] = np.nan
B_th[B_th == 0] = np.nan
B_opt[B_opt == np.inf] = np.nan
B_opt[B_opt == 0] = np.nan
T_N[T_N == np.inf] = np.nan
T_N[T_N == 0] = np.nan
################
### PLOTTING ###
################
# Bth, Bopt, Inst. Amplitudes
axes[3, 0].plot(f_vals, B_th*B0*1e9, c=clr, ls='--', label=r'$B_{th}$')
axes[3, 0].plot(f_vals, B_opt*B0*1e9, c=clr, ls='-' , label=r'$B_{opt}$')
axes[3, 0].set_ylabel('$B$ [nT]', rotation=0, labelpad=20, fontsize=16)
axes[3, 0].set_xlabel('$f$ [Hz]', fontsize=16)
axes[3, 0].set_ylim(0, 17)
axes[3, 0].set_xlim(f_vals[0], f_vals[-1])
axes[3, 0].tick_params(top=True, right=True)
add_custom_legend(axes[3, 0], [r'$B_{th}$', r'$B_{opt}$'],
['--', '-'],
[1.0, 1.0],
['k', 'k'])
label_every = 15
cbar = fig.colorbar(mapper, cax=axes[3, 1], label='Time', orientation='vertical',
ticks=time[::label_every].astype(np.int64))
for label in cbar.ax.get_yminorticklabels():
label.set_visible(False)
cbar.ax.set_yticklabels(time[::label_every].astype('datetime64[m]'))
axes[1, 1].set_visible(False)
axes[2, 0].set_visible(False)
axes[2, 1].set_visible(False)
axes[0, 0].xaxis.set_major_formatter(mdates.DateFormatter('%H:%M:%S'))
axes[1, 0].xaxis.set_major_formatter(mdates.DateFormatter('%H:%M:%S'))
fig.tight_layout()
fig.subplots_adjust(wspace=0.05, hspace=0)
fig.align_ylabels()
plt.show()
return
#%% MAIN
if __name__ == '__main__':
_rbsp_path = 'F://DATA//RBSP//'
_crres_path = 'F://DATA//CRRES//'
_plot_path = 'D://Google Drive//Uni//PhD 2017//Josh PhD Share Folder//Thesis//Data_Plots//REVISION_PLOTS//'
if not os.path.exists(_plot_path): os.makedirs(_plot_path)
save_plot = True
# TODO: Put all important event-specific variables in the switch
pc1_res = 15.0
dpi = 200
if True:
_probe = 'a'
_time_start = np.datetime64('2013-07-25T21:25:00')
_time_end = np.datetime64('2013-07-25T21:47:00')
_band_start = 0.20
_band_end = 0.80
_npeaks = 22
fmax = 1.0
he_frac = 0.30
L = 4.70
B_max = 17.0
#cutoff_filename = 'D://Google Drive//Uni//PhD 2017//Josh PhD Share Folder//Thesis//Data_Plots//20130725_RBSP-A//pearl_times.txt'
cutoff_filename = 'D://Google Drive//Uni//PhD 2017//Josh PhD Share Folder//Thesis//Data_Plots//20130725_RBSP-A//cutoffs_only.txt'
cases = [0]
else:
_probe = 'a'
_time_start = np.datetime64('2015-01-16T04:25:00')
_time_end = np.datetime64('2015-01-16T05:15:00')
_band_start = 0.10
_band_end = 0.20
_npeaks = 22
fmax = 0.5
he_frac = 0.30
L = 5.73
B_max = 6.00
cutoff_filename = 'D://Google Drive//Uni//PhD 2017//Josh PhD Share Folder//Thesis//Data_Plots//20150116_RBSP-A//cutoffs_only.txt'
#cutoff_filename = 'D://Google Drive//Uni//PhD 2017//Josh PhD Share Folder//Thesis//Data_Plots//20150116_RBSP-A//cutoffs_only_10mHz.txt'
#cutoff_filename = 'D://Google Drive//Uni//PhD 2017//Josh PhD Share Folder//Thesis//Data_Plots//20150116_RBSP-A//pearl_times.txt'
cases = [4, 5]
time_start = _time_start
time_end = _time_end
probe = 'a'
pad = 0
plot_start = time_start - np.timedelta64(int(pad), 's')
plot_end = time_end + np.timedelta64(int(pad), 's')
#%% Non-linear trace plots
if True:
for case in cases:
if case == 0:
# Section 1 (Whole)
parameter_time = np.datetime64('2013-07-25T21:29:40')
packet_start = np.datetime64('2013-07-25T21:27:30')
packet_end = np.datetime64('2013-07-25T21:32:15')
cutoff_filename = 'D://Google Drive//Uni//PhD 2017//Josh PhD Share Folder//Thesis//Data_Plots//20130725_RBSP-A//cutoffs_only.txt'
_band_start = 0.21
_band_end = 0.43
elif case == 1:
# Single Packet near end
parameter_time = np.datetime64('2013-07-25T21:42:12')
packet_start = np.datetime64('2013-07-25T21:42:12')
#packet_end = np.datetime64('2013-07-25T21:42:45')
packet_end = np.datetime64('2013-07-25T21:43:00')
cutoff_filename = 'D://Google Drive//Uni//PhD 2017//Josh PhD Share Folder//Thesis//Data_Plots//20130725_RBSP-A//cutoffs_only.txt'
_band_start = 0.43
_band_end = 0.76
elif case == 2:
# Single Packet
parameter_time = np.datetime64('2015-01-16T04:32:04')
packet_start = np.datetime64('2015-01-16T04:27:00')
packet_end = np.datetime64('2015-01-16T05:08:30')
cutoff_filename = 'D://Google Drive//Uni//PhD 2017//Josh PhD Share Folder//Thesis//Data_Plots//20150116_RBSP-A//cutoffs_only.txt'
_band_start = 0.12
_band_end = 0.35
elif case == 3:
# Single Packet 1
packet_start = np.datetime64('2015-01-16T04:32:17')
packet_end = np.datetime64('2015-01-16T04:33:31')
parameter_time = np.datetime64('2015-01-16T04:32:34')
cutoff_filename = 'D://Google Drive//Uni//PhD 2017//Josh PhD Share Folder//Thesis//Data_Plots//20150116_RBSP-A//cutoffs_only.txt'
_band_start = 0.12
_band_end = 0.18
elif case == 4:
# Single Packet 2
packet_start = np.datetime64('2015-01-16T04:35:03')
packet_end = np.datetime64('2015-01-16T04:36:01')
parameter_time = np.datetime64('2015-01-16T04:35:18')
cutoff_filename = 'D://Google Drive//Uni//PhD 2017//Josh PhD Share Folder//Thesis//Data_Plots//20150116_RBSP-A//cutoffs_only.txt'
_band_start = 0.12
_band_end = 0.18
elif case == 5:
# Single Packet 2
packet_start = np.datetime64('2015-01-16T04:47:24')
packet_end = np.datetime64('2015-01-16T04:48:50')
parameter_time = np.datetime64('2015-01-16T04:47:31')
cutoff_filename = 'D://Google Drive//Uni//PhD 2017//Josh PhD Share Folder//Thesis//Data_Plots//20150116_RBSP-A//cutoffs_only.txt'
_band_start = 0.12
_band_end = 0.18
# Import cutoff-derived composition information
cutoff_dict = epd.read_cutoff_file(cutoff_filename)
time, mag, edens, cold_dens, hope_dens, hope_tpar, hope_tperp, hope_anis, L_vals, =\
load_and_interpolate_plasma_params(
plot_start, plot_end, probe, nsec=None,
rbsp_path=_rbsp_path, HM_filter_mhz=50.0,
time_array=None, check_interp=False)
time_idx = np.where(abs(time - parameter_time) == np.min(abs(time - parameter_time)))[0][0]
# Get oxygen concentration from cutoffs
cutoff = np.interp(parameter_time.astype(np.int64),
cutoff_dict['CUTOFF_TIME'].astype(np.int64),
cutoff_dict['CUTOFF_NORM'])
o_frac = epd.calculate_o_from_he_and_cutoff(cutoff, he_frac)
h_frac = 1. - he_frac - o_frac
# Cold plasma params, SI units
B0 = mag[time_idx]
name = np.array(['H' , 'He' , 'O' ])
mass = np.array([1.0 , 4.0 , 16.0 ]) * PMASS
charge = np.array([1.0 , 1.0 , 1.0 ]) * PCHARGE
density = np.array([h_frac, he_frac, o_frac ]) * edens[time_idx]
ani = np.array([0.0 , 0.0 , 0.0 ])
tpar = np.array([0.0 , 0.0 , 0.0 ])
tper = (ani + 1) * tpar
Species, PP = create_species_array(B0, name, mass, charge, density, tper, ani)
# Frequencies to evaluate, calculate wavenumber (cold approximation)
f_min = 0.07*PP['pcyc_rad'] / (2*np.pi)
f_max = 0.24*PP['pcyc_rad'] / (2*np.pi)
Nf = 10000
f_vals = np.linspace(f_min, f_max, Nf)
w_vals = 2*np.pi*f_vals
k_vals = nls.get_k_cold(w_vals, Species)
# Define hot proton parameters (velocities normalized c) : vth = sqrt(kT/m)?
# Remember: temperatures originally in eV
nh = hope_dens[0][time_idx]
wph2 = nh * PCHARGE ** 2 / (PMASS * EPS0)
Vth_para = np.sqrt(KB * hope_tpar[0][time_idx]*(PCHARGE/KB) / PMASS) / SPLIGHT
Vth_perp = np.sqrt(KB * hope_tperp[0][time_idx]*(PCHARGE/KB) / PMASS) / SPLIGHT
Q = 0.5
# Curvature parameters (this has the most wiggle room)
a = 4.5 / (L*RE)**2
a = a*(SPLIGHT**2/PP['pcyc_rad']**2)
Vg, Vp, Vr = nls.get_velocities(w_vals, Species, PP, normalize=True)
s0, s1, s2 = nls.get_inhomogeneity_terms(w_vals, Species, PP, Vth_perp, normalize_vel=True)
# Normalize input parameters
wph = np.sqrt(wph2) / PP['pcyc_rad']
w = w_vals / PP['pcyc_rad']
# DO THE ACTUAL CALCULATION (All hands off from here, using existing code/proforma)
tau = 1.00
B_th = nls.get_threshold_amplitude(w, wph, Q, s2, a, Vp, Vr, Vth_para, Vth_perp)
# Filter zeros and infs:
B_th[B_th == np.inf] = np.nan
B_th[B_th == 0] = np.nan
# Load EMIC data IMF values
mag_time, pc1_mags, HM_mags, imf_time, IA, IF, IP, stime, sfreq, spower, gyfreqs = \
load_EMIC_IMFs_and_dynspec(packet_start, packet_end)
#%% PLOT: NaN's and inf's in arrays: How to filter to plot? Set to all NaN's
# Also semilogy doesn't like zero: Set to NaN
#
# Plot axes:
# -- Dynamic spectra for context, /w label for parameter time and packet times. Maybe IF?
# -- Plot IMF just for check?
# -- Large bottom plot: IF vs. IA
# -- Maybe try a plot for linear/nonlinear growth rates vs. frequency?
# -- Also should look at velocities/energies so we can see what sort of values are resonant
# (Do this separately)
# Filter IF/IA if outside bandpassed frequencies
if True:
for ii in range(IF[0].shape[0]):
for jj in [0, 1]:
if IF[jj][ii, 0] > _band_end or IF[jj][ii, 0] < _band_start:
IF[jj][ii, 0] = np.nan
# Maybe add extra filter for amplitude lower than a certain fraction (e.g. <0.1 nT)
plt.ioff()
lpad = 20; fsize=12
fig, axes = plt.subplots(nrows=3, ncols=2, figsize=(8.00, 0.5*11.00),
gridspec_kw={'width_ratios':[1, 0.01],
'height_ratios':[1, 0.3, 2]
})
# Spectra/IP
im0 = axes[0, 0].pcolormesh(stime, sfreq, spower.sum(axis=0).T, cmap='jet',
norm=colors.LogNorm(vmin=1e-4, vmax=1e1))
axes[0, 0].set_ylim(0, fmax)
axes[0, 0].set_ylabel('$f$\n(Hz)', rotation=0, labelpad=lpad, fontsize=fsize)
fig.colorbar(im0, cax=axes[0, 1], extend='both').set_label(
r'$\frac{nT^2}{Hz}$', fontsize=fsize+2, rotation=0, labelpad=20)
axes[0, 0].plot(mag_time, gyfreqs[1], c='yellow', label='$f_{cHe^+}$')
axes[0, 0].plot(mag_time, gyfreqs[2], c='r', label='$f_{cO^+}$')
axes[0, 0].legend(loc='upper right')
axes[0, 0].plot(imf_time, IF[0][:, 0], c='k', lw=0.75)
axes[0, 0].plot(imf_time, IF[1][:, 0], c='k', lw=0.75, alpha=0.8)
#axes[0, 0].plot(imf_time, IF[2][:, 0], c='k', lw=0.75, alpha=0.6)
axes[0, 0].axvline(parameter_time, color='white', ls='-' , alpha=0.7)
axes[0, 0].set_xlim(plot_start, plot_end)
axes[0, 0].axhline(_band_start, color='white', ls='--')
axes[0, 0].axhline(_band_end , color='white', ls='--')
# Bth, Bopt, Inst. Amplitudes
axes[2, 0].plot(f_vals, B_th*B0*1e9, c='k', ls='--', label=r'$B_{th}$')
for tau in [0.25, 0.5, 1.0, 2.0]:
B_opt = nls.get_optimum_amplitude(w, wph, Q, tau, s0, s1, Vg, Vr, Vth_para, Vth_perp)
B_opt[B_opt == np.inf] = np.nan
B_opt[B_opt == 0] = np.nan
tau_lbl = r'$B_{opt}$' if tau==0.25 else None
axes[2, 0].plot(f_vals, B_opt*B0*1e9, c='k', ls='-', label=tau_lbl)
xv = 0.50
yi = np.where(abs(f_vals - xv) == abs(f_vals - xv).min())[0][0]
yv = B_opt[yi]*B0*1e9
axes[2, 0].text(xv, yv, f'{tau:.2f}', ha='center', bbox={'facecolor':'white', 'alpha':1.0, 'edgecolor':'white'})
axes[2, 0].set_ylabel('$B$\n(nT)', rotation=0, labelpad=20, fontsize=fsize)
    axes[2, 0].set_xlabel('$f$ (Hz)', fontsize=fsize)
axes[2, 0].set_ylim(0, B_max)
axes[2, 0].set_xlim(f_vals[0], f_vals[-1])
axes[2, 0].tick_params(top=True, right=True)
m_size = 1
axes[2, 0].scatter(IF[0][:, 0], IA[0][:, 0], c='b', s=m_size, marker='.', label='$B_\\nu$')
axes[2, 0].scatter(IF[1][:, 0], IA[1][:, 0], c='r', s=m_size, marker='.', label='$B_\phi$')
#axes[3, 0].scatter(IF[2][:, 0], IA[2][:, 0], c='k', s=m_size)
# Start/Stop
axes[2, 0].scatter(IF[0][0, 0], IA[0][0, 0], c='b', s=40, marker='o')
axes[2, 0].scatter(IF[1][0, 0], IA[1][0, 0], c='r', s=40, marker='o')
axes[2, 0].scatter(IF[0][-1, 0], IA[0][-1, 0], c='b', s=40, marker='x')
axes[2, 0].scatter(IF[1][-1, 0], IA[1][-1, 0], c='r', s=40, marker='x')
axes[2, 0].legend(loc='upper right')
axes[1, 0].set_visible(False)
axes[1, 1].set_visible(False)
axes[2, 1].set_visible(False)
axes[0, 0].xaxis.set_major_formatter(mdates.DateFormatter('%H:%M'))
fig.tight_layout()
fig.subplots_adjust(wspace=0.05, hspace=0)
fig.align_ylabels()
if save_plot == True:
save_string = parameter_time.astype(object).strftime('%Y%m%d_%H%M%S')
print('Saving plot...')
fig.savefig(_plot_path + 'NONLINEAR_TRACE_' + save_string + '.png', dpi=dpi)
plt.close('all')
else:
plt.show() | [
"[email protected]"
] | |
b4c4fc2a2bbd87a9e3d856bcc2e74c883eb189d1 | 066ee4df594a5dc90335d271b9d5a1b1e2a4d34c | /y/google-cloud-sdk/platform/google_appengine/google/appengine/ext/cloudstorage/cloudstorage_stub.py | 069b7b65ad138552819b75d0ada18b1defcf0999 | [
"Apache-2.0",
"LGPL-2.1-or-later",
"BSD-3-Clause",
"MIT",
"GPL-2.0-or-later",
"MPL-1.1",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | ychen820/microblog | a2d82447525325ec58285c2e5db58b79cceaca1b | d379afa2db3582d5c3be652165f0e9e2e0c154c6 | refs/heads/master | 2021-01-20T05:58:48.424357 | 2015-04-28T22:03:09 | 2015-04-28T22:03:09 | 32,948,331 | 0 | 2 | BSD-3-Clause | 2020-07-25T05:04:35 | 2015-03-26T19:45:07 | Python | UTF-8 | Python | false | false | 18,503 | py | #!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Stub for Google storage."""
import calendar
import datetime
import hashlib
import httplib
import StringIO
from google.appengine.api import datastore
from google.appengine.api import namespace_manager
from google.appengine.api.blobstore import blobstore_stub
from google.appengine.ext import db
from google.appengine.ext.cloudstorage import common
_GCS_DEFAULT_CONTENT_TYPE = 'binary/octet-stream'
class _AE_GCSFileInfo_(db.Model):
"""Store GCS specific info.
GCS allows user to define arbitrary metadata via header x-goog-meta-foo: bar.
These headers are returned when user does a GET or HEAD on the object.
Key name is blobkey.
"""
filename = db.StringProperty(required=True)
finalized = db.BooleanProperty(required=True)
raw_options = db.StringListProperty()
size = db.IntegerProperty()
next_offset = db.IntegerProperty(default=0)
creation = db.DateTimeProperty()
content_type = db.StringProperty(default=_GCS_DEFAULT_CONTENT_TYPE)
etag = db.ByteStringProperty()
def get_options(self):
return dict(o.split(':', 1) for o in self.raw_options)
def set_options(self, options_dict):
self.raw_options = [
'%s:%s' % (k.lower(), v) for k, v in options_dict.iteritems()]
if 'content-type' in options_dict:
self.content_type = options_dict['content-type']
options = property(get_options, set_options)
@classmethod
def kind(cls):
return blobstore_stub._GS_INFO_KIND
class _AE_GCSPartialFile_(db.Model):
"""Store partial content for uploading files."""
end = db.IntegerProperty(required=True)
partial_content = db.TextProperty(required=True)
class CloudStorageStub(object):
"""Google Cloud Storage stub implementation.
We use blobstore stub to store files. All metadata are stored
in _AE_GCSFileInfo_.
Note: this Google Cloud Storage stub is designed to work with
apphosting.ext.cloudstorage.storage_api.py.
It only implements the part of GCS storage_api.py uses, and its interface
maps to GCS XML APIs.
"""
def __init__(self, blob_storage):
"""Initialize.
Args:
blob_storage:
apphosting.api.blobstore.blobstore_stub.BlobStorage instance
"""
self.blob_storage = blob_storage
def _filename_to_blobkey(self, filename):
"""Get blobkey for filename.
Args:
filename: gcs filename of form /bucket/filename.
Returns:
blobinfo's datastore's key name, aka, blobkey.
"""
common.validate_file_path(filename)
return blobstore_stub.BlobstoreServiceStub.CreateEncodedGoogleStorageKey(
filename[1:])
@db.non_transactional
def post_start_creation(self, filename, options):
"""Start object creation with a POST.
This implements the resumable upload XML API.
Only major limitation of current implementation is that we don't
support multiple upload sessions for the same GCS file. Previous
_AE_GCSFileInfo (which represents either a finalized file, or
an upload session) will be removed when a new upload session is
created.
Args:
filename: gcs filename of form /bucket/filename.
options: a dict containing all user specified request headers.
e.g. {'content-type': 'foo', 'x-goog-meta-bar': 'bar'}.
Returns:
a token (blobkey) used for continuing upload.
"""
ns = namespace_manager.get_namespace()
try:
namespace_manager.set_namespace('')
common.validate_file_path(filename)
token = self._filename_to_blobkey(filename)
gcs_file = _AE_GCSFileInfo_.get_by_key_name(token)
self._cleanup_old_file(gcs_file)
new_file = _AE_GCSFileInfo_(key_name=token,
filename=filename,
finalized=False)
new_file.options = options
new_file.put()
return token
finally:
namespace_manager.set_namespace(ns)
@db.non_transactional
def _cleanup_old_file(self, gcs_file):
"""Clean up the old version of a file.
The old version may or may not be finalized yet. Either way,
when user tries to create a file that already exists, we delete the
old version first.
Args:
gcs_file: an instance of _AE_GCSFileInfo_.
"""
if gcs_file:
if gcs_file.finalized:
blobkey = gcs_file.key().name()
self.blob_storage.DeleteBlob(blobkey)
else:
db.delete(_AE_GCSPartialFile_.all().ancestor(gcs_file))
gcs_file.delete()
@db.non_transactional
def put_empty(self, token):
"""Empty put is used to query upload progress.
    The file must not have finished uploading.
Args:
token: upload token returned by post_start_creation.
Returns:
last offset uploaded. -1 if none has been uploaded.
Raises:
ValueError: if token matches no in progress uploads.
"""
ns = namespace_manager.get_namespace()
try:
namespace_manager.set_namespace('')
gcs_file = _AE_GCSFileInfo_.get_by_key_name(token)
if not gcs_file:
raise ValueError('Invalid token', httplib.BAD_REQUEST)
return gcs_file.next_offset - 1
finally:
namespace_manager.set_namespace(ns)
@db.non_transactional
def put_continue_creation(self, token, content, content_range,
length=None,
_upload_filename=None):
"""Continue object upload with PUTs.
This implements the resumable upload XML API.
Args:
token: upload token returned by post_start_creation.
content: object content. None if no content was provided with this
PUT request.
content_range: a (start, end) tuple specifying the content range of this
        chunk. Both are inclusive according to the XML API. None if content is None.
length: file length, if this is the last chunk of file content.
_upload_filename: internal use. Might be removed any time! This is
used by blobstore to pass in the upload filename from user.
Returns:
_AE_GCSFileInfo entity for this file if the file is finalized.
Raises:
ValueError: if something is invalid. The exception.args is a tuple of
(msg, http status code).
"""
ns = namespace_manager.get_namespace()
try:
namespace_manager.set_namespace('')
gcs_file = _AE_GCSFileInfo_.get_by_key_name(token)
if not gcs_file:
raise ValueError('Invalid token', httplib.BAD_REQUEST)
if gcs_file.next_offset == -1:
raise ValueError('Received more uploads after file %s '
'was finalized.' % gcs_file.filename,
httplib.OK)
if content:
start, end = content_range
if len(content) != (end - start + 1):
raise ValueError('Invalid content range %d-%d' % content_range,
httplib.REQUESTED_RANGE_NOT_SATISFIABLE)
if start > gcs_file.next_offset:
raise ValueError('Expect start offset %s, got %s' %
(gcs_file.next_offset, start),
httplib.REQUESTED_RANGE_NOT_SATISFIABLE)
elif end < gcs_file.next_offset:
return
else:
content = content[gcs_file.next_offset - start:]
start = gcs_file.next_offset
blobkey = '%s-%d-%d' % (token, start, end)
self.blob_storage.StoreBlob(blobkey, StringIO.StringIO(content))
new_content = _AE_GCSPartialFile_(
parent=gcs_file,
key_name='%020d' % start,
partial_content=blobkey,
start=start,
end=end + 1)
new_content.put()
gcs_file.next_offset = end + 1
gcs_file.put()
if length is not None and length != gcs_file.next_offset:
raise ValueError(
'Got finalization request with wrong file length. '
'Expecting %s, got %s' % (gcs_file.next_offset, length),
httplib.REQUESTED_RANGE_NOT_SATISFIABLE)
elif length is not None:
return self._end_creation(token, _upload_filename)
finally:
namespace_manager.set_namespace(ns)
@db.non_transactional
def put_copy(self, src, dst, options):
"""Copy file from src to dst.
Metadata is copied.
Args:
src: /bucket/filename. This file must exist.
dst: /bucket/filename.
options: a dict containing all user specified request headers.
e.g. {'content-type': 'foo', 'x-goog-meta-bar': 'bar'}. If None,
old metadata is copied.
"""
common.validate_file_path(src)
common.validate_file_path(dst)
ns = namespace_manager.get_namespace()
try:
namespace_manager.set_namespace('')
src_blobkey = self._filename_to_blobkey(src)
source = _AE_GCSFileInfo_.get_by_key_name(src_blobkey)
token = self._filename_to_blobkey(dst)
new_file = _AE_GCSFileInfo_(key_name=token,
filename=dst,
finalized=True)
if options:
new_file.options = options
else:
new_file.options = source.options
new_file.etag = source.etag
new_file.size = source.size
new_file.creation = source.creation
new_file.put()
finally:
namespace_manager.set_namespace(ns)
if src_blobkey != token:
local_file = self.blob_storage.OpenBlob(src_blobkey)
self.blob_storage.StoreBlob(token, local_file)
@db.non_transactional
def _end_creation(self, token, _upload_filename):
"""End object upload.
Args:
token: upload token returned by post_start_creation.
Returns:
_AE_GCSFileInfo Entity for this file.
Raises:
ValueError: if token is invalid. Or file is corrupted during upload.
Save file content to blobstore. Save blobinfo and _AE_GCSFileInfo.
"""
gcs_file = _AE_GCSFileInfo_.get_by_key_name(token)
if not gcs_file:
raise ValueError('Invalid token')
if gcs_file.finalized:
return gcs_file
error_msg, content = self._get_content(gcs_file)
if error_msg:
raise ValueError(error_msg)
gcs_file.etag = hashlib.md5(content).hexdigest()
gcs_file.creation = datetime.datetime.utcnow()
gcs_file.size = len(content)
blob_info = datastore.Entity('__BlobInfo__', name=str(token), namespace='')
blob_info['content_type'] = gcs_file.content_type
blob_info['creation'] = gcs_file.creation
blob_info['filename'] = _upload_filename
blob_info['md5_hash'] = gcs_file.etag
blob_info['size'] = gcs_file.size
datastore.Put(blob_info)
self.blob_storage.StoreBlob(token, StringIO.StringIO(content))
gcs_file.finalized = True
gcs_file.next_offset = -1
gcs_file.put()
return gcs_file
@db.transactional(propagation=db.INDEPENDENT)
def _get_content(self, gcs_file):
"""Aggregate all partial content of the gcs_file.
Args:
gcs_file: an instance of _AE_GCSFileInfo_.
Returns:
(error_msg, content) tuple. error_msg is set if the file is
corrupted during upload. Otherwise content is set to the
aggregation of all partial contents.
"""
content = ''
previous_end = 0
error_msg = ''
for partial in (_AE_GCSPartialFile_.all(namespace='').ancestor(gcs_file).
order('__key__')):
start = int(partial.key().name())
if not error_msg:
if start < previous_end:
error_msg = 'File is corrupted due to missing chunks.'
elif start > previous_end:
error_msg = 'File is corrupted due to overlapping chunks'
previous_end = partial.end
content += self.blob_storage.OpenBlob(partial.partial_content).read()
self.blob_storage.DeleteBlob(partial.partial_content)
partial.delete()
if error_msg:
gcs_file.delete()
content = ''
return error_msg, content
@db.non_transactional
def get_bucket(self,
bucketpath,
prefix,
marker,
max_keys,
delimiter):
"""Get bucket listing with a GET.
How GCS listbucket work in production:
GCS tries to return as many items as possible in a single response. If
there are more items satisfying user's query and the current request
took too long (e.g spent on skipping files in a subdir) or items to return
gets too big (> max_keys), it returns fast and sets IsTruncated
and NextMarker for continuation. They serve redundant purpose: if
NextMarker is set, IsTruncated is True.
Note NextMarker is not where GCS scan left off. It is
only valid for the exact same type of query the marker was generated from.
For example, if a marker is generated from query with delimiter, the marker
is the name of a subdir (instead of the last file within the subdir). Thus
you can't use this marker to issue a query without delimiter.
Args:
bucketpath: gcs bucket path of form '/bucket'
prefix: prefix to limit listing.
marker: a str after which to start listing. Exclusive.
max_keys: max items we scan & return.
delimiter: delimiter for directory.
See https://developers.google.com/storage/docs/reference-methods#getbucket
for details.
Returns:
A tuple of (a list of GCSFileStat for files or directories sorted by
filename, next_marker to use as next marker, is_truncated boolean to
indicate if there are more results satisfying query).
"""
common.validate_bucket_path(bucketpath)
q = _AE_GCSFileInfo_.all(namespace='')
fully_qualified_prefix = '/'.join([bucketpath, prefix])
if marker:
q.filter('filename >', '/'.join([bucketpath, marker]))
else:
q.filter('filename >=', fully_qualified_prefix)
result = set()
name = None
first = True
first_dir = None
for info in q.run():
if not info.filename.startswith(fully_qualified_prefix):
break
if len(result) == max_keys:
break
info = db.get(info.key())
if not info:
continue
name = info.filename
if delimiter:
start_index = name.find(delimiter, len(fully_qualified_prefix))
if start_index != -1:
name = name[:start_index + len(delimiter)]
if marker and (first or name == first_dir):
first = False
first_dir = name
else:
result.add(common.GCSFileStat(name, st_size=None,
st_ctime=None, etag=None,
is_dir=True))
continue
if info.finalized:
first = False
result.add(common.GCSFileStat(
filename=name,
st_size=info.size,
st_ctime=calendar.timegm(info.creation.utctimetuple()),
etag=info.etag))
def is_truncated():
"""Check if there are more results satisfying the query."""
if not result:
return False
q = _AE_GCSFileInfo_.all(namespace='')
q.filter('filename >', name)
info = None
if delimiter and name.endswith(delimiter):
for info in q.run():
if not info.filename.startswith(name):
break
if info is not None and info.filename.startswith(name):
info = None
else:
info = q.get()
if info is None or not info.filename.startswith(fully_qualified_prefix):
return False
return True
result = list(result)
result.sort()
truncated = is_truncated()
next_marker = name if truncated else None
return result, next_marker, truncated
@db.non_transactional
def get_object(self, filename, start=0, end=None):
"""Get file content with a GET.
Args:
filename: gcs filename of form '/bucket/filename'.
start: start offset to request. Inclusive.
end: end offset to request. Inclusive.
Returns:
The segment of file content requested.
Raises:
ValueError: if file doesn't exist.
"""
common.validate_file_path(filename)
blobkey = self._filename_to_blobkey(filename)
key = blobstore_stub.BlobstoreServiceStub.ToDatastoreBlobKey(blobkey)
gcsfileinfo = db.get(key)
if not gcsfileinfo or not gcsfileinfo.finalized:
raise ValueError('File does not exist.')
local_file = self.blob_storage.OpenBlob(blobkey)
local_file.seek(start)
if end:
return local_file.read(end - start + 1)
else:
return local_file.read()
@db.non_transactional
def head_object(self, filename):
"""Get file stat with a HEAD.
Args:
filename: gcs filename of form '/bucket/filename'
Returns:
A GCSFileStat object containing file stat. None if file doesn't exist.
"""
common.validate_file_path(filename)
blobkey = self._filename_to_blobkey(filename)
key = blobstore_stub.BlobstoreServiceStub.ToDatastoreBlobKey(blobkey)
info = db.get(key)
if info and info.finalized:
metadata = common.get_metadata(info.options)
filestat = common.GCSFileStat(
filename=info.filename,
st_size=info.size,
etag=info.etag,
st_ctime=calendar.timegm(info.creation.utctimetuple()),
content_type=info.content_type,
metadata=metadata)
return filestat
return None
@db.non_transactional
def delete_object(self, filename):
"""Delete file with a DELETE.
Args:
filename: gcs filename of form '/bucket/filename'
Returns:
True if file is deleted. False if file doesn't exist.
"""
common.validate_file_path(filename)
blobkey = self._filename_to_blobkey(filename)
key = blobstore_stub.BlobstoreServiceStub.ToDatastoreBlobKey(blobkey)
gcsfileinfo = db.get(key)
if not gcsfileinfo:
return False
blobstore_stub.BlobstoreServiceStub.DeleteBlob(blobkey, self.blob_storage)
return True
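# --- Illustrative sketches (editor addition, not part of the original SDK file).
# They rely only on the CloudStorageStub methods defined above and show the two
# protocols the docstrings describe: the resumable-upload flow
# (post_start_creation / put_continue_creation) and marker-based listing
# continuation (get_bucket). The helper names, chunk/page sizes and the
# content-type option value are hypothetical choices, not part of the stub API.
def _example_upload(stub, filename, data, chunk_size=1024):
  """Drive the resumable-upload flow for a small in-memory payload."""
  token = stub.post_start_creation(filename, {'content-type': 'text/plain'})
  offset = 0
  while offset < len(data):
    chunk = data[offset:offset + chunk_size]
    start, end = offset, offset + len(chunk) - 1
    is_last = end == len(data) - 1
    # Passing length with the final chunk finalizes the file.
    stub.put_continue_creation(token, chunk, (start, end),
                               length=len(data) if is_last else None)
    offset = end + 1
def _example_list_bucket(stub, bucketpath, prefix='', delimiter='', page_size=100):
  """Page through a bucket listing until get_bucket reports no truncation."""
  results = []
  marker = None
  while True:
    page, next_marker, truncated = stub.get_bucket(
        bucketpath, prefix, marker, page_size, delimiter)
    results.extend(page)
    if not truncated:
      break
    # get_bucket expects a marker relative to the bucket, while next_marker is
    # returned as a full '/bucket/object' path, so strip the bucket prefix.
    marker = next_marker[len(bucketpath) + 1:]
  return results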
| [
"[email protected]"
] | |
4e4ab4639104f571fb8a3fb079a9036e4b226ebf | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_59/321.py | 0a9767e0766e4ba935813b4c36b5d417c281ff86 | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 819 | py | f = open("A-large.in")
T = int(f.readline())
out = open("A-large.out", "w")
for i in range(T):
N, M = [int(x) for x in f.readline().split()]
dirs = []
for j in range(N):
dirs.append(f.readline()[1:].strip())
ndirs = []
for j in range(M):
ndirs.append(f.readline()[1:].strip())
tree = {}
for d in dirs:
cur = tree
for fold in d.split("/"):
if not fold in cur:
cur[fold] = {}
cur = cur[fold]
count = 0
for d in ndirs:
cur = tree
for fold in d.split("/"):
if not fold in cur:
count += 1
cur[fold] = {}
cur = cur[fold]
out.write("Case #%d: %d\n" % (i+1, count))
out.close()
f.close() | [
"[email protected]"
] | |
5e61a1ac0bdda6850396f1dde2ef6f5ad1ae89a0 | d125c002a6447c3f14022b786b07712a7f5b4974 | /tests/bugs/core_5647_test.py | 6270292da43b931230fb13d42925508c89a4e987 | [
"MIT"
] | permissive | FirebirdSQL/firebird-qa | 89d5b0035071f9f69d1c869997afff60c005fca9 | cae18186f8c31511a7f68248b20f03be2f0b97c6 | refs/heads/master | 2023-08-03T02:14:36.302876 | 2023-07-31T23:02:56 | 2023-07-31T23:02:56 | 295,681,819 | 3 | 2 | MIT | 2023-06-16T10:05:55 | 2020-09-15T09:41:22 | Python | UTF-8 | Python | false | false | 1,390 | py | #coding:utf-8
"""
ID: issue-5913
ISSUE: 5913
TITLE: Increase number of formats/versions of views from 255 to 32K
DESCRIPTION:
JIRA: CORE-5647
FBTEST: bugs.core_5647
"""
import pytest
from firebird.qa import *
db = db_factory()
test_script = """
set bail on;
set list on;
set term ^;
execute block returns(ret_code smallint) as
declare n int = 300;
begin
while (n > 0) do
begin
if (mod(n, 2) = 0) then
begin
in autonomous transaction do
begin
execute statement 'create or alter view vw1 (dump1) as select 1 from rdb$database';
end
end
else
begin
in autonomous transaction do
begin
execute statement 'create or alter view vw1 (dump1, dump2) as select 1, 2 from rdb$database';
end
end
n = n - 1;
end
ret_code = -abs(n);
suspend;
end ^
set term ;^
quit;
"""
act = isql_act('db', test_script)
expected_stdout = """
RET_CODE 0
"""
@pytest.mark.version('>=3.0.8')
def test_1(act: Action):
act.expected_stdout = expected_stdout
act.execute()
assert act.clean_stdout == act.clean_expected_stdout
| [
"[email protected]"
] | |
a59632cf8ae532ffe1310f5a4589020417954045 | e2f68f7f2b96af92d0d56ef9aa3119e7909cd992 | /dataplicity/m2m/remoteprocess.py | 96322c408309ccf7a475c5dba744ded0b077d147 | [
"BSD-3-Clause",
"BSD-2-Clause"
] | permissive | anuradhawick/dataplicity-agent | c89edd563103aa251f858d38aeba8ed6c605481c | 9d4c234f0d7b24aa144a079f54883d38eb8b9f40 | refs/heads/master | 2022-04-09T18:16:18.590182 | 2020-03-26T12:10:44 | 2020-03-26T12:10:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,356 | py | """Manage a subprocess that streams to a remote side"""
from __future__ import unicode_literals
from __future__ import print_function
import json
import logging
import os
import signal
import shlex
from . import proxy
log = logging.getLogger("m2m")
class RemoteProcess(proxy.Interceptor):
"""Process managed remotely over m2m."""
def __init__(self, command, channel, user=None, group=None, size=None):
self.command = command
self.channel = channel
self.size = size
self._closed = False
self.channel.set_callbacks(
on_data=self.on_data, on_close=self.on_close, on_control=self.on_control
)
super(RemoteProcess, self).__init__(user=user, group=group, size=size)
@property
def is_closed(self):
return self._closed
def __repr__(self):
return "RemoteProcess({!r}, {!r})".format(self.command, self.channel)
def run(self):
self.spawn(shlex.split(self.command))
def on_data(self, data):
try:
self.stdin_read(data)
except Exception:
self.channel.close()
def on_control(self, data):
try:
control = json.loads(data)
except Exception:
log.exception("error decoding control")
return
control_type = control.get("type", None)
if control_type == "window_resize":
size = control["size"]
log.debug("resize terminal to {} X {}".format(*size))
self.resize_terminal(size)
else:
log.warning("unknown control packet {}".format(control_type))
def on_close(self):
self.close()
def master_read(self, data):
self.channel.write(data)
super(RemoteProcess, self).master_read(data)
def write_master(self, data):
super(RemoteProcess, self).write_master(data)
def close(self):
if not self._closed and self.pid is not None:
log.debug("sending kill signal to %r", self)
# TODO: Implement a non-blocking kill
os.kill(self.pid, signal.SIGKILL)
log.debug("waiting for %r", self)
os.waitpid(self.pid, 0)
log.debug("killed %r", self)
self._closed = True
def __enter__(self):
return self
def __exit__(self, *args):
self.close()
| [
"[email protected]"
] | |
96cb59c86f3f04d1a645ab6dbe5dec9566815298 | f693c9c487d31a677f009afcdf922b4e7f7d1af0 | /biomixer-venv/lib/python3.9/site-packages/pylint/config/configuration_mixin.py | 2b8402b1de70ad6af27d329a9b8a26da1ae427c9 | [
"MIT"
] | permissive | Shellowb/BioMixer | 9048b6c07fa30b83c87402284f0cebd11a58e772 | 1939261589fe8d6584a942a99f0308e898a28c1c | refs/heads/master | 2022-10-05T08:16:11.236866 | 2021-06-29T17:20:45 | 2021-06-29T17:20:45 | 164,722,008 | 1 | 3 | MIT | 2022-09-30T20:23:34 | 2019-01-08T19:52:12 | Python | UTF-8 | Python | false | false | 1,105 | py | # Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
# For details: https://github.com/PyCQA/pylint/blob/master/LICENSE
from pylint.config.option_manager_mixin import OptionsManagerMixIn
from pylint.config.options_provider_mixin import OptionsProviderMixIn
class ConfigurationMixIn(OptionsManagerMixIn, OptionsProviderMixIn):
"""basic mixin for simple configurations which don't need the
manager / providers model"""
def __init__(self, *args, **kwargs):
if not args:
kwargs.setdefault("usage", "")
OptionsManagerMixIn.__init__(self, *args, **kwargs)
OptionsProviderMixIn.__init__(self)
if not getattr(self, "option_groups", None):
self.option_groups = []
for _, optdict in self.options:
try:
gdef = (optdict["group"].upper(), "")
except KeyError:
continue
if gdef not in self.option_groups:
self.option_groups.append(gdef)
self.register_options_provider(self, own_group=False)
| [
"[email protected]"
] | |
42ff135cd3c609889f324bcda93251136a3573db | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/421/usersdata/309/85689/submittedfiles/tomadas.py | e849c70f0e33e8eac82fa6a18c34f43c973659d4 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 560 | py | # -*- coding: utf-8 -*-
import math
# START YOUR CODE HERE
soma =1
for i in range(1,5,1) :
while (True):
t=int(input("Digite o valor de tomadas da régua %d, por gentileza:" %i))
if (t>0):
break
soma=soma+t
print ("%d"%soma)
#t2=int( input("Digite o valor de tomadas na segunda régua, por gentileza:"))
#t3=int(input("Digite o valor de tomadas na terceira régua, por gentileza:"))
#t4=int(input("Digite o valor de tomadas na quarta régua, por gentileza:"))
| [
"[email protected]"
] | |
bc624e51be65ae34c43f9fe58871a1a0b9359c12 | a66460a46611483dfbdc94c7996893f427e60d97 | /ansible/my_env/lib/python2.7/site-packages/ansible/modules/network/f5/bigip_pool_member.py | b2225b411eb0334eb3d8273b4c28adc1a05c030a | [
"GPL-3.0-only",
"MIT"
] | permissive | otus-devops-2019-02/yyashkin_infra | 06b57807dde26f94f501828c07503d6bf1d70816 | 0cd0c003884155ac922e3e301305ac202de7028c | refs/heads/master | 2020-04-29T02:42:22.056724 | 2019-05-15T16:24:35 | 2019-05-15T16:24:35 | 175,780,718 | 0 | 0 | MIT | 2019-05-15T16:24:36 | 2019-03-15T08:37:35 | HCL | UTF-8 | Python | false | false | 26,646 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# Copyright (c) 2013 Matt Hite <[email protected]>
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: bigip_pool_member
short_description: Manages F5 BIG-IP LTM pool members
description:
- Manages F5 BIG-IP LTM pool members via iControl SOAP API.
version_added: 1.4
options:
name:
description:
- Name of the node to create, or re-use, when creating a new pool member.
- This parameter is optional and, if not specified, a node name will be
created automatically from either the specified C(address) or C(fqdn).
- The C(enabled) state is an alias of C(present).
version_added: 2.6
state:
description:
- Pool member state.
required: True
default: present
choices:
- present
- absent
- enabled
- disabled
- forced_offline
pool:
description:
- Pool name. This pool must exist.
required: True
partition:
description:
- Partition
default: Common
address:
description:
- IP address of the pool member. This can be either IPv4 or IPv6. When creating a
new pool member, one of either C(address) or C(fqdn) must be provided. This
parameter cannot be updated after it is set.
aliases:
- ip
- host
version_added: 2.2
fqdn:
description:
- FQDN name of the pool member. This can be any name that is a valid RFC 1123 DNS
name. Therefore, the only characters that can be used are "A" to "Z",
"a" to "z", "0" to "9", the hyphen ("-") and the period (".").
      - FQDN names must include at least one period, delineating the host from
the domain. ex. C(host.domain).
- FQDN names must end with a letter or a number.
- When creating a new pool member, one of either C(address) or C(fqdn) must be
provided. This parameter cannot be updated after it is set.
aliases:
- hostname
version_added: 2.6
port:
description:
- Pool member port.
- This value cannot be changed after it has been set.
required: True
connection_limit:
description:
- Pool member connection limit. Setting this to 0 disables the limit.
description:
description:
- Pool member description.
rate_limit:
description:
- Pool member rate limit (connections-per-second). Setting this to 0
disables the limit.
ratio:
description:
- Pool member ratio weight. Valid values range from 1 through 100.
New pool members -- unless overridden with this value -- default
to 1.
preserve_node:
description:
      - When state is C(absent), the module attempts to remove the node that the pool
member references.
- The node will not be removed if it is still referenced by other pool
members. If this happens, the module will not raise an error.
- Setting this to C(yes) disables this behavior.
type: bool
version_added: 2.1
priority_group:
description:
- Specifies a number representing the priority group for the pool member.
- When adding a new member, the default is 0, meaning that the member has no priority.
- To specify a priority, you must activate priority group usage when you
create a new pool or when adding or removing pool members. When activated,
the system load balances traffic according to the priority group number
assigned to the pool member.
- The higher the number, the higher the priority, so a member with a priority
of 3 has higher priority than a member with a priority of 1.
version_added: 2.5
fqdn_auto_populate:
description:
- Specifies whether the system automatically creates ephemeral nodes using
the IP addresses returned by the resolution of a DNS query for a node
defined by an FQDN.
- When C(yes), the system generates an ephemeral node for each IP address
returned in response to a DNS query for the FQDN of the node. Additionally,
when a DNS response indicates the IP address of an ephemeral node no longer
exists, the system deletes the ephemeral node.
- When C(no), the system resolves a DNS query for the FQDN of the node
with the single IP address associated with the FQDN.
- When creating a new pool member, the default for this parameter is C(yes).
- This parameter is ignored when C(reuse_nodes) is C(yes).
type: bool
version_added: 2.6
reuse_nodes:
description:
- Reuses node definitions if requested.
default: yes
type: bool
version_added: 2.6
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
'''
EXAMPLES = '''
- name: Add pool member
bigip_pool_member:
server: lb.mydomain.com
user: admin
password: secret
state: present
pool: my-pool
partition: Common
host: "{{ ansible_default_ipv4['address'] }}"
port: 80
description: web server
connection_limit: 100
rate_limit: 50
ratio: 2
delegate_to: localhost
- name: Modify pool member ratio and description
bigip_pool_member:
server: lb.mydomain.com
user: admin
password: secret
state: present
pool: my-pool
partition: Common
host: "{{ ansible_default_ipv4['address'] }}"
port: 80
ratio: 1
description: nginx server
delegate_to: localhost
- name: Remove pool member from pool
bigip_pool_member:
server: lb.mydomain.com
user: admin
password: secret
state: absent
pool: my-pool
partition: Common
host: "{{ ansible_default_ipv4['address'] }}"
port: 80
delegate_to: localhost
- name: Force pool member offline
bigip_pool_member:
server: lb.mydomain.com
user: admin
password: secret
state: forced_offline
pool: my-pool
partition: Common
host: "{{ ansible_default_ipv4['address'] }}"
port: 80
delegate_to: localhost
- name: Create members with priority groups
bigip_pool_member:
server: lb.mydomain.com
user: admin
password: secret
pool: my-pool
partition: Common
host: "{{ item.address }}"
name: "{{ item.name }}"
priority_group: "{{ item.priority_group }}"
port: 80
delegate_to: localhost
loop:
- host: 1.1.1.1
name: web1
priority_group: 4
- host: 2.2.2.2
name: web2
priority_group: 3
- host: 3.3.3.3
name: web3
priority_group: 2
- host: 4.4.4.4
name: web4
priority_group: 1
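# Editor's illustrative addition (not from the original module documentation):
# registering a member by FQDN using the fqdn and fqdn_auto_populate options
# documented above. Server, credentials, pool and host names are placeholders.
- name: Add pool member by FQDN
  bigip_pool_member:
    server: lb.mydomain.com
    user: admin
    password: secret
    state: present
    pool: my-pool
    partition: Common
    fqdn: web01.mydomain.com
    fqdn_auto_populate: yes
    port: 80
  delegate_to: localhost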
'''
RETURN = '''
rate_limit:
description: The new rate limit, in connections per second, of the pool member.
returned: changed
type: int
sample: 100
connection_limit:
description: The new connection limit of the pool member
returned: changed
type: int
sample: 1000
description:
description: The new description of pool member.
returned: changed
type: string
sample: My pool member
ratio:
description: The new pool member ratio weight.
returned: changed
type: int
sample: 50
priority_group:
description: The new priority group.
returned: changed
type: int
sample: 3
fqdn_auto_populate:
description: Whether FQDN auto population was set on the member or not.
returned: changed
type: bool
sample: True
fqdn:
description: The FQDN of the pool member.
returned: changed
type: string
sample: foo.bar.com
address:
description: The address of the pool member.
returned: changed
type: string
sample: 1.2.3.4
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import env_fallback
try:
from library.module_utils.network.f5.bigip import HAS_F5SDK
from library.module_utils.network.f5.bigip import F5Client
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import cleanup_tokens
from library.module_utils.network.f5.common import fq_name
from library.module_utils.network.f5.common import is_valid_hostname
from library.module_utils.network.f5.common import f5_argument_spec
from library.module_utils.network.f5.ipaddress import is_valid_ip
from library.module_utils.network.f5.ipaddress import validate_ip_address
from library.module_utils.network.f5.ipaddress import validate_ip_v6_address
try:
from library.module_utils.network.f5.common import iControlUnexpectedHTTPError
except ImportError:
HAS_F5SDK = False
except ImportError:
from ansible.module_utils.network.f5.bigip import HAS_F5SDK
from ansible.module_utils.network.f5.bigip import F5Client
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import cleanup_tokens
from ansible.module_utils.network.f5.common import fq_name
from ansible.module_utils.network.f5.common import is_valid_hostname
from ansible.module_utils.network.f5.common import f5_argument_spec
from ansible.module_utils.network.f5.ipaddress import is_valid_ip
from ansible.module_utils.network.f5.ipaddress import validate_ip_address
from ansible.module_utils.network.f5.ipaddress import validate_ip_v6_address
try:
from ansible.module_utils.network.f5.common import iControlUnexpectedHTTPError
except ImportError:
HAS_F5SDK = False
class Parameters(AnsibleF5Parameters):
api_map = {
'rateLimit': 'rate_limit',
'connectionLimit': 'connection_limit',
'priorityGroup': 'priority_group',
}
api_attributes = [
'rateLimit', 'connectionLimit', 'description', 'ratio', 'priorityGroup',
'address', 'fqdn', 'session', 'state'
]
returnables = [
'rate_limit', 'connection_limit', 'description', 'ratio', 'priority_group',
'fqdn_auto_populate', 'session', 'state', 'fqdn', 'address'
]
updatables = [
'rate_limit', 'connection_limit', 'description', 'ratio', 'priority_group',
'fqdn_auto_populate', 'state'
]
class ModuleParameters(Parameters):
@property
def full_name(self):
delimiter = ':'
try:
if validate_ip_v6_address(self.full_name_dict['name']):
delimiter = '.'
except TypeError:
pass
return '{0}{1}{2}'.format(self.full_name_dict['name'], delimiter, self.port)
@property
def full_name_dict(self):
if self._values['name'] is None:
name = self._values['address'] if self._values['address'] else self._values['fqdn']
else:
name = self._values['name']
return dict(
name=name,
port=self.port
)
@property
def node_name(self):
return self.full_name_dict['name']
@property
def fqdn_name(self):
return self._values['fqdn']
@property
def fqdn(self):
result = {}
if self.fqdn_auto_populate:
result['autopopulate'] = 'enabled'
else:
result['autopopulate'] = 'disabled'
if self._values['fqdn'] is None:
return result
if not is_valid_hostname(self._values['fqdn']):
raise F5ModuleError(
"The specified 'fqdn' is not a valid hostname."
)
result['tmName'] = self._values['fqdn']
return result
@property
def pool(self):
        return fq_name(self.partition, self._values['pool'])
@property
def port(self):
if 0 > int(self._values['port']) or int(self._values['port']) > 65535:
raise F5ModuleError(
"Valid ports must be in range 0 - 65535"
)
return int(self._values['port'])
@property
def address(self):
if self._values['address'] is None:
return None
elif self._values['address'] == 'any6':
return 'any6'
if is_valid_ip(self._values['address']):
return self._values['address']
raise F5ModuleError(
"The specified 'address' value is not a valid IP address."
)
@property
def state(self):
if self._values['state'] == 'enabled':
return 'present'
return self._values['state']
class ApiParameters(Parameters):
@property
def allow(self):
if self._values['allow'] is None:
return ''
if self._values['allow'][0] == 'All':
return 'all'
allow = self._values['allow']
result = list(set([str(x) for x in allow]))
result = sorted(result)
return result
@property
def rate_limit(self):
if self._values['rate_limit'] is None:
return None
if self._values['rate_limit'] == 'disabled':
return 0
return int(self._values['rate_limit'])
@property
def state(self):
if self._values['state'] in ['user-up', 'unchecked', 'fqdn-up-no-addr', 'fqdn-up'] and self._values['session'] in ['user-enabled']:
return 'present'
elif self._values['state'] in ['down', 'up'] and self._values['session'] == 'monitor-enabled':
return 'present'
elif self._values['state'] in ['user-down'] and self._values['session'] in ['user-disabled']:
return 'forced_offline'
else:
return 'disabled'
class NodeApiParameters(Parameters):
pass
class Changes(Parameters):
def to_return(self):
result = {}
try:
for returnable in self.returnables:
result[returnable] = getattr(self, returnable)
result = self._filter_params(result)
except Exception:
pass
return result
class UsableChanges(Changes):
pass
class ReportableChanges(Changes):
@property
def ssl_cipher_suite(self):
default = ':'.join(sorted(Parameters._ciphers.split(':')))
if self._values['ssl_cipher_suite'] == default:
return 'default'
else:
return self._values['ssl_cipher_suite']
@property
def fqdn_auto_populate(self):
if self._values['fqdn'] is None:
return None
if 'autopopulate' in self._values['fqdn']:
if self._values['fqdn']['autopopulate'] == 'enabled':
return True
return False
@property
def fqdn(self):
if self._values['fqdn'] is None:
return None
if 'tmName' in self._values['fqdn']:
return self._values['fqdn']['tmName']
@property
def state(self):
if self._values['state'] in ['user-up', 'unchecked', 'fqdn-up-no-addr', 'fqdn-up'] and self._values['session'] in ['user-enabled']:
return 'present'
elif self._values['state'] in ['down', 'up'] and self._values['session'] == 'monitor-enabled':
return 'present'
elif self._values['state'] in ['user-down'] and self._values['session'] in ['user-disabled']:
return 'forced_offline'
else:
return 'disabled'
class Difference(object):
def __init__(self, want, have=None):
self.want = want
self.have = have
def compare(self, param):
try:
result = getattr(self, param)
return result
except AttributeError:
return self.__default(param)
def __default(self, param):
attr1 = getattr(self.want, param)
try:
attr2 = getattr(self.have, param)
if attr1 != attr2:
return attr1
except AttributeError:
return attr1
@property
def state(self):
if self.want.state == self.have.state:
return None
if self.want.state == 'forced_offline':
return {
'state': 'user-down',
'session': 'user-disabled'
}
elif self.want.state == 'disabled':
return {
'state': 'user-up',
'session': 'user-disabled'
}
elif self.want.state in ['present', 'enabled']:
return {
'state': 'user-up',
'session': 'user-enabled'
}
class ModuleManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.get('module', None)
self.client = kwargs.get('client', None)
self.want = ModuleParameters(params=self.module.params)
self.have = ApiParameters()
self.changes = UsableChanges()
def _set_changed_options(self):
changed = {}
for key in Parameters.returnables:
if getattr(self.want, key) is not None:
changed[key] = getattr(self.want, key)
if changed:
self.changes = UsableChanges(params=changed)
def _update_changed_options(self):
diff = Difference(self.want, self.have)
updatables = Parameters.updatables
changed = dict()
for k in updatables:
change = diff.compare(k)
if change is None:
continue
else:
if isinstance(change, dict):
changed.update(change)
else:
changed[k] = change
if changed:
self.changes = UsableChanges(params=changed)
return True
return False
def should_update(self):
result = self._update_changed_options()
if result:
return True
return False
def exec_module(self):
changed = False
result = dict()
state = self.want.state
try:
            if state in ['present', 'enabled', 'disabled', 'forced_offline']:
changed = self.present()
elif state == "absent":
changed = self.absent()
except iControlUnexpectedHTTPError as e:
raise F5ModuleError(str(e))
reportable = ReportableChanges(params=self.changes.to_return())
changes = reportable.to_return()
result.update(**changes)
result.update(dict(changed=changed))
self._announce_deprecations(result)
return result
def _announce_deprecations(self, result):
warnings = result.pop('__warnings', [])
for warning in warnings:
self.module.deprecate(
msg=warning['msg'],
version=warning['version']
)
def present(self):
if self.exists():
return self.update()
else:
return self.create()
def exists(self):
try:
pool = self.client.api.tm.ltm.pools.pool.load(
name=self.want.pool,
partition=self.want.partition
)
except Exception as ex:
raise F5ModuleError('The specified pool does not exist')
result = pool.members_s.members.exists(
name=self.want.full_name,
partition=self.want.partition
)
return result
def node_exists(self):
resource = self.client.api.tm.ltm.nodes.node.exists(
name=self.want.node_name,
partition=self.want.partition
)
return resource
def update(self):
self.have = self.read_current_from_device()
if not self.should_update():
return False
if self.module.check_mode:
return True
self.update_on_device()
return True
def remove(self):
if self.module.check_mode:
return True
self.remove_from_device()
if not self.want.preserve_node:
self.remove_node_from_device()
if self.exists():
raise F5ModuleError("Failed to delete the resource.")
return True
def _set_host_by_name(self):
if is_valid_ip(self.want.name):
self.want.update({
'fqdn': None,
'address': self.want.name
})
else:
if not is_valid_hostname(self.want.name):
raise F5ModuleError(
"'name' is neither a valid IP address or FQDN name."
)
self.want.update({
'fqdn': self.want.name,
'address': None
})
def _update_api_state_attributes(self):
if self.want.state == 'forced_offline':
self.want.update({
'state': 'user-down',
'session': 'user-disabled',
})
elif self.want.state == 'disabled':
self.want.update({
'state': 'user-up',
'session': 'user-disabled',
})
elif self.want.state in ['present', 'enabled']:
self.want.update({
'state': 'user-up',
'session': 'user-enabled',
})
def _update_address_with_existing_nodes(self):
try:
have = self.read_current_node_from_device(self.want.node_name)
if self.want.fqdn_auto_populate and self.want.reuse_nodes:
self.module.warn("'fqdn_auto_populate' is discarded in favor of the re-used node's auto-populate setting.")
self.want.update({
'fqdn_auto_populate': True if have.fqdn['autopopulate'] == 'enabled' else False
})
if 'tmName' in have.fqdn:
self.want.update({
'fqdn': have.fqdn['tmName'],
'address': 'any6'
})
else:
self.want.update({
'address': have.address
})
except Exception:
return None
def create(self):
if self.want.reuse_nodes:
self._update_address_with_existing_nodes()
if self.want.name and not any(x for x in [self.want.address, self.want.fqdn_name]):
self._set_host_by_name()
self._update_api_state_attributes()
self._set_changed_options()
if self.module.check_mode:
return True
self.create_on_device()
return True
def create_on_device(self):
params = self.changes.api_params()
pool = self.client.api.tm.ltm.pools.pool.load(
name=self.want.pool,
partition=self.want.partition
)
pool.members_s.members.create(
name=self.want.full_name,
partition=self.want.partition,
**params
)
def update_on_device(self):
params = self.changes.api_params()
pool = self.client.api.tm.ltm.pools.pool.load(
name=self.want.pool,
partition=self.want.partition
)
resource = pool.members_s.members.load(
name=self.want.full_name,
partition=self.want.partition
)
resource.modify(**params)
def absent(self):
if self.exists():
return self.remove()
elif not self.want.preserve_node and self.node_exists():
return self.remove_node_from_device()
return False
def remove_from_device(self):
pool = self.client.api.tm.ltm.pools.pool.load(
name=self.want.pool,
partition=self.want.partition
)
resource = pool.members_s.members.load(
name=self.want.full_name,
partition=self.want.partition
)
if resource:
resource.delete()
def remove_node_from_device(self):
resource = self.client.api.tm.ltm.nodes.node.load(
name=self.want.node_name,
partition=self.want.partition
)
if resource:
resource.delete()
def read_current_from_device(self):
pool = self.client.api.tm.ltm.pools.pool.load(
name=self.want.pool,
partition=self.want.partition
)
resource = pool.members_s.members.load(
name=self.want.full_name,
partition=self.want.partition
)
return ApiParameters(params=resource.attrs)
def read_current_node_from_device(self, node):
resource = self.client.api.tm.ltm.nodes.node.load(
name=node,
partition=self.want.partition
)
return NodeApiParameters(params=resource.attrs)
class ArgumentSpec(object):
def __init__(self):
self.supports_check_mode = True
argument_spec = dict(
pool=dict(required=True),
address=dict(aliases=['host', 'ip']),
fqdn=dict(
aliases=['hostname']
),
name=dict(),
port=dict(type='int', required=True),
connection_limit=dict(type='int'),
description=dict(),
rate_limit=dict(type='int'),
ratio=dict(type='int'),
preserve_node=dict(type='bool'),
priority_group=dict(type='int'),
state=dict(
default='present',
choices=['absent', 'present', 'enabled', 'disabled', 'forced_offline']
),
partition=dict(
default='Common',
fallback=(env_fallback, ['F5_PARTITION'])
),
fqdn_auto_populate=dict(type='bool'),
reuse_nodes=dict(type='bool', default=True),
)
self.argument_spec = {}
self.argument_spec.update(f5_argument_spec)
self.argument_spec.update(argument_spec)
self.mutually_exclusive = [
['address', 'fqdn']
]
self.required_one_of = [
['name', 'address', 'fqdn'],
]
def main():
spec = ArgumentSpec()
module = AnsibleModule(
argument_spec=spec.argument_spec,
supports_check_mode=spec.supports_check_mode
)
if not HAS_F5SDK:
module.fail_json(msg="The python f5-sdk module is required")
try:
client = F5Client(**module.params)
mm = ModuleManager(module=module, client=client)
results = mm.exec_module()
cleanup_tokens(client)
module.exit_json(**results)
except F5ModuleError as ex:
cleanup_tokens(client)
module.fail_json(msg=str(ex))
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
bb6dc98e989399e948ba525a6d333964b95d3386 | df4fd380b3e1720a970573c4692eb0a32faf8f47 | /sort/insert_sort.py | 81debeb0a668d5850d4dd0f8ba1ff0eeeb77b87e | [] | no_license | Taeheon-Lee/Algorithm | 99dd21e1e0ddba31190a16d6c9646a9f393f4c4b | 64ebacf24dfdf25194b5bce39f4ce43c4bc87141 | refs/heads/master | 2023-07-10T20:26:10.121214 | 2021-08-07T17:26:26 | 2021-08-07T17:26:26 | 383,803,983 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,307 | py | "Insert Sort 삽입 정렬"
# Sorts by taking each not-yet-sorted element and inserting it into its proper position within the already-sorted portion
# The selected key is compared with the keys of the preceding elements to find its position, then inserted there
# Best case: the sort completes after n-1 comparisons
# Worst case: all preceding elements are moved at every step, "n(n-1)/2 operations"
# Time complexity: O(n) in the best case, O(n²) in the worst case
def insert_sort(lst):
"삽입 정렬 함수"
length = len(lst) # 리스트 길이
for i in range(1, length): # 인덱스 0은 이미 정렬된 것으로 볼 수 있음
key = lst[i]
for j in range(i-1, -1, -1):
if lst[j] < key:
                j += 1  # if the loop exits early on this condition, add 1 to the index so the key lands after lst[j]
break
            lst[j+1] = lst[j]  # shift elements larger than the key one position to the right
lst[j] = key
lst = list(map(int, input().split()))  # read the list to sort
insert_sort(lst)  # run insertion sort
print(lst)  # print the result
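# --- Illustrative addition (not from the original file) ---
# The comments above claim O(n) behaviour on already-sorted input and O(n²) on
# reverse-sorted input. This rough check times insert_sort on both cases; the
# list size and the use of time.perf_counter are arbitrary illustrative choices.
import time
_sorted_input = list(range(2000))           # best case: inner loop breaks immediately
_reversed_input = list(range(2000, 0, -1))  # worst case: every element is shifted
_t0 = time.perf_counter()
insert_sort(_sorted_input)
_t1 = time.perf_counter()
insert_sort(_reversed_input)
_t2 = time.perf_counter()
print("already sorted :", _t1 - _t0, "seconds")
print("reverse sorted :", _t2 - _t1, "seconds")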
"[email protected]"
] | |
a0cc6c8dfeb588a79e84e11ed632982a28f48784 | a7122df9b74c12a5ef23af3cd38550e03a23461d | /Elementary/Three Words.py | ae2e7c11af4f31ee48707685af3bc478b686b7e3 | [] | no_license | CompetitiveCode/py.checkIO.org | c41f6901c576c614c4c77ad5c4162448828c3902 | e34648dcec54364a7006e4d78313e9a6ec6c498b | refs/heads/master | 2022-01-09T05:48:02.493606 | 2019-05-27T20:29:24 | 2019-05-27T20:29:24 | 168,180,493 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 843 | py | #Answer to Three Words - https://py.checkio.org/en/mission/three-words/
def checkio(words: str) -> bool:
word_list = words.split()
three = 0
for i in word_list:
if i.isalpha():
three += 1
else:
three = 0
if three == 3:
return True
return False
#These "asserts" using only for self-checking and not necessary for auto-testing
if __name__ == '__main__':
print('Example:')
print(checkio("Hello World hello"))
assert checkio("Hello World hello") == True, "Hello"
assert checkio("He is 123 man") == False, "123 man"
assert checkio("1 2 3 4") == False, "Digits"
assert checkio("bla bla bla bla") == True, "Bla Bla"
assert checkio("Hi") == False, "Hi"
print("Coding complete? Click 'Check' to review your tests and earn cool rewards!") | [
"[email protected]"
] | |
5d0e2596df37927c85962b5901035adae9d5f07f | c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c | /cases/synthetic/stdlib-big-1069.py | 0eb2deca32b6d008f79111e9b825723d65fb45f8 | [] | no_license | Virtlink/ccbench-chocopy | c3f7f6af6349aff6503196f727ef89f210a1eac8 | c7efae43bf32696ee2b2ee781bdfe4f7730dec3f | refs/heads/main | 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,001 | py | # ChocoPy library functions
def int_to_str(x: int) -> str:
digits:[str] = None
result:str = ""
# Set-up digit mapping
digits = ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"]
# Write sign if necessary
if x < 0:
result = "-"
x = -x
# Write digits using a recursive call
if x >= 10:
result = result + int_to_str(x // 10)
result = result + digits[x % 10]
return result
def int_to_str2(x: int, x2: int) -> str:
digits:[str] = None
digits2:[str] = None
result:str = ""
result2:str = ""
# Set-up digit mapping
digits = ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"]
# Write sign if necessary
if x < 0:
result = "-"
x = -x
# Write digits using a recursive call
if x >= 10:
result = result + int_to_str(x // 10)
result = result + digits[x % 10]
return result
def int_to_str3(x: int, x2: int, x3: int) -> str:
digits:[str] = None
digits2:[str] = None
digits3:[str] = None
result:str = ""
result2:str = ""
result3:str = ""
# Set-up digit mapping
digits = ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"]
# Write sign if necessary
if x < 0:
result = "-"
x = -x
# Write digits using a recursive call
if x >= 10:
result = result + int_to_str(x // 10)
result = result + digits[x % 10]
return result
def int_to_str4(x: int, x2: int, x3: int, x4: int) -> str:
digits:[str] = None
digits2:[str] = None
digits3:[str] = None
digits4:[str] = None
result:str = ""
result2:str = ""
result3:str = ""
result4:str = ""
# Set-up digit mapping
digits = ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"]
# Write sign if necessary
if x < 0:
result = "-"
x = -x
# Write digits using a recursive call
if x >= 10:
result = result + int_to_str(x // 10)
result = result + digits[x % 10]
return result
def int_to_str5(x: int, x2: int, x3: int, x4: int, x5: int) -> str:
digits:[str] = None
digits2:[str] = None
digits3:[str] = None
digits4:[str] = None
digits5:[str] = None
result:str = ""
result2:str = ""
result3:str = ""
result4:str = ""
result5:str = ""
# Set-up digit mapping
digits = ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"]
# Write sign if necessary
if x < 0:
result = "-"
x = -x
# Write digits using a recursive call
if x >= 10:
result = result + int_to_str(x // 10)
result = result + digits[x % 10]
return result
def str_to_int(x: str) -> int:
result:int = 0
digit:int = 0
char:str = ""
sign:int = 1
first_char:bool = True
# Parse digits
for char in x:
if char == "-":
if not first_char:
return 0 # Error
sign = $Exp
elif char == "0":
digit = 0
elif char == "1":
digit = 1
elif char == "2":
digit = 2
elif char == "3":
digit = 3
elif char == "3":
digit = 3
elif char == "4":
digit = 4
elif char == "5":
digit = 5
elif char == "6":
digit = 6
elif char == "7":
digit = 7
elif char == "8":
digit = 8
elif char == "9":
digit = 9
else:
return 0 # On error
first_char = False
result = result * 10 + digit
# Compute result
return result * sign
def str_to_int2(x: str, x2: str) -> int:
result:int = 0
result2:int = 0
digit:int = 0
digit2:int = 0
char:str = ""
char2:str = ""
sign:int = 1
sign2:int = 1
first_char:bool = True
first_char2:bool = True
# Parse digits
for char in x:
if char == "-":
if not first_char:
return 0 # Error
sign = -1
elif char == "0":
digit = 0
elif char == "1":
digit = 1
elif char == "2":
digit = 2
elif char == "3":
digit = 3
elif char == "3":
digit = 3
elif char == "4":
digit = 4
elif char == "5":
digit = 5
elif char == "6":
digit = 6
elif char == "7":
digit = 7
elif char == "8":
digit = 8
elif char == "9":
digit = 9
else:
return 0 # On error
first_char = False
result = result * 10 + digit
# Compute result
return result * sign
def str_to_int3(x: str, x2: str, x3: str) -> int:
result:int = 0
result2:int = 0
result3:int = 0
digit:int = 0
digit2:int = 0
digit3:int = 0
char:str = ""
char2:str = ""
char3:str = ""
sign:int = 1
sign2:int = 1
sign3:int = 1
first_char:bool = True
first_char2:bool = True
first_char3:bool = True
# Parse digits
for char in x:
if char == "-":
if not first_char:
return 0 # Error
sign = -1
elif char == "0":
digit = 0
elif char == "1":
digit = 1
elif char == "2":
digit = 2
elif char == "3":
digit = 3
elif char == "3":
digit = 3
elif char == "4":
digit = 4
elif char == "5":
digit = 5
elif char == "6":
digit = 6
elif char == "7":
digit = 7
elif char == "8":
digit = 8
elif char == "9":
digit = 9
else:
return 0 # On error
first_char = False
result = result * 10 + digit
# Compute result
return result * sign
def str_to_int4(x: str, x2: str, x3: str, x4: str) -> int:
result:int = 0
result2:int = 0
result3:int = 0
result4:int = 0
digit:int = 0
digit2:int = 0
digit3:int = 0
digit4:int = 0
char:str = ""
char2:str = ""
char3:str = ""
char4:str = ""
sign:int = 1
sign2:int = 1
sign3:int = 1
sign4:int = 1
first_char:bool = True
first_char2:bool = True
first_char3:bool = True
first_char4:bool = True
# Parse digits
for char in x:
if char == "-":
if not first_char:
return 0 # Error
sign = -1
elif char == "0":
digit = 0
elif char == "1":
digit = 1
elif char == "2":
digit = 2
elif char == "3":
digit = 3
elif char == "3":
digit = 3
elif char == "4":
digit = 4
elif char == "5":
digit = 5
elif char == "6":
digit = 6
elif char == "7":
digit = 7
elif char == "8":
digit = 8
elif char == "9":
digit = 9
else:
return 0 # On error
first_char = False
result = result * 10 + digit
# Compute result
return result * sign
def str_to_int5(x: str, x2: str, x3: str, x4: str, x5: str) -> int:
result:int = 0
result2:int = 0
result3:int = 0
result4:int = 0
result5:int = 0
digit:int = 0
digit2:int = 0
digit3:int = 0
digit4:int = 0
digit5:int = 0
char:str = ""
char2:str = ""
char3:str = ""
char4:str = ""
char5:str = ""
sign:int = 1
sign2:int = 1
sign3:int = 1
sign4:int = 1
sign5:int = 1
first_char:bool = True
first_char2:bool = True
first_char3:bool = True
first_char4:bool = True
first_char5:bool = True
# Parse digits
for char in x:
if char == "-":
if not first_char:
return 0 # Error
sign = -1
elif char == "0":
digit = 0
elif char == "1":
digit = 1
elif char == "2":
digit = 2
elif char == "3":
digit = 3
elif char == "3":
digit = 3
elif char == "4":
digit = 4
elif char == "5":
digit = 5
elif char == "6":
digit = 6
elif char == "7":
digit = 7
elif char == "8":
digit = 8
elif char == "9":
digit = 9
else:
return 0 # On error
first_char = False
result = result * 10 + digit
# Compute result
return result * sign
# Input parameters
c:int = 42
c2:int = 42
c3:int = 42
c4:int = 42
c5:int = 42
n:int = 10
n2:int = 10
n3:int = 10
n4:int = 10
n5:int = 10
# Run [-nc, nc] with step size c
s:str = ""
s2:str = ""
s3:str = ""
s4:str = ""
s5:str = ""
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
i = -n * c
# Crunch
while i <= n * c:
s = int_to_str(i)
print(s)
i = str_to_int(s) + c
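# Worked example (added for illustration; not part of the original benchmark code):
# str_to_int("-42") first sees '-' and sets sign = -1 (result stays 0), then
# accumulates result = 0 * 10 + 4 = 4 and result = 4 * 10 + 2 = 42,
# and finally returns 42 * sign = -42.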
| [
"[email protected]"
] | |
d90029074a8b3cb21014a71c04d262ed15bdd2a4 | 09e57dd1374713f06b70d7b37a580130d9bbab0d | /benchmark/startQiskit_Class2105.py | 46f966d0a752418c6145076ba857b2bce1e11250 | [
"BSD-3-Clause"
] | permissive | UCLA-SEAL/QDiff | ad53650034897abb5941e74539e3aee8edb600ab | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | refs/heads/main | 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,100 | py | # qubit number=4
# total number=36
import cirq
import qiskit
from qiskit import QuantumCircuit, QuantumRegister, ClassicalRegister
from qiskit import BasicAer, execute, transpile
from pprint import pprint
from qiskit.test.mock import FakeVigo
from math import log2
import numpy as np
import networkx as nx
def bitwise_xor(s: str, t: str) -> str:
length = len(s)
res = []
for i in range(length):
res.append(str(int(s[i]) ^ int(t[i])))
return ''.join(res[::-1])
def bitwise_dot(s: str, t: str) -> str:
length = len(s)
res = 0
for i in range(length):
res += int(s[i]) * int(t[i])
return str(res % 2)
def build_oracle(n: int, f) -> QuantumCircuit:
# implement the oracle O_f
# NOTE: use multi_control_toffoli_gate ('noancilla' mode)
# https://qiskit.org/documentation/_modules/qiskit/aqua/circuits/gates/multi_control_toffoli_gate.html
# https://quantumcomputing.stackexchange.com/questions/3943/how-do-you-implement-the-toffoli-gate-using-only-single-qubit-and-cnot-gates
# https://quantumcomputing.stackexchange.com/questions/2177/how-can-i-implement-an-n-bit-toffoli-gate
controls = QuantumRegister(n, "ofc")
target = QuantumRegister(1, "oft")
oracle = QuantumCircuit(controls, target, name="Of")
for i in range(2 ** n):
rep = np.binary_repr(i, n)
if f(rep) == "1":
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
oracle.mct(controls, target[0], None, mode='noancilla')
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
# oracle.barrier()
return oracle
def make_circuit(n:int,f) -> QuantumCircuit:
# circuit begin
input_qubit = QuantumRegister(n,"qc")
classical = ClassicalRegister(n, "qm")
prog = QuantumCircuit(input_qubit, classical)
prog.h(input_qubit[3]) # number=16
prog.cz(input_qubit[0],input_qubit[3]) # number=17
prog.h(input_qubit[3]) # number=18
prog.cx(input_qubit[0],input_qubit[3]) # number=33
prog.x(input_qubit[3]) # number=34
prog.cx(input_qubit[0],input_qubit[3]) # number=35
prog.h(input_qubit[3]) # number=24
prog.cz(input_qubit[0],input_qubit[3]) # number=25
prog.h(input_qubit[3]) # number=26
prog.h(input_qubit[1]) # number=2
prog.h(input_qubit[2]) # number=3
prog.h(input_qubit[3]) # number=4
prog.h(input_qubit[0]) # number=5
oracle = build_oracle(n-1, f)
prog.append(oracle.to_gate(),[input_qubit[i] for i in range(n-1)]+[input_qubit[n-1]])
prog.h(input_qubit[1]) # number=6
prog.h(input_qubit[2]) # number=30
prog.cz(input_qubit[0],input_qubit[2]) # number=31
prog.h(input_qubit[2]) # number=32
prog.x(input_qubit[2]) # number=28
prog.cx(input_qubit[0],input_qubit[2]) # number=29
prog.h(input_qubit[2]) # number=7
prog.h(input_qubit[3]) # number=8
prog.h(input_qubit[0]) # number=9
prog.cx(input_qubit[3],input_qubit[2]) # number=22
prog.cx(input_qubit[2],input_qubit[0]) # number=10
prog.h(input_qubit[0]) # number=19
prog.cz(input_qubit[2],input_qubit[0]) # number=20
prog.h(input_qubit[0]) # number=21
prog.cx(input_qubit[2],input_qubit[3]) # number=15
# circuit end
return prog
if __name__ == '__main__':
a = "111"
b = "0"
f = lambda rep: bitwise_xor(bitwise_dot(a, rep), b)
prog = make_circuit(4,f)
backend = BasicAer.get_backend('statevector_simulator')
sample_shot =8000
info = execute(prog, backend=backend).result().get_statevector()
qubits = round(log2(len(info)))
info = {
np.binary_repr(i, qubits): round((info[i]*(info[i].conjugate())).real,3)
for i in range(2 ** qubits)
}
backend = FakeVigo()
circuit1 = transpile(prog,backend,optimization_level=2)
writefile = open("../data/startQiskit_Class2105.csv","w")
print(info,file=writefile)
print("results end", file=writefile)
print(circuit1.__len__(),file=writefile)
print(circuit1,file=writefile)
writefile.close()
| [
"[email protected]"
] | |
2e87fd0b48a421b52fc6a50780c58fc1488e04e6 | add74ecbd87c711f1e10898f87ffd31bb39cc5d6 | /xcp2k/classes/_angles1.py | 96616c02397b22202a79759ec0c9b16dd6cb2395 | [] | no_license | superstar54/xcp2k | 82071e29613ccf58fc14e684154bb9392d00458b | e8afae2ccb4b777ddd3731fe99f451b56d416a83 | refs/heads/master | 2021-11-11T21:17:30.292500 | 2021-11-06T06:31:20 | 2021-11-06T06:31:20 | 62,589,715 | 8 | 2 | null | null | null | null | UTF-8 | Python | false | false | 329 | py | from xcp2k.inputsection import InputSection
class _angles1(InputSection):
def __init__(self):
InputSection.__init__(self)
self.Default_keyword = []
self._name = "ANGLES"
self._repeated_default_keywords = {'Default_keyword': 'DEFAULT_KEYWORD'}
self._attributes = ['Default_keyword']
| [
"[email protected]"
] | |
1ff557c19d6ccf18f76ca6ae2e5858460cee6803 | 1811d37ed6474ab7eaeafff3c82d3bb7c0466e3d | /parts/zodiac/pyramid/scripting.py | f54f87f456200dc87c12bc8cacc3288eee7809a2 | [] | no_license | bernatcortina/zodiac | ed384fe96f6739d841a3a777d10bad4b33fd0e78 | aa0ecb2c386fc5b54ff60ba94e0a1bc5a7493f17 | refs/heads/master | 2021-01-18T14:02:44.978553 | 2014-02-07T17:33:27 | 2014-02-07T17:33:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 75 | py | /Users/Bernat/GitHub/zodiac/eggs/pyramid-1.4-py2.7.egg/pyramid/scripting.py | [
"[email protected]"
] | |
3c22e9c21b6f644c8bbd5ffc9b800d480868215d | 07ec5a0b3ba5e70a9e0fb65172ea6b13ef4115b8 | /lib/python3.6/site-packages/pip/_vendor/requests/packages/urllib3/contrib/appengine.py | d71d9e21e57c5fa6a6e12c71e7f2d39ec15bdbf1 | [] | no_license | cronos91/ML-exercise | 39c5cd7f94bb90c57450f9a85d40c2f014900ea4 | 3b7afeeb6a7c87384049a9b87cac1fe4c294e415 | refs/heads/master | 2021-05-09T22:02:55.131977 | 2017-12-14T13:50:44 | 2017-12-14T13:50:44 | 118,736,043 | 0 | 0 | null | 2018-01-24T08:30:23 | 2018-01-24T08:30:22 | null | UTF-8 | Python | false | false | 129 | py | version https://git-lfs.github.com/spec/v1
oid sha256:35d37fc4e8032cc69d50f7bf74ddf075a9ff0c7f7e7f154da8582ad7db5ba90b
size 7937
| [
"[email protected]"
] | |
a2ca84aef2b9b69a884aa9a30a8f8b1264c9c8f1 | c2e3f66a7c26be1b12a48a464c0db0f170826433 | /WDI/lekcja 5 04.11.2020/zad20.py | 2c4959a63829e8937d95dc7a62da9da9a7a0b7d5 | [] | no_license | HITOfficial/College | 7b801b4eb93afc0b1b6a2b6f405db4161d8976d7 | 84eae6182397a9ad373718a077939461eb189fbb | refs/heads/main | 2023-08-01T01:50:07.875292 | 2021-09-20T11:21:43 | 2021-09-20T11:21:43 | 303,842,865 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,671 | py | # We are given an array T[N][N] (representing a chessboard) filled with natural numbers.
# Write a function that places two rooks on the board so that the sum of the numbers on the
# squares "attacked" by the rooks is as large as possible. The function takes the array and
# should return the positions of the rooks. Note: we assume a rook attacks its whole row and
# column, excluding the square it stands on.
# I thought this task was somehow weird, but it turns out it's actually easy
from random import randint
def chess_table():
chess_list = [[randint(1, 100) for _ in range(8)] for _ in range(8)]
return chess_list
def longest_row_col(chess_list):
    # first, sum every row and every column and store each as a tuple in a list (row,0,20) - (row/col, index, sum)
    # print(chess_list)
    sum_of_row_col = 16 * [0] # declare the list length up front, as we were told to in class -> 0-7 rows, 8-15 columns
    for i in range(len(chess_list)): # i - row, j - column
sum_of_column = 0
sum_of_row = 0
for j in range(len(chess_list)):
            sum_of_row += chess_list[i][j] # sums the rows
            sum_of_column += chess_list[j][i] # sums the columns
        sum_of_row_col[i] = ('ROW', i, sum_of_row) # 0-7 hold the row sums
        sum_of_row_col[8+i] = ('COL', i, sum_of_column) # 8-15 hold the column sums
sum_of_row_col.sort(key=lambda tup: tup[2], reverse=True)
print(sum_of_row_col[0:3])
    # not done exactly - the sums still include the square the rook stands on
longest_row_col(chess_table())
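# Below is a brute-force sketch of the full task (added for illustration; the function name
# best_two_rooks and the O(N^6) scan are assumptions, not part of the original exercise file).
# It assumes rooks do not block each other and that a square occupied by one rook still
# counts when it is attacked by the other rook.
def best_two_rooks(board):
    n = len(board)
    best_sum = -1
    best_pos = None
    for r1 in range(n):
        for c1 in range(n):
            for r2 in range(n):
                for c2 in range(n):
                    if (r1, c1) == (r2, c2):
                        continue
                    total = 0
                    for i in range(n):
                        for j in range(n):
                            # "same row xor same column" marks an attacked square
                            # and automatically excludes the rook's own square
                            hit1 = (i == r1) != (j == c1)
                            hit2 = (i == r2) != (j == c2)
                            if hit1 or hit2:
                                total += board[i][j]
                    if total > best_sum:
                        best_sum = total
                        best_pos = ((r1, c1), (r2, c2))
    return best_pos, best_sum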
| [
"[email protected]"
] | |
ae84f2f4ef44978d98a6a363f6caa06b298960c2 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03227/s957546769.py | 48b0de137f448123c6d2f66af5501853e90c21d3 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 301 | py | # -*- coding: utf-8 -*-
"""
A - Measure
https://atcoder.jp/contests/tenka1-2018-beginner/tasks/tenka1_2018_a
"""
import sys
def solve(S):
return S if len(S) == 2 else S[::-1]
def main(args):
S = input()
ans = solve(S)
print(ans)
if __name__ == '__main__':
main(sys.argv[1:]) | [
"[email protected]"
] | |
a2749475eac129473aaa17068f2336ac1eb515ee | 7b1edb732ca1159e428ecc4c2c1f5148f2ff03f0 | /0x04-python-more_data_structures/9-multiply_by_2.py | 43677229d843a9d8a1c7e025bbc2b610e5ef9fc6 | [] | no_license | maroua199525/holbertonschool-higher_level_programming | 0045b1a31438f79e0da96b97e0a4d6eee46217c8 | 1df7014614ffb4b3fb2693a9711603d939b5dc35 | refs/heads/master | 2023-08-02T04:57:16.217728 | 2021-09-22T15:59:14 | 2021-09-22T15:59:14 | 361,716,729 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 260 | py | #!/usr/bin/python3
def multiply_by_2(a_dictionary):
dictionary = a_dictionary.copy()
value = list(map(lambda x: x * 2, dictionary.values()))
key = dictionary.keys()
for i, v in zip(key, value):
dictionary[i] = v
return (dictionary)
| [
"[email protected]"
] | |
3cfa936ed48c53fcf3e5d2235f299be8bce0cf99 | eda632b4d9f5a643bd4d086eef0860249bfbd64d | /8장/models.py | e639604d3c3ab0283ce33aa288d4887d8345c232 | [] | no_license | sohn0356-git/python_web | 508e9bc7167893426d216c067f7eb634f71c4e65 | f9b8d46df6be4b85b06daf491c3bda6a83117467 | refs/heads/master | 2021-01-13T17:59:48.799620 | 2020-03-12T00:26:14 | 2020-03-12T00:26:14 | 242,445,531 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 281 | py | from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()
class Fcuser(db.Model):
__tablename__ = 'fcuser'
id = db.Column(db.Integer, primary_key = True)
password = db.Column(db.String(64))
userid = db.Column(db.String(32))
username = db.Column(db.String(8)) | [
"[email protected]"
] | |
3ff4e2e9c9b07471979ab4bb2934b128bc6109b7 | ba694353a3cb1cfd02a6773b40f693386d0dba39 | /sdk/python/pulumi_google_native/healthcare/v1beta1/get_dataset.py | d56e29284fb73b5d51cfe90e671e86f999afa748 | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | pulumi/pulumi-google-native | cc57af8bd3d1d6b76f1f48333ed1f1b31d56f92b | 124d255e5b7f5440d1ef63c9a71e4cc1d661cd10 | refs/heads/master | 2023-08-25T00:18:00.300230 | 2023-07-20T04:25:48 | 2023-07-20T04:25:48 | 323,680,373 | 69 | 16 | Apache-2.0 | 2023-09-13T00:28:04 | 2020-12-22T16:39:01 | Python | UTF-8 | Python | false | false | 3,030 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
__all__ = [
'GetDatasetResult',
'AwaitableGetDatasetResult',
'get_dataset',
'get_dataset_output',
]
@pulumi.output_type
class GetDatasetResult:
def __init__(__self__, name=None, time_zone=None):
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if time_zone and not isinstance(time_zone, str):
raise TypeError("Expected argument 'time_zone' to be a str")
pulumi.set(__self__, "time_zone", time_zone)
@property
@pulumi.getter
def name(self) -> str:
"""
Resource name of the dataset, of the form `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}`.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="timeZone")
def time_zone(self) -> str:
"""
The default timezone used by this dataset. Must be a either a valid IANA time zone name such as "America/New_York" or empty, which defaults to UTC. This is used for parsing times in resources, such as HL7 messages, where no explicit timezone is specified.
"""
return pulumi.get(self, "time_zone")
class AwaitableGetDatasetResult(GetDatasetResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetDatasetResult(
name=self.name,
time_zone=self.time_zone)
def get_dataset(dataset_id: Optional[str] = None,
location: Optional[str] = None,
project: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetDatasetResult:
"""
Gets any metadata associated with a dataset.
"""
__args__ = dict()
__args__['datasetId'] = dataset_id
__args__['location'] = location
__args__['project'] = project
opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
__ret__ = pulumi.runtime.invoke('google-native:healthcare/v1beta1:getDataset', __args__, opts=opts, typ=GetDatasetResult).value
return AwaitableGetDatasetResult(
name=pulumi.get(__ret__, 'name'),
time_zone=pulumi.get(__ret__, 'time_zone'))
@_utilities.lift_output_func(get_dataset)
def get_dataset_output(dataset_id: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[Optional[str]]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetDatasetResult]:
"""
Gets any metadata associated with a dataset.
"""
...
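# Example usage sketch (illustrative only; the dataset/location/project values below
# are assumptions, not part of the generated SDK):
#
#   ds = get_dataset(dataset_id="my-dataset", location="us-central1", project="my-project")
#   pulumi.export("datasetName", ds.name)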
| [
"[email protected]"
] | |
7fa89e0e08735ef2d3f91806030c4918a84d5aad | d2ca1ab6ed63983d1bd6497f26a63f0445451844 | /2015/04/fc_2015_04_08.py | 871c3052fbcb6c6fd633cdaac920497c029bd5d2 | [
"MIT"
] | permissive | mfwarren/FreeCoding | 96636367f4f4a53351535372c5691d7805199f23 | 58ac87f35ad2004a3514782556762ee0ed72c39a | refs/heads/master | 2021-01-19T14:30:09.057354 | 2015-07-05T05:59:53 | 2015-07-05T05:59:53 | 24,469,988 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 953 | py | #!/usr/bin/env python3
# imports go here
import datetime
from collections import defaultdict
from github import Github
import os
import plotly.plotly as py
from plotly.graph_objs import Data, Layout, Figure, Bar
#
# Free Coding session for 2015-04-08
# Written by Matt Warren
#
hub = Github(os.environ['GITHUB_USERNAME'], os.environ['GITHUB_PASSWORD'])
r = hub.get_repo('mfwarren/freecoding')
events = r.get_events()
week_ago = datetime.datetime.now() + datetime.timedelta(days=-7)
counts = defaultdict(int)
for event in events:
if event.created_at < week_ago:
break
counts[event.created_at.date()] += 1
# sort the dict
dates = list(counts.keys())
dates.sort()
event_counts = Bar(
x=dates,
y=[counts[d] for d in dates],
name='Github Events'
)
data = Data([event_counts])
layout = Layout(
barmode='stack'
)
fig = Figure(data=data, layout=layout)
plot_url = py.plot(fig, filename='Github Activity for the week')
| [
"[email protected]"
] | |
9c332035934bf11c99d4e9c9ec7262069f653b2c | d6952f048727add5b54a521d04f6c9b5889bcd35 | /test/test_dag_folder_input_alias.py | 768d7f33bd3474d3cc5bf000ee28744202ea245f | [] | no_license | TfedUD/python-sdk | bf719644041c2ab7b741af9c7fb8e5acfe085922 | 7ddc34611de44d2f9c5b217cf9b9e7cec27b2a27 | refs/heads/master | 2023-08-10T21:13:45.270193 | 2021-06-21T14:48:36 | 2021-06-21T14:51:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,866 | py | # coding: utf-8
"""
pollination-server
Pollination Server OpenAPI Definition # noqa: E501
The version of the OpenAPI document: 0.13.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import datetime
import pollination_sdk
from pollination_sdk.models.dag_folder_input_alias import DAGFolderInputAlias # noqa: E501
from pollination_sdk.rest import ApiException
class TestDAGFolderInputAlias(unittest.TestCase):
"""DAGFolderInputAlias unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def make_instance(self, include_optional):
"""Test DAGFolderInputAlias
include_option is a boolean, when False only required
params are included, when True both required and
optional params are included """
# model = pollination_sdk.models.dag_folder_input_alias.DAGFolderInputAlias() # noqa: E501
if include_optional :
return DAGFolderInputAlias(
annotations = {
'key' : '0'
},
                default = None,
description = '0',
handler = [
pollination_sdk.models.io_alias_handler.IOAliasHandler(
annotations = {
'key' : '0'
},
function = '0',
index = 56,
language = '0',
module = 'honeybee_rhino.handlers',
type = 'IOAliasHandler', )
],
name = '0',
platform = [
'0'
],
required = True,
spec = pollination_sdk.models.spec.Spec(),
type = 'DAGFolderInputAlias'
)
else :
return DAGFolderInputAlias(
handler = [
pollination_sdk.models.io_alias_handler.IOAliasHandler(
annotations = {
'key' : '0'
},
function = '0',
index = 56,
language = '0',
module = 'honeybee_rhino.handlers',
type = 'IOAliasHandler', )
],
name = '0',
platform = [
'0'
],
)
def testDAGFolderInputAlias(self):
"""Test DAGFolderInputAlias"""
inst_req_only = self.make_instance(include_optional=False)
inst_req_and_optional = self.make_instance(include_optional=True)
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
202e3ec820a2bfc5f5dd2de6968bf8fdae7afcd9 | 82a9077bcb5a90d88e0a8be7f8627af4f0844434 | /google-cloud-sdk/lib/tests/unit/surface/endpoints/quota/update_test.py | 9b8a0dd8fffe1836395bc164e16e520aba4c729b | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | piotradamczyk5/gcloud_cli | 1ae2553595e569fad6ce84af62b91a7ee5489017 | 384ece11040caadcd64d51da74e0b8491dd22ca3 | refs/heads/master | 2023-01-01T23:00:27.858583 | 2020-10-21T04:21:23 | 2020-10-21T04:21:23 | 290,238,061 | 0 | 0 | null | 2020-10-19T16:43:36 | 2020-08-25T14:31:00 | Python | UTF-8 | Python | false | false | 3,206 | py | # -*- coding: utf-8 -*- #
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests for endpoints quota list command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base as calliope_base
from tests.lib.surface.services import unit_test_base
class UpdateTestAlpha(unit_test_base.SCMUnitTestBase):
"""Unit tests for endpoints quota update command."""
OPERATION_NAME = 'operations/123'
OVERRIDE_ID = 'hello-override'
def PreSetUp(self):
self.track = calliope_base.ReleaseTrack.ALPHA
def testUpdate(self):
self.ExpectUpdateQuotaOverrideCall(self.mutate_limit_name,
self.mutate_metric, self.unit, 666,
self.OPERATION_NAME)
self.ExpectOperation(self.OPERATION_NAME, 3)
self.Run('endpoints quota update --service=example.googleapis.com '
'--consumer=projects/helloworld '
'--metric=example.googleapis.com/mutate_requests '
'--unit=1/min/{project} --value=666')
self.AssertErrEquals(
"""\
Operation "operations/123" finished successfully.
""",
normalize_space=True)
def testUpdate_force(self):
self.ExpectUpdateQuotaOverrideCall(
self.mutate_limit_name,
self.mutate_metric,
self.unit,
666,
self.OPERATION_NAME,
force=True)
self.ExpectOperation(self.OPERATION_NAME, 3)
self.Run('endpoints quota update --service=example.googleapis.com '
'--consumer=projects/helloworld '
'--metric=example.googleapis.com/mutate_requests '
'--unit=1/min/{project} '
'--value=666 --force')
self.AssertErrEquals(
"""\
Operation "operations/123" finished successfully.
""",
normalize_space=True)
def testUpdate_dimensions(self):
self.ExpectUpdateQuotaOverrideCall(
self.mutate_limit_name,
self.mutate_metric,
self.unit,
666,
self.OPERATION_NAME,
dimensions=[('regions', 'us-central1'), ('zones', 'us-central1-c')])
self.ExpectOperation(self.OPERATION_NAME, 3)
self.Run('endpoints quota update --service=example.googleapis.com '
'--consumer=projects/helloworld '
'--metric=example.googleapis.com/mutate_requests '
'--unit=1/min/{project} --value=666 '
'--dimensions=regions=us-central1 '
'--dimensions=zones=us-central1-c')
self.AssertErrEquals(
"""\
Operation "operations/123" finished successfully.
""",
normalize_space=True)
| [
"[email protected]"
] | |
f558ac962c214ea3311588dbcddd5abf0f98c210 | c057d05c60521096897f120c598b3ebcddb07c21 | /swagger_client/api/electioneering_api.py | 461c011d07124b4f94f5e5ba74532538c7e43540 | [] | no_license | ottomata/fec_python | 97e9ae13b37dc0e26421f7d3c682fcfb1753db4f | 70a50c0c1c90dd553a81f7b450b037646dfe4cb3 | refs/heads/master | 2022-09-27T03:22:57.054350 | 2020-06-05T16:48:59 | 2020-06-05T16:48:59 | 269,473,559 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 44,948 | py | # coding: utf-8
"""
OpenFEC
This API allows you to explore the way candidates and committees fund their campaigns. The FEC API is a RESTful web service supporting full-text and field-specific searches on FEC data. [Bulk downloads](https://www.fec.gov/data/advanced/?tab=bulk-data) are available on the current site. Information is tied to the underlying forms by file ID and image ID. Data is updated nightly. There is a lot of data, but a good place to start is to use search to find interesting candidates and committees. Then, you can use their IDs to find report or line item details with the other endpoints. If you are interested in individual donors, check out contributor information in schedule_a. Get an [API key here](https://api.data.gov/signup/). That will enable you to place up to 1,000 calls an hour. Each call is limited to 100 results per page. You can email questions, comments or a request to get a key for 120 calls per minute to [[email protected]](mailto:[email protected]). You can also ask questions and discuss the data in the [FEC data Google Group](https://groups.google.com/forum/#!forum/fec-data). API changes will also be added to this group in advance of the change. The model definitions and schema are available at [/swagger](/swagger/). This is useful for making wrappers and exploring the data. A few restrictions limit the way you can use FEC data. For example, you can’t use contributor lists for commercial purposes or to solicit donations. [Learn more here](https://www.fec.gov/updates/sale-or-use-contributor-information/). [View our source code](https://github.com/fecgov/openFEC). We welcome issues and pull requests! # noqa: E501
OpenAPI spec version: 1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from swagger_client.api_client import ApiClient
class ElectioneeringApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def committee_committee_id_electioneering_by_candidate_get(self, api_key, committee_id, **kwargs): # noqa: E501
"""committee_committee_id_electioneering_by_candidate_get # noqa: E501
Electioneering costs aggregated by candidate # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.committee_committee_id_electioneering_by_candidate_get(api_key, committee_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str api_key: API key for https://api.data.gov. Get one at https://api.data.gov/signup. (required)
:param str committee_id: (required)
:param bool sort_null_only: Toggle that filters out all rows having sort column that is non-null
:param bool sort_hide_null: Hide null values on sorted column(s).
:param str state: US state or territory where a candidate runs for office
:param str sort: Provide a field to sort by. Use - for descending order.
:param int page: For paginating through results, starting at page 1
:param str district: Two-digit US House distirict of the office the candidate is running for. Presidential, Senate and House at-large candidates will have District 00.
:param bool election_full: `True` indicates that full election period of a candidate. `False` indicates that two year election cycle.
:param str office: Federal office candidate runs for: H, S or P
:param int per_page: The number of results returned per page. Defaults to 20.
:param bool sort_nulls_last: Toggle that sorts null values last
:param list[int] cycle: Filter records to only those that were applicable to a given two-year period.The cycle begins with an odd year and is named for its ending, even year.
:param list[str] candidate_id: A unique identifier assigned to each candidate registered with the FEC. If a person runs for several offices, that person will have separate candidate IDs for each office.
:return: ElectioneeringByCandidatePage
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.committee_committee_id_electioneering_by_candidate_get_with_http_info(api_key, committee_id, **kwargs) # noqa: E501
else:
(data) = self.committee_committee_id_electioneering_by_candidate_get_with_http_info(api_key, committee_id, **kwargs) # noqa: E501
return data
def committee_committee_id_electioneering_by_candidate_get_with_http_info(self, api_key, committee_id, **kwargs): # noqa: E501
"""committee_committee_id_electioneering_by_candidate_get # noqa: E501
Electioneering costs aggregated by candidate # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.committee_committee_id_electioneering_by_candidate_get_with_http_info(api_key, committee_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str api_key: API key for https://api.data.gov. Get one at https://api.data.gov/signup. (required)
:param str committee_id: (required)
:param bool sort_null_only: Toggle that filters out all rows having sort column that is non-null
:param bool sort_hide_null: Hide null values on sorted column(s).
:param str state: US state or territory where a candidate runs for office
:param str sort: Provide a field to sort by. Use - for descending order.
:param int page: For paginating through results, starting at page 1
:param str district: Two-digit US House distirict of the office the candidate is running for. Presidential, Senate and House at-large candidates will have District 00.
:param bool election_full: `True` indicates that full election period of a candidate. `False` indicates that two year election cycle.
:param str office: Federal office candidate runs for: H, S or P
:param int per_page: The number of results returned per page. Defaults to 20.
:param bool sort_nulls_last: Toggle that sorts null values last
:param list[int] cycle: Filter records to only those that were applicable to a given two-year period.The cycle begins with an odd year and is named for its ending, even year.
:param list[str] candidate_id: A unique identifier assigned to each candidate registered with the FEC. If a person runs for several offices, that person will have separate candidate IDs for each office.
:return: ElectioneeringByCandidatePage
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['api_key', 'committee_id', 'sort_null_only', 'sort_hide_null', 'state', 'sort', 'page', 'district', 'election_full', 'office', 'per_page', 'sort_nulls_last', 'cycle', 'candidate_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method committee_committee_id_electioneering_by_candidate_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'api_key' is set
if ('api_key' not in params or
params['api_key'] is None):
raise ValueError("Missing the required parameter `api_key` when calling `committee_committee_id_electioneering_by_candidate_get`") # noqa: E501
# verify the required parameter 'committee_id' is set
if ('committee_id' not in params or
params['committee_id'] is None):
raise ValueError("Missing the required parameter `committee_id` when calling `committee_committee_id_electioneering_by_candidate_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'committee_id' in params:
path_params['committee_id'] = params['committee_id'] # noqa: E501
query_params = []
if 'sort_null_only' in params:
query_params.append(('sort_null_only', params['sort_null_only'])) # noqa: E501
if 'sort_hide_null' in params:
query_params.append(('sort_hide_null', params['sort_hide_null'])) # noqa: E501
if 'state' in params:
query_params.append(('state', params['state'])) # noqa: E501
if 'sort' in params:
query_params.append(('sort', params['sort'])) # noqa: E501
if 'page' in params:
query_params.append(('page', params['page'])) # noqa: E501
if 'district' in params:
query_params.append(('district', params['district'])) # noqa: E501
if 'election_full' in params:
query_params.append(('election_full', params['election_full'])) # noqa: E501
if 'office' in params:
query_params.append(('office', params['office'])) # noqa: E501
if 'per_page' in params:
query_params.append(('per_page', params['per_page'])) # noqa: E501
if 'sort_nulls_last' in params:
query_params.append(('sort_nulls_last', params['sort_nulls_last'])) # noqa: E501
if 'cycle' in params:
query_params.append(('cycle', params['cycle'])) # noqa: E501
collection_formats['cycle'] = 'multi' # noqa: E501
if 'candidate_id' in params:
query_params.append(('candidate_id', params['candidate_id'])) # noqa: E501
collection_formats['candidate_id'] = 'multi' # noqa: E501
if 'api_key' in params:
query_params.append(('api_key', params['api_key'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['apiKey'] # noqa: E501
return self.api_client.call_api(
'/committee/{committee_id}/electioneering/by_candidate/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ElectioneeringByCandidatePage', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def electioneering_aggregates_get(self, api_key, **kwargs): # noqa: E501
"""electioneering_aggregates_get # noqa: E501
Electioneering communications costs aggregates # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.electioneering_aggregates_get(api_key, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str api_key: API key for https://api.data.gov. Get one at https://api.data.gov/signup. (required)
:param bool sort_null_only: Toggle that filters out all rows having sort column that is non-null
:param bool sort_hide_null: Hide null values on sorted column(s).
:param str sort: Provide a field to sort by. Use - for descending order.
:param int page: For paginating through results, starting at page 1
:param list[str] committee_id: A unique identifier assigned to each committee or filer registered with the FEC. In general committee id's begin with the letter C which is followed by eight digits.
:param int per_page: The number of results returned per page. Defaults to 20.
:param bool sort_nulls_last: Toggle that sorts null values last
:param list[int] cycle: Filter records to only those that were applicable to a given two-year period.The cycle begins with an odd year and is named for its ending, even year.
:param list[str] candidate_id: A unique identifier assigned to each candidate registered with the FEC. If a person runs for several offices, that person will have separate candidate IDs for each office.
:return: ElectioneeringByCandidatePage
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.electioneering_aggregates_get_with_http_info(api_key, **kwargs) # noqa: E501
else:
(data) = self.electioneering_aggregates_get_with_http_info(api_key, **kwargs) # noqa: E501
return data
def electioneering_aggregates_get_with_http_info(self, api_key, **kwargs): # noqa: E501
"""electioneering_aggregates_get # noqa: E501
Electioneering communications costs aggregates # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.electioneering_aggregates_get_with_http_info(api_key, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str api_key: API key for https://api.data.gov. Get one at https://api.data.gov/signup. (required)
:param bool sort_null_only: Toggle that filters out all rows having sort column that is non-null
:param bool sort_hide_null: Hide null values on sorted column(s).
:param str sort: Provide a field to sort by. Use - for descending order.
:param int page: For paginating through results, starting at page 1
:param list[str] committee_id: A unique identifier assigned to each committee or filer registered with the FEC. In general committee id's begin with the letter C which is followed by eight digits.
:param int per_page: The number of results returned per page. Defaults to 20.
:param bool sort_nulls_last: Toggle that sorts null values last
:param list[int] cycle: Filter records to only those that were applicable to a given two-year period.The cycle begins with an odd year and is named for its ending, even year.
:param list[str] candidate_id: A unique identifier assigned to each candidate registered with the FEC. If a person runs for several offices, that person will have separate candidate IDs for each office.
:return: ElectioneeringByCandidatePage
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['api_key', 'sort_null_only', 'sort_hide_null', 'sort', 'page', 'committee_id', 'per_page', 'sort_nulls_last', 'cycle', 'candidate_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method electioneering_aggregates_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'api_key' is set
if ('api_key' not in params or
params['api_key'] is None):
raise ValueError("Missing the required parameter `api_key` when calling `electioneering_aggregates_get`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'sort_null_only' in params:
query_params.append(('sort_null_only', params['sort_null_only'])) # noqa: E501
if 'sort_hide_null' in params:
query_params.append(('sort_hide_null', params['sort_hide_null'])) # noqa: E501
if 'sort' in params:
query_params.append(('sort', params['sort'])) # noqa: E501
if 'page' in params:
query_params.append(('page', params['page'])) # noqa: E501
if 'committee_id' in params:
query_params.append(('committee_id', params['committee_id'])) # noqa: E501
collection_formats['committee_id'] = 'multi' # noqa: E501
if 'per_page' in params:
query_params.append(('per_page', params['per_page'])) # noqa: E501
if 'sort_nulls_last' in params:
query_params.append(('sort_nulls_last', params['sort_nulls_last'])) # noqa: E501
if 'cycle' in params:
query_params.append(('cycle', params['cycle'])) # noqa: E501
collection_formats['cycle'] = 'multi' # noqa: E501
if 'candidate_id' in params:
query_params.append(('candidate_id', params['candidate_id'])) # noqa: E501
collection_formats['candidate_id'] = 'multi' # noqa: E501
if 'api_key' in params:
query_params.append(('api_key', params['api_key'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['apiKey'] # noqa: E501
return self.api_client.call_api(
'/electioneering/aggregates/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ElectioneeringByCandidatePage', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def electioneering_by_candidate_get(self, api_key, **kwargs): # noqa: E501
"""electioneering_by_candidate_get # noqa: E501
Electioneering costs aggregated by candidate # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.electioneering_by_candidate_get(api_key, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str api_key: API key for https://api.data.gov. Get one at https://api.data.gov/signup. (required)
:param bool sort_null_only: Toggle that filters out all rows having sort column that is non-null
:param bool sort_hide_null: Hide null values on sorted column(s).
:param str state: US state or territory where a candidate runs for office
:param str sort: Provide a field to sort by. Use - for descending order.
:param int page: For paginating through results, starting at page 1
:param str district: Two-digit US House distirict of the office the candidate is running for. Presidential, Senate and House at-large candidates will have District 00.
:param bool election_full: `True` indicates that full election period of a candidate. `False` indicates that two year election cycle.
:param str office: Federal office candidate runs for: H, S or P
:param int per_page: The number of results returned per page. Defaults to 20.
:param bool sort_nulls_last: Toggle that sorts null values last
:param list[int] cycle: Filter records to only those that were applicable to a given two-year period.The cycle begins with an odd year and is named for its ending, even year.
:param list[str] candidate_id: A unique identifier assigned to each candidate registered with the FEC. If a person runs for several offices, that person will have separate candidate IDs for each office.
:return: ElectioneeringByCandidatePage
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.electioneering_by_candidate_get_with_http_info(api_key, **kwargs) # noqa: E501
else:
(data) = self.electioneering_by_candidate_get_with_http_info(api_key, **kwargs) # noqa: E501
return data
def electioneering_by_candidate_get_with_http_info(self, api_key, **kwargs): # noqa: E501
"""electioneering_by_candidate_get # noqa: E501
Electioneering costs aggregated by candidate # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.electioneering_by_candidate_get_with_http_info(api_key, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str api_key: API key for https://api.data.gov. Get one at https://api.data.gov/signup. (required)
:param bool sort_null_only: Toggle that filters out all rows having sort column that is non-null
:param bool sort_hide_null: Hide null values on sorted column(s).
:param str state: US state or territory where a candidate runs for office
:param str sort: Provide a field to sort by. Use - for descending order.
:param int page: For paginating through results, starting at page 1
:param str district: Two-digit US House distirict of the office the candidate is running for. Presidential, Senate and House at-large candidates will have District 00.
:param bool election_full: `True` indicates that full election period of a candidate. `False` indicates that two year election cycle.
:param str office: Federal office candidate runs for: H, S or P
:param int per_page: The number of results returned per page. Defaults to 20.
:param bool sort_nulls_last: Toggle that sorts null values last
:param list[int] cycle: Filter records to only those that were applicable to a given two-year period.The cycle begins with an odd year and is named for its ending, even year.
:param list[str] candidate_id: A unique identifier assigned to each candidate registered with the FEC. If a person runs for several offices, that person will have separate candidate IDs for each office.
:return: ElectioneeringByCandidatePage
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['api_key', 'sort_null_only', 'sort_hide_null', 'state', 'sort', 'page', 'district', 'election_full', 'office', 'per_page', 'sort_nulls_last', 'cycle', 'candidate_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method electioneering_by_candidate_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'api_key' is set
if ('api_key' not in params or
params['api_key'] is None):
raise ValueError("Missing the required parameter `api_key` when calling `electioneering_by_candidate_get`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'sort_null_only' in params:
query_params.append(('sort_null_only', params['sort_null_only'])) # noqa: E501
if 'sort_hide_null' in params:
query_params.append(('sort_hide_null', params['sort_hide_null'])) # noqa: E501
if 'state' in params:
query_params.append(('state', params['state'])) # noqa: E501
if 'sort' in params:
query_params.append(('sort', params['sort'])) # noqa: E501
if 'page' in params:
query_params.append(('page', params['page'])) # noqa: E501
if 'district' in params:
query_params.append(('district', params['district'])) # noqa: E501
if 'election_full' in params:
query_params.append(('election_full', params['election_full'])) # noqa: E501
if 'office' in params:
query_params.append(('office', params['office'])) # noqa: E501
if 'per_page' in params:
query_params.append(('per_page', params['per_page'])) # noqa: E501
if 'sort_nulls_last' in params:
query_params.append(('sort_nulls_last', params['sort_nulls_last'])) # noqa: E501
if 'cycle' in params:
query_params.append(('cycle', params['cycle'])) # noqa: E501
collection_formats['cycle'] = 'multi' # noqa: E501
if 'candidate_id' in params:
query_params.append(('candidate_id', params['candidate_id'])) # noqa: E501
collection_formats['candidate_id'] = 'multi' # noqa: E501
if 'api_key' in params:
query_params.append(('api_key', params['api_key'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['apiKey'] # noqa: E501
return self.api_client.call_api(
'/electioneering/by_candidate/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ElectioneeringByCandidatePage', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def electioneering_get(self, api_key, **kwargs): # noqa: E501
"""electioneering_get # noqa: E501
An electioneering communication is any broadcast, cable or satellite communication that fulfills each of the following conditions: _The communication refers to a clearly identified federal candidate._ _The communication is publicly distributed by a television station, radio station, cable television system or satellite system for a fee._ _The communication is distributed within 60 days prior to a general election or 30 days prior to a primary election to federal office._ # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.electioneering_get(api_key, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str api_key: API key for https://api.data.gov. Get one at https://api.data.gov/signup. (required)
:param bool sort_null_only: Toggle that filters out all rows having sort column that is non-null
:param int per_page: The number of results returned per page. Defaults to 20.
:param bool sort_hide_null: Hide null values on sorted column(s).
:param int last_index: Index of last result from previous page
:param int page: For paginating through results, starting at page 1
:param str description:
:param str sort: Provide a field to sort by. Use - for descending order.
:param list[str] committee_id: A unique identifier assigned to each committee or filer registered with the FEC. In general committee id's begin with the letter C which is followed by eight digits.
:param str max_amount: Filter for all amounts less than a value.
:param str min_amount: Filter for all amounts greater than a value.
:param date min_date: Minimum disbursement date
:param date max_date: Maximum disbursement date
:param list[str] candidate_id: A unique identifier assigned to each candidate registered with the FEC. If a person runs for several offices, that person will have separate candidate IDs for each office.
:param bool sort_nulls_last: Toggle that sorts null values last
:param list[int] report_year: Forms with coverage date - year from the coverage ending date. Forms without coverage date - year from the receipt date.
:return: ElectioneeringPage
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.electioneering_get_with_http_info(api_key, **kwargs) # noqa: E501
else:
(data) = self.electioneering_get_with_http_info(api_key, **kwargs) # noqa: E501
return data
def electioneering_get_with_http_info(self, api_key, **kwargs): # noqa: E501
"""electioneering_get # noqa: E501
An electioneering communication is any broadcast, cable or satellite communication that fulfills each of the following conditions: _The communication refers to a clearly identified federal candidate._ _The communication is publicly distributed by a television station, radio station, cable television system or satellite system for a fee._ _The communication is distributed within 60 days prior to a general election or 30 days prior to a primary election to federal office._ # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.electioneering_get_with_http_info(api_key, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str api_key: API key for https://api.data.gov. Get one at https://api.data.gov/signup. (required)
:param bool sort_null_only: Toggle that filters out all rows having sort column that is non-null
:param int per_page: The number of results returned per page. Defaults to 20.
:param bool sort_hide_null: Hide null values on sorted column(s).
:param int last_index: Index of last result from previous page
:param int page: For paginating through results, starting at page 1
:param str description:
:param str sort: Provide a field to sort by. Use - for descending order.
:param list[str] committee_id: A unique identifier assigned to each committee or filer registered with the FEC. In general committee id's begin with the letter C which is followed by eight digits.
:param str max_amount: Filter for all amounts less than a value.
:param str min_amount: Filter for all amounts greater than a value.
:param date min_date: Minimum disbursement date
:param date max_date: Maximum disbursement date
:param list[str] candidate_id: A unique identifier assigned to each candidate registered with the FEC. If a person runs for several offices, that person will have separate candidate IDs for each office.
:param bool sort_nulls_last: Toggle that sorts null values last
:param list[int] report_year: Forms with coverage date - year from the coverage ending date. Forms without coverage date - year from the receipt date.
:return: ElectioneeringPage
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['api_key', 'sort_null_only', 'per_page', 'sort_hide_null', 'last_index', 'page', 'description', 'sort', 'committee_id', 'max_amount', 'min_amount', 'min_date', 'max_date', 'candidate_id', 'sort_nulls_last', 'report_year'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method electioneering_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'api_key' is set
if ('api_key' not in params or
params['api_key'] is None):
raise ValueError("Missing the required parameter `api_key` when calling `electioneering_get`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'sort_null_only' in params:
query_params.append(('sort_null_only', params['sort_null_only'])) # noqa: E501
if 'per_page' in params:
query_params.append(('per_page', params['per_page'])) # noqa: E501
if 'sort_hide_null' in params:
query_params.append(('sort_hide_null', params['sort_hide_null'])) # noqa: E501
if 'last_index' in params:
query_params.append(('last_index', params['last_index'])) # noqa: E501
if 'page' in params:
query_params.append(('page', params['page'])) # noqa: E501
if 'description' in params:
query_params.append(('description', params['description'])) # noqa: E501
if 'sort' in params:
query_params.append(('sort', params['sort'])) # noqa: E501
if 'committee_id' in params:
query_params.append(('committee_id', params['committee_id'])) # noqa: E501
collection_formats['committee_id'] = 'multi' # noqa: E501
if 'max_amount' in params:
query_params.append(('max_amount', params['max_amount'])) # noqa: E501
if 'min_amount' in params:
query_params.append(('min_amount', params['min_amount'])) # noqa: E501
if 'min_date' in params:
query_params.append(('min_date', params['min_date'])) # noqa: E501
if 'max_date' in params:
query_params.append(('max_date', params['max_date'])) # noqa: E501
if 'candidate_id' in params:
query_params.append(('candidate_id', params['candidate_id'])) # noqa: E501
collection_formats['candidate_id'] = 'multi' # noqa: E501
if 'api_key' in params:
query_params.append(('api_key', params['api_key'])) # noqa: E501
if 'sort_nulls_last' in params:
query_params.append(('sort_nulls_last', params['sort_nulls_last'])) # noqa: E501
if 'report_year' in params:
query_params.append(('report_year', params['report_year'])) # noqa: E501
collection_formats['report_year'] = 'multi' # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['apiKey'] # noqa: E501
return self.api_client.call_api(
'/electioneering/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ElectioneeringPage', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def electioneering_totals_by_candidate_get(self, api_key, **kwargs): # noqa: E501
"""electioneering_totals_by_candidate_get # noqa: E501
Total electioneering communications spent on candidates by cycle or candidate election year # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.electioneering_totals_by_candidate_get(api_key, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str api_key: API key for https://api.data.gov. Get one at https://api.data.gov/signup. (required)
:param bool sort_null_only: Toggle that filters out all rows having sort column that is non-null
:param bool sort_hide_null: Hide null values on sorted column(s).
:param str sort: Provide a field to sort by. Use - for descending order.
:param int page: For paginating through results, starting at page 1
:param bool election_full: `True` indicates that full election period of a candidate. `False` indicates that two year election cycle.
:param int per_page: The number of results returned per page. Defaults to 20.
:param bool sort_nulls_last: Toggle that sorts null values last
:param list[int] cycle: Filter records to only those that were applicable to a given two-year period.The cycle begins with an odd year and is named for its ending, even year.
:param list[str] candidate_id: A unique identifier assigned to each candidate registered with the FEC. If a person runs for several offices, that person will have separate candidate IDs for each office.
:return: ECTotalsByCandidatePage
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.electioneering_totals_by_candidate_get_with_http_info(api_key, **kwargs) # noqa: E501
else:
(data) = self.electioneering_totals_by_candidate_get_with_http_info(api_key, **kwargs) # noqa: E501
return data
def electioneering_totals_by_candidate_get_with_http_info(self, api_key, **kwargs): # noqa: E501
"""electioneering_totals_by_candidate_get # noqa: E501
Total electioneering communications spent on candidates by cycle or candidate election year # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.electioneering_totals_by_candidate_get_with_http_info(api_key, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str api_key: API key for https://api.data.gov. Get one at https://api.data.gov/signup. (required)
:param bool sort_null_only: Toggle that filters out all rows having sort column that is non-null
:param bool sort_hide_null: Hide null values on sorted column(s).
:param str sort: Provide a field to sort by. Use - for descending order.
:param int page: For paginating through results, starting at page 1
:param bool election_full: `True` indicates that full election period of a candidate. `False` indicates that two year election cycle.
:param int per_page: The number of results returned per page. Defaults to 20.
:param bool sort_nulls_last: Toggle that sorts null values last
:param list[int] cycle: Filter records to only those that were applicable to a given two-year period.The cycle begins with an odd year and is named for its ending, even year.
:param list[str] candidate_id: A unique identifier assigned to each candidate registered with the FEC. If a person runs for several offices, that person will have separate candidate IDs for each office.
:return: ECTotalsByCandidatePage
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['api_key', 'sort_null_only', 'sort_hide_null', 'sort', 'page', 'election_full', 'per_page', 'sort_nulls_last', 'cycle', 'candidate_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method electioneering_totals_by_candidate_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'api_key' is set
if ('api_key' not in params or
params['api_key'] is None):
raise ValueError("Missing the required parameter `api_key` when calling `electioneering_totals_by_candidate_get`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'sort_null_only' in params:
query_params.append(('sort_null_only', params['sort_null_only'])) # noqa: E501
if 'sort_hide_null' in params:
query_params.append(('sort_hide_null', params['sort_hide_null'])) # noqa: E501
if 'sort' in params:
query_params.append(('sort', params['sort'])) # noqa: E501
if 'page' in params:
query_params.append(('page', params['page'])) # noqa: E501
if 'election_full' in params:
query_params.append(('election_full', params['election_full'])) # noqa: E501
if 'per_page' in params:
query_params.append(('per_page', params['per_page'])) # noqa: E501
if 'sort_nulls_last' in params:
query_params.append(('sort_nulls_last', params['sort_nulls_last'])) # noqa: E501
if 'cycle' in params:
query_params.append(('cycle', params['cycle'])) # noqa: E501
collection_formats['cycle'] = 'multi' # noqa: E501
if 'candidate_id' in params:
query_params.append(('candidate_id', params['candidate_id'])) # noqa: E501
collection_formats['candidate_id'] = 'multi' # noqa: E501
if 'api_key' in params:
query_params.append(('api_key', params['api_key'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['apiKey'] # noqa: E501
return self.api_client.call_api(
'/electioneering/totals/by_candidate/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ECTotalsByCandidatePage', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| [
"[email protected]"
] | |
f7ddb88ee40975c1f6cf4d7261152d33c5be59d8 | 43f2a863243fbb93883d10207662a626a7da5b54 | /scripts/interp_sea_level_ICESat2_ATL07.py | f73900beec9d1bdce44f7fe9ddbae6a26c87436c | [
"MIT",
"CC-BY-4.0"
] | permissive | mrsiegfried/read-ICESat-2 | cdc6f0d0963140c968e602c4b8b36c4b80e82116 | 1406b92691d284616ca6c9d72646eca4592d1f1d | refs/heads/main | 2023-06-13T21:49:46.089847 | 2021-06-15T00:11:37 | 2021-06-15T00:11:37 | 382,966,888 | 0 | 0 | MIT | 2021-07-04T23:24:17 | 2021-07-04T23:24:17 | null | UTF-8 | Python | false | false | 34,396 | py | #!/usr/bin/env python
u"""
interp_sea_level_ICESat2_ATL07.py
Written by Tyler Sutterley (05/2021)
Interpolates sea level anomalies (sla), absolute dynamic topography (adt) and
mean dynamic topography (mdt) to times and locations of ICESat-2 ATL07 data
https://www.aviso.altimetry.fr/en/data/products/sea-surface-height-products/
global/msla-h.html
ftp://ftp.sltac.cls.fr/Core/SEALEVEL_GLO_PHY_L4_REP_OBSERVATIONS_008_047/
dataset-duacs-rep-global-merged-allsat-phy-l4-v3
Note that the AVISO sea level data are gzip compressed netCDF4 files
COMMAND LINE OPTIONS:
-D X, --directory X: Working data directory
-V, --verbose: Output information about each created file
-M X, --mode X: Permission mode of directories and files created
PYTHON DEPENDENCIES:
numpy: Scientific Computing Tools For Python
https://numpy.org
https://numpy.org/doc/stable/user/numpy-for-matlab-users.html
pyproj: Python interface to PROJ library
https://pypi.org/project/pyproj/
scikit-learn: Machine Learning in Python
https://scikit-learn.org/stable/index.html
https://github.com/scikit-learn/scikit-learn
h5py: Python interface for Hierarchal Data Format 5 (HDF5)
https://h5py.org
netCDF4: Python interface to the netCDF C library
https://unidata.github.io/netcdf4-python/netCDF4/index.html
PROGRAM DEPENDENCIES:
read_ICESat2_ATL07.py: reads ICESat-2 sea ice height data files
time.py: utilities for calculating time operations
utilities.py: download and management utilities for syncing files
UPDATE HISTORY:
Updated 05/2021: print full path of output filename
Written 03/2021
"""
from __future__ import print_function
import os
import re
import gzip
import h5py
import pyproj
import netCDF4
import argparse
import datetime
import numpy as np
import sklearn.neighbors
import icesat2_toolkit.time
from icesat2_toolkit.read_ICESat2_ATL07 import read_HDF5_ATL07
#-- PURPOSE: set the hemisphere of interest based on the granule
def set_hemisphere(GRANULE):
if GRANULE in ('10','11','12'):
projection_flag = 'S'
elif GRANULE in ('03','04','05'):
projection_flag = 'N'
return projection_flag
#-- PURPOSE: interpolates to coordinates with inverse distance weighting
def inverse_distance(x, y, z, xi, yi, SEARCH='BallTree', N=10, POWER=2.0):
#-- number of output points
npts = len(xi)
#-- create neighbors object for coordinates
if (SEARCH == 'BallTree'):
tree = sklearn.neighbors.BallTree(np.c_[x,y])
elif (SEARCH == 'KDTree'):
tree = sklearn.neighbors.KDTree(np.c_[x,y])
#-- query the search tree to find the N closest points
dist,indices = tree.query(np.c_[xi,yi], k=N, return_distance=True)
#-- normalized weights if POWER > 0 (typically between 1 and 3)
#-- in the inverse distance weighting
power_inverse_distance = dist**(-POWER)
s = np.sum(power_inverse_distance, axis=1)
w = power_inverse_distance/np.broadcast_to(s[:,None],(npts,N))
#-- calculate interpolated fields by inverse distance weighting
return np.sum(w*z[indices],axis=1)
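#-- note: each interpolated value is a weighted average of the N nearest input
#-- points, with weights proportional to 1/distance**POWER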
#-- PURPOSE: interpolate sea level anomalies to lat/lon and then to time
def interpolate_sea_level(base_dir, xi, yi, CJD, HEM):
#-- EPSG projections for converting lat/lon to polar stereographic
EPSG = dict(N=3413,S=3031)
#-- pyproj transformer for converting to polar stereographic
crs1 = pyproj.CRS.from_string('epsg:4326')
crs2 = pyproj.CRS.from_string(EPSG[HEM])
transformer = pyproj.Transformer.from_crs(crs1, crs2, always_xy=True)
#-- interpolate mean dynamic topography
input_file = 'mdt_cnes_cls2013_global.nc.gz'
#-- read bytes from compressed file
fd = gzip.open(os.path.join(base_dir,input_file),'rb')
#-- dictionary with input fields
dinput = {}
#-- read netCDF file for mean dynamic topography
with netCDF4.Dataset('mdt', mode='r', memory=fd.read()) as fileID:
dinput['lon'] = fileID['lon'][:].copy()
dinput['lat'] = fileID['lat'][:].copy()
dinput['mdt'] = np.ma.array(fileID['mdt'][0,:,:].copy(),
fill_value=fileID['mdt']._FillValue)
dinput['mdt'].mask = (dinput['mdt'].data == dinput['mdt'].fill_value)
#-- close the compressed file objects
fd.close()
#-- create 2-D grid coordinates from longitude and latitude vectors
gridlon,gridlat = np.meshgrid(dinput['lon'],dinput['lat'])
#-- convert from latitude/longitude into polar stereographic
xg,yg = transformer.transform(gridlon,gridlat)
#-- reduce to local coordinates to improve computational time
gridmask = np.logical_not(dinput['mdt'].mask)
if (HEM.upper() == 'N'):
gridmask &= (gridlat >= 50.0)
elif (HEM.upper() == 'S'):
gridmask &= (gridlat <= -50.0)
indy,indx = np.nonzero(gridmask)
#-- calculate mean dynamic topography by inverse distance weighting
MDT = inverse_distance(xg[indy,indx], yg[indy,indx],
dinput['mdt'].data[indy,indx], xi, yi)
#-- CNES Julian Days before and after measurement
CJD1 = np.floor(CJD)
#-- scale for linearly interpolating to date
dt = (CJD - CJD1[0])
#-- output sea level anomaly and absolute dynamic topography
SLA = np.zeros_like(CJD)
ADT = np.zeros_like(CJD)
#-- for the range of dates
for day in range(2):
        #-- convert from CNES Julian Days to calendar dates for time
JD1 = CJD1 + day + 2433282.5
YY,MM,DD,HH,MN,SS = icesat2_toolkit.time.convert_julian(JD1[0],
FORMAT='tuple', ASTYPE=int)
#-- sea level directory
ddir = os.path.join(base_dir, '{0:0.0f}'.format(YY))
#-- input file for day before the measurement
regex = re.compile(('dt_global_allsat_phy_l4_{0:4d}{1:02d}{2:02d}_'
'(\d{{4}})(\d{{2}})(\d{{2}}).nc.gz').format(YY,MM,DD))
input_file, = [fi for fi in os.listdir(ddir) if regex.match(fi)]
#-- dictionary with input fields
dinput = {}
#-- read bytes from compressed file
fd = gzip.open(os.path.join(ddir,input_file),'rb')
#-- read netCDF file for time
with netCDF4.Dataset('sla', mode='r', memory=fd.read()) as fileID:
dinput['lon'] = fileID['lon'][:].copy()
dinput['lat'] = fileID['lat'][:].copy()
dinput['sla'] = np.ma.array(fileID['sla'][0,:,:].copy(),
fill_value=fileID['sla']._FillValue)
dinput['adt'] = np.ma.array(fileID['adt'][0,:,:].copy(),
fill_value=fileID['adt']._FillValue)
#-- close the compressed file objects
fd.close()
#-- for each variable to interpolate
out = {}
for var in ['sla','adt']:
#-- reduce to local coordinates to improve computational time
gridmask = np.logical_not(dinput[var].mask)
if (HEM.upper() == 'N'):
gridmask &= (gridlat >= 50.0)
elif (HEM.upper() == 'S'):
gridmask &= (gridlat <= -50.0)
indy,indx = np.nonzero(gridmask)
#-- calculate variable by inverse distance weighting
out[var] = inverse_distance(xg[indy,indx], yg[indy,indx],
dinput[var].data[indy,indx], xi, yi)
#-- linearly interpolate to date for iteration
SLA += out['sla']*(2.0*dt*day - dt - day + 1.0)
ADT += out['adt']*(2.0*dt*day - dt - day + 1.0)
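        #-- the weight (2*dt*day - dt - day + 1) evaluates to (1 - dt) for day=0
        #-- and to dt for day=1, i.e. linear interpolation between the two daily maps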
#-- return interpolated values
return dict(h_mdt=MDT,h_sla=SLA,h_adt=ADT)
#-- PURPOSE: read ICESat-2 sea ice height (ATL07) from NSIDC
#-- interpolate AVISO sea level at points and times
def interp_sea_level_ICESat2(base_dir, FILE, VERBOSE=False, MODE=0o775):
#-- read data from input_file
print('{0} -->'.format(os.path.basename(FILE))) if VERBOSE else None
IS2_atl07_mds,IS2_atl07_attrs,IS2_atl07_beams = read_HDF5_ATL07(FILE,
ATTRIBUTES=True)
DIRECTORY = os.path.dirname(FILE)
#-- extract parameters from ICESat-2 ATLAS HDF5 sea ice file name
rx = re.compile(r'(processed_)?(ATL\d{2})-(\d{2})_(\d{4})(\d{2})(\d{2})'
r'(\d{2})(\d{2})(\d{2})_(\d{4})(\d{2})(\d{2})_(\d{3})_(\d{2})(.*?).h5$')
SUB,PRD,HMN,YY,MM,DD,HH,MN,SS,TRK,CYCL,SN,RL,VERS,AUX=rx.findall(FILE).pop()
#-- set the hemisphere flag based on ICESat-2 granule
HEM = set_hemisphere(HMN)
#-- HDF5 file attributes
attrib = {}
#-- mean dynamic topography
attrib['h_mdt'] = {}
attrib['h_mdt']['long_name'] = 'Mean Dynamic Topography'
attrib['h_mdt']['description'] = 'Sea surface height above geoid'
attrib['h_mdt']['reference'] = ('https://www.aviso.altimetry.fr/en/data/'
'products/sea-surface-height-products/global/msla-h.html')
#-- sea level anomalies
attrib['h_sla'] = {}
attrib['h_sla']['long_name'] = 'Sea Level Anomaly'
attrib['h_sla']['description'] = 'Sea surface anomalies'
attrib['h_sla']['reference'] = ('https://www.aviso.altimetry.fr/en/data/'
'products/sea-surface-height-products/global/msla-h.html')
#-- absolute dynamic topography
attrib['h_adt'] = {}
attrib['h_adt']['long_name'] = 'Absolute Dynamic Topography'
attrib['h_adt']['description'] = ('Sea surface height above geoid calculated '
'by adding the mean dynamic topography to the sea level anomalies')
attrib['h_adt']['reference'] = ('https://www.aviso.altimetry.fr/en/data/'
'products/sea-surface-height-products/global/msla-h.html')
#-- EPSG projections for converting lat/lon to polar stereographic
EPSG = dict(N=3413,S=3031)
#-- pyproj transformer for converting to polar stereographic
crs1 = pyproj.CRS.from_string("epsg:{0:d}".format(4326))
crs2 = pyproj.CRS.from_string("epsg:{0:d}".format(EPSG[HEM]))
transformer = pyproj.Transformer.from_crs(crs1, crs2, always_xy=True)
#-- number of GPS seconds between the GPS epoch
#-- and ATLAS Standard Data Product (SDP) epoch
atlas_sdp_gps_epoch = IS2_atl07_mds['ancillary_data']['atlas_sdp_gps_epoch']
#-- copy variables for outputting to HDF5 file
IS2_atl07_corr = {}
IS2_atl07_fill = {}
IS2_atl07_dims = {}
IS2_atl07_corr_attrs = {}
#-- number of GPS seconds between the GPS epoch (1980-01-06T00:00:00Z UTC)
#-- and ATLAS Standard Data Product (SDP) epoch (2018-01-01T00:00:00Z UTC)
#-- Add this value to delta time parameters to compute full gps_seconds
IS2_atl07_corr['ancillary_data'] = {}
IS2_atl07_corr_attrs['ancillary_data'] = {}
for key in ['atlas_sdp_gps_epoch']:
#-- get each HDF5 variable
IS2_atl07_corr['ancillary_data'][key] = IS2_atl07_mds['ancillary_data'][key]
#-- Getting attributes of group and included variables
IS2_atl07_corr_attrs['ancillary_data'][key] = {}
for att_name,att_val in IS2_atl07_attrs['ancillary_data'][key].items():
IS2_atl07_corr_attrs['ancillary_data'][key][att_name] = att_val
#-- for each input beam within the file
for gtx in sorted(IS2_atl07_beams):
#-- output data dictionaries for beam
IS2_atl07_corr[gtx] = dict(sea_ice_segments={})
IS2_atl07_fill[gtx] = dict(sea_ice_segments={})
IS2_atl07_dims[gtx] = dict(sea_ice_segments={})
IS2_atl07_corr_attrs[gtx] = dict(sea_ice_segments={})
#-- number of segments
val = IS2_atl07_mds[gtx]['sea_ice_segments']
n_seg = len(val['height_segment_id'])
#-- convert time from ATLAS SDP to CNES JD
#-- days relative to 1950-01-01T00:00:00
gps_seconds = atlas_sdp_gps_epoch + val['delta_time']
leap_seconds = icesat2_toolkit.time.count_leap_seconds(gps_seconds)
cnes_time = icesat2_toolkit.time.convert_delta_time(gps_seconds-leap_seconds,
epoch1=(1980,1,6,0,0,0), epoch2=(1950,1,1,0,0,0), scale=1.0/86400.0)
#-- extract lat/lon and convert to polar stereographic
X,Y = transformer.transform(val['longitude'],val['latitude'])
#-- interpolate sea level anomalies and dynamic topographies
interp = interpolate_sea_level(base_dir,X,Y,cnes_time,HEM)
#-- group attributes for beam
IS2_atl07_corr_attrs[gtx]['Description'] = IS2_atl07_attrs[gtx]['Description']
IS2_atl07_corr_attrs[gtx]['atlas_pce'] = IS2_atl07_attrs[gtx]['atlas_pce']
IS2_atl07_corr_attrs[gtx]['atlas_beam_type'] = IS2_atl07_attrs[gtx]['atlas_beam_type']
IS2_atl07_corr_attrs[gtx]['groundtrack_id'] = IS2_atl07_attrs[gtx]['groundtrack_id']
IS2_atl07_corr_attrs[gtx]['atmosphere_profile'] = IS2_atl07_attrs[gtx]['atmosphere_profile']
IS2_atl07_corr_attrs[gtx]['atlas_spot_number'] = IS2_atl07_attrs[gtx]['atlas_spot_number']
IS2_atl07_corr_attrs[gtx]['sc_orientation'] = IS2_atl07_attrs[gtx]['sc_orientation']
#-- group attributes for sea_ice_segments
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['Description'] = ("Top group for sea "
"ice segments as computed by the ATBD algorithm.")
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['data_rate'] = ("Data within this "
"group are stored at the variable segment rate.")
#-- geolocation, time and segment ID
#-- delta time
IS2_atl07_corr[gtx]['sea_ice_segments']['delta_time'] = val['delta_time'].copy()
IS2_atl07_fill[gtx]['sea_ice_segments']['delta_time'] = None
IS2_atl07_dims[gtx]['sea_ice_segments']['delta_time'] = None
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['delta_time'] = {}
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['delta_time']['units'] = "seconds since 2018-01-01"
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['delta_time']['long_name'] = "Elapsed GPS seconds"
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['delta_time']['standard_name'] = "time"
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['delta_time']['source'] = "telemetry"
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['delta_time']['calendar'] = "standard"
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['delta_time']['description'] = ("Number of "
"GPS seconds since the ATLAS SDP epoch. The ATLAS Standard Data Products (SDP) epoch "
"offset is defined within /ancillary_data/atlas_sdp_gps_epoch as the number of GPS "
"seconds between the GPS epoch (1980-01-06T00:00:00.000000Z UTC) and the ATLAS SDP "
"epoch. By adding the offset contained within atlas_sdp_gps_epoch to delta time "
"parameters, the time in gps_seconds relative to the GPS epoch can be computed.")
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['delta_time']['coordinates'] = \
"height_segment_id latitude longitude"
#-- latitude
IS2_atl07_corr[gtx]['sea_ice_segments']['latitude'] = val['latitude'].copy()
IS2_atl07_fill[gtx]['sea_ice_segments']['latitude'] = None
IS2_atl07_dims[gtx]['sea_ice_segments']['latitude'] = ['delta_time']
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['latitude'] = {}
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['latitude']['units'] = "degrees_north"
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['latitude']['contentType'] = "physicalMeasurement"
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['latitude']['long_name'] = "Latitude"
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['latitude']['standard_name'] = "latitude"
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['latitude']['description'] = ("Latitude of "
"segment center")
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['latitude']['valid_min'] = -90.0
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['latitude']['valid_max'] = 90.0
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['latitude']['coordinates'] = \
"height_segment_id delta_time longitude"
#-- longitude
IS2_atl07_corr[gtx]['sea_ice_segments']['longitude'] = val['longitude'].copy()
IS2_atl07_fill[gtx]['sea_ice_segments']['longitude'] = None
IS2_atl07_dims[gtx]['sea_ice_segments']['longitude'] = ['delta_time']
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['longitude'] = {}
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['longitude']['units'] = "degrees_east"
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['longitude']['contentType'] = "physicalMeasurement"
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['longitude']['long_name'] = "Longitude"
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['longitude']['standard_name'] = "longitude"
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['longitude']['description'] = ("Longitude of "
"segment center")
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['longitude']['valid_min'] = -180.0
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['longitude']['valid_max'] = 180.0
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['longitude']['coordinates'] = \
"height_segment_id delta_time latitude"
#-- segment ID
IS2_atl07_corr[gtx]['sea_ice_segments']['height_segment_id'] = val['height_segment_id']
IS2_atl07_fill[gtx]['sea_ice_segments']['height_segment_id'] = None
IS2_atl07_dims[gtx]['sea_ice_segments']['height_segment_id'] = ['delta_time']
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['height_segment_id'] = {}
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['height_segment_id']['units'] = "1"
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['height_segment_id']['contentType'] = "referenceInformation"
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['height_segment_id']['long_name'] = \
"Identifier of each height segment"
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['height_segment_id']['description'] = \
"Identifier of each height segment"
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['height_segment_id']['coordinates'] = \
"delta_time latitude longitude"
#-- geolocation segment beginning
IS2_atl07_corr[gtx]['sea_ice_segments']['geoseg_beg'] = val['geoseg_beg'].copy()
IS2_atl07_fill[gtx]['sea_ice_segments']['geoseg_beg'] = None
IS2_atl07_dims[gtx]['sea_ice_segments']['geoseg_beg'] = ['delta_time']
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geoseg_beg'] = {}
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geoseg_beg']['units'] = "1"
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geoseg_beg']['contentType'] = "referenceInformation"
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geoseg_beg']['long_name'] = "Beginning GEOSEG"
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geoseg_beg']['description'] = \
"Geolocation segment (geoseg) ID associated with the first photon used in this sea ice segment"
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geoseg_beg']['coordinates'] = \
"height_segment_id delta_time latitude longitude"
#-- geolocation segment ending
IS2_atl07_corr[gtx]['sea_ice_segments']['geoseg_end'] = val['geoseg_end'].copy()
IS2_atl07_fill[gtx]['sea_ice_segments']['geoseg_end'] = None
IS2_atl07_dims[gtx]['sea_ice_segments']['geoseg_end'] = ['delta_time']
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geoseg_end'] = {}
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geoseg_end']['units'] = "1"
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geoseg_end']['contentType'] = "referenceInformation"
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geoseg_end']['long_name'] = "Ending GEOSEG"
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geoseg_end']['description'] = \
"Geolocation segment (geoseg) ID associated with the last photon used in this sea ice segment"
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geoseg_end']['coordinates'] = \
"height_segment_id delta_time latitude longitude"
#-- along track distance
IS2_atl07_corr[gtx]['sea_ice_segments']['seg_dist_x'] = val['seg_dist_x'].copy()
IS2_atl07_fill[gtx]['sea_ice_segments']['seg_dist_x'] = None
IS2_atl07_dims[gtx]['sea_ice_segments']['seg_dist_x'] = ['delta_time']
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['seg_dist_x'] = {}
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['seg_dist_x']['units'] = "meters"
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['seg_dist_x']['contentType'] = "referenceInformation"
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['seg_dist_x']['long_name'] = "Along track distance"
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['seg_dist_x']['description'] = \
"Along-track distance from the equator crossing to the segment center."
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['seg_dist_x']['coordinates'] = \
"height_segment_id delta_time latitude longitude"
#-- geophysical variables
IS2_atl07_corr[gtx]['sea_ice_segments']['geophysical'] = {}
IS2_atl07_fill[gtx]['sea_ice_segments']['geophysical'] = {}
IS2_atl07_dims[gtx]['sea_ice_segments']['geophysical'] = {}
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geophysical'] = {}
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geophysical']['Description'] = ("Contains geophysical "
"parameters and corrections used to correct photon heights for geophysical effects, such as tides.")
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geophysical']['data_rate'] = ("Data within this group "
"are stored at the sea_ice_height segment rate.")
#-- interpolated sea level products
for key,val in interp.items():
#-- copy output variables
sea_level = np.ma.zeros((n_seg))
sea_level.data[:] = np.copy(val)
#-- replace nan values with fill value
sea_level.mask = np.isnan(sea_level.data)
sea_level.data[sea_level.mask] = sea_level.fill_value
#-- add to output
IS2_atl07_corr[gtx]['sea_ice_segments']['geophysical'][key] = sea_level.copy()
IS2_atl07_fill[gtx]['sea_ice_segments']['geophysical'][key] = sea_level.fill_value
IS2_atl07_dims[gtx]['sea_ice_segments']['geophysical'][key] = ['delta_time']
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geophysical'][key] = {}
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geophysical'][key]['units'] = "meters"
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geophysical'][key]['contentType'] = "referenceInformation"
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geophysical'][key]['long_name'] = attrib[key]['long_name']
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geophysical'][key]['description'] = attrib[key]['description']
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geophysical'][key]['source'] = 'AVISO/Copernicus'
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geophysical'][key]['reference'] = attrib[key]['reference']
IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geophysical'][key]['coordinates'] = \
"../height_segment_id ../delta_time ../latitude ../longitude"
#-- output HDF5 files with interpolated sea level data
fargs = (PRD,HEM,'AVISO_SEA_LEVEL',YY,MM,DD,HH,MN,SS,TRK,CYCL,SN,RL,VERS,AUX)
file_format = '{0}-{1}_{2}_{3}{4}{5}{6}{7}{8}_{9}{10}{11}_{12}_{13}{14}.h5'
output_file = os.path.join(DIRECTORY,file_format.format(*fargs))
#-- print file information
print('\t{0}'.format(output_file)) if VERBOSE else None
HDF5_ATL07_corr_write(IS2_atl07_corr, IS2_atl07_corr_attrs,
CLOBBER=True, INPUT=os.path.basename(FILE),
FILL_VALUE=IS2_atl07_fill, DIMENSIONS=IS2_atl07_dims,
FILENAME=output_file)
#-- change the permissions mode
os.chmod(output_file, MODE)
#-- PURPOSE: outputting the correction values for ICESat-2 data to HDF5
def HDF5_ATL07_corr_write(IS2_atl07_corr, IS2_atl07_attrs, INPUT=None,
FILENAME='', FILL_VALUE=None, DIMENSIONS=None, CLOBBER=False):
#-- setting HDF5 clobber attribute
if CLOBBER:
clobber = 'w'
else:
clobber = 'w-'
#-- open output HDF5 file
fileID = h5py.File(os.path.expanduser(FILENAME), clobber)
#-- create HDF5 records
h5 = {}
#-- number of GPS seconds between the GPS epoch (1980-01-06T00:00:00Z UTC)
#-- and ATLAS Standard Data Product (SDP) epoch (2018-01-01T00:00:00Z UTC)
h5['ancillary_data'] = {}
for k,v in IS2_atl07_corr['ancillary_data'].items():
#-- Defining the HDF5 dataset variables
val = 'ancillary_data/{0}'.format(k)
h5['ancillary_data'][k] = fileID.create_dataset(val, np.shape(v), data=v,
dtype=v.dtype, compression='gzip')
#-- add HDF5 variable attributes
for att_name,att_val in IS2_atl07_attrs['ancillary_data'][k].items():
h5['ancillary_data'][k].attrs[att_name] = att_val
#-- write each output beam
beams = [k for k in IS2_atl07_corr.keys() if bool(re.match(r'gt\d[lr]',k))]
for gtx in beams:
fileID.create_group(gtx)
#-- add HDF5 group attributes for beam
for att_name in ['Description','atlas_pce','atlas_beam_type',
'groundtrack_id','atmosphere_profile','atlas_spot_number',
'sc_orientation']:
fileID[gtx].attrs[att_name] = IS2_atl07_attrs[gtx][att_name]
#-- create sea_ice_segments group
fileID[gtx].create_group('sea_ice_segments')
h5[gtx] = dict(sea_ice_segments={})
for att_name in ['Description','data_rate']:
att_val = IS2_atl07_attrs[gtx]['sea_ice_segments'][att_name]
fileID[gtx]['sea_ice_segments'].attrs[att_name] = att_val
#-- delta_time, geolocation and segment identification variables
for k in ['delta_time','latitude','longitude','height_segment_id',
'geoseg_beg','geoseg_end','seg_dist_x']:
#-- values and attributes
v = IS2_atl07_corr[gtx]['sea_ice_segments'][k]
attrs = IS2_atl07_attrs[gtx]['sea_ice_segments'][k]
fillvalue = FILL_VALUE[gtx]['sea_ice_segments'][k]
#-- Defining the HDF5 dataset variables
val = '{0}/{1}/{2}'.format(gtx,'sea_ice_segments',k)
if fillvalue:
h5[gtx]['sea_ice_segments'][k] = fileID.create_dataset(val,
np.shape(v), data=v, dtype=v.dtype, fillvalue=fillvalue,
compression='gzip')
else:
h5[gtx]['sea_ice_segments'][k] = fileID.create_dataset(val,
np.shape(v), data=v, dtype=v.dtype, compression='gzip')
#-- create or attach dimensions for HDF5 variable
if DIMENSIONS[gtx]['sea_ice_segments'][k]:
#-- attach dimensions
for i,dim in enumerate(DIMENSIONS[gtx]['sea_ice_segments'][k]):
h5[gtx]['sea_ice_segments'][k].dims[i].attach_scale(
h5[gtx]['sea_ice_segments'][dim])
else:
#-- make dimension
h5[gtx]['sea_ice_segments'][k].make_scale(k)
#-- add HDF5 variable attributes
for att_name,att_val in attrs.items():
h5[gtx]['sea_ice_segments'][k].attrs[att_name] = att_val
#-- add to geophysical corrections
key = 'geophysical'
fileID[gtx]['sea_ice_segments'].create_group(key)
h5[gtx]['sea_ice_segments'][key] = {}
for att_name in ['Description','data_rate']:
att_val=IS2_atl07_attrs[gtx]['sea_ice_segments'][key][att_name]
fileID[gtx]['sea_ice_segments'][key].attrs[att_name] = att_val
for k,v in IS2_atl07_corr[gtx]['sea_ice_segments'][key].items():
#-- attributes
attrs = IS2_atl07_attrs[gtx]['sea_ice_segments'][key][k]
fillvalue = FILL_VALUE[gtx]['sea_ice_segments'][key][k]
#-- Defining the HDF5 dataset variables
val = '{0}/{1}/{2}/{3}'.format(gtx,'sea_ice_segments',key,k)
if fillvalue:
h5[gtx]['sea_ice_segments'][key][k] = \
fileID.create_dataset(val, np.shape(v), data=v,
dtype=v.dtype, fillvalue=fillvalue, compression='gzip')
else:
h5[gtx]['sea_ice_segments'][key][k] = \
fileID.create_dataset(val, np.shape(v), data=v,
dtype=v.dtype, compression='gzip')
#-- attach dimensions
for i,dim in enumerate(DIMENSIONS[gtx]['sea_ice_segments'][key][k]):
h5[gtx]['sea_ice_segments'][key][k].dims[i].attach_scale(
h5[gtx]['sea_ice_segments'][dim])
#-- add HDF5 variable attributes
for att_name,att_val in attrs.items():
h5[gtx]['sea_ice_segments'][key][k].attrs[att_name] = att_val
#-- HDF5 file title
fileID.attrs['featureType'] = 'trajectory'
fileID.attrs['title'] = 'ATLAS/ICESat-2 L3A Sea Ice Height'
fileID.attrs['summary'] = ('Estimates of the sea ice correction parameters '
'needed to interpret and assess the quality of sea height estimates.')
fileID.attrs['description'] = ('The data set (ATL07) contains along-track '
'heights for sea ice and open water leads (at varying length scales) '
'relative to the WGS84 ellipsoid (ITRF2014 reference frame) after '
'adjustment for geoidal and tidal variations, and inverted barometer '
'effects.')
date_created = datetime.datetime.today()
fileID.attrs['date_created'] = date_created.isoformat()
project = 'ICESat-2 > Ice, Cloud, and land Elevation Satellite-2'
fileID.attrs['project'] = project
platform = 'ICESat-2 > Ice, Cloud, and land Elevation Satellite-2'
    fileID.attrs['platform'] = platform
#-- add attribute for elevation instrument and designated processing level
instrument = 'ATLAS > Advanced Topographic Laser Altimeter System'
fileID.attrs['instrument'] = instrument
fileID.attrs['source'] = 'Spacecraft'
fileID.attrs['references'] = 'https://nsidc.org/data/icesat-2'
fileID.attrs['processing_level'] = '4'
#-- add attributes for input ATL07 file
fileID.attrs['input_files'] = os.path.basename(INPUT)
#-- find geospatial and temporal ranges
lnmn,lnmx,ltmn,ltmx,tmn,tmx = (np.inf,-np.inf,np.inf,-np.inf,np.inf,-np.inf)
for gtx in beams:
lon = IS2_atl07_corr[gtx]['sea_ice_segments']['longitude']
lat = IS2_atl07_corr[gtx]['sea_ice_segments']['latitude']
delta_time = IS2_atl07_corr[gtx]['sea_ice_segments']['delta_time']
#-- setting the geospatial and temporal ranges
lnmn = lon.min() if (lon.min() < lnmn) else lnmn
lnmx = lon.max() if (lon.max() > lnmx) else lnmx
ltmn = lat.min() if (lat.min() < ltmn) else ltmn
ltmx = lat.max() if (lat.max() > ltmx) else ltmx
tmn = delta_time.min() if (delta_time.min() < tmn) else tmn
tmx = delta_time.max() if (delta_time.max() > tmx) else tmx
#-- add geospatial and temporal attributes
fileID.attrs['geospatial_lat_min'] = ltmn
fileID.attrs['geospatial_lat_max'] = ltmx
fileID.attrs['geospatial_lon_min'] = lnmn
fileID.attrs['geospatial_lon_max'] = lnmx
fileID.attrs['geospatial_lat_units'] = "degrees_north"
fileID.attrs['geospatial_lon_units'] = "degrees_east"
fileID.attrs['geospatial_ellipsoid'] = "WGS84"
fileID.attrs['date_type'] = 'UTC'
fileID.attrs['time_type'] = 'CCSDS UTC-A'
#-- convert start and end time from ATLAS SDP seconds into GPS seconds
atlas_sdp_gps_epoch=IS2_atl07_corr['ancillary_data']['atlas_sdp_gps_epoch']
gps_seconds = atlas_sdp_gps_epoch + np.array([tmn,tmx])
#-- calculate leap seconds
leaps = icesat2_toolkit.time.count_leap_seconds(gps_seconds)
#-- convert from seconds since 1980-01-06T00:00:00 to Modified Julian days
MJD = icesat2_toolkit.time.convert_delta_time(gps_seconds - leaps,
epoch1=(1980,1,6,0,0,0), epoch2=(1858,11,17,0,0,0), scale=1.0/86400.0)
#-- convert to calendar date
YY,MM,DD,HH,MN,SS = icesat2_toolkit.time.convert_julian(MJD + 2400000.5,
FORMAT='tuple')
#-- add attributes with measurement date start, end and duration
tcs = datetime.datetime(int(YY[0]), int(MM[0]), int(DD[0]),
int(HH[0]), int(MN[0]), int(SS[0]), int(1e6*(SS[0] % 1)))
fileID.attrs['time_coverage_start'] = tcs.isoformat()
tce = datetime.datetime(int(YY[1]), int(MM[1]), int(DD[1]),
int(HH[1]), int(MN[1]), int(SS[1]), int(1e6*(SS[1] % 1)))
fileID.attrs['time_coverage_end'] = tce.isoformat()
fileID.attrs['time_coverage_duration'] = '{0:0.0f}'.format(tmx-tmn)
#-- Closing the HDF5 file
fileID.close()
#-- Main program that calls interp_sea_level_ICESat2()
def main():
#-- Read the system arguments listed after the program
parser = argparse.ArgumentParser(
description="""Interpolates AVISO sea level anomalies, absolute
dynamic topography and mean dynamic topography to ICESat-2
ATL07 sea ice height data
"""
)
#-- command line parameters
parser.add_argument('infile',
type=lambda p: os.path.abspath(os.path.expanduser(p)), nargs='+',
help='ICESat-2 ATL07 file to run')
#-- directory with sea level data
parser.add_argument('--directory','-D',
type=lambda p: os.path.abspath(os.path.expanduser(p)),
default=os.getcwd(),
help='Working data directory')
#-- verbosity settings
#-- verbose will output information about each output file
parser.add_argument('--verbose','-V',
default=False, action='store_true',
help='Output information about each created file')
#-- permissions mode of the local files (number in octal)
parser.add_argument('--mode','-M',
type=lambda x: int(x,base=8), default=0o775,
help='Permission mode of directories and files created')
args = parser.parse_args()
#-- run for each input ATL07 file
for FILE in args.infile:
interp_sea_level_ICESat2(args.directory, FILE,
VERBOSE=args.verbose, MODE=args.mode)
#-- run main program
if __name__ == '__main__':
main() | [
"[email protected]"
] | |
612241aac8a50b5bd7b171fbe0a2fc7176d5547b | 04b494a2286e7d0ec3bbe8d25c15d575486a0f91 | /_exercises/exercise101/exercise101.py | f564c92b008b53d9142ca0ef0f10178c6cdd171b | [] | no_license | ViniciusGranado/_studies_Python | ea6adc35edccfbd81a67a613e8cd468fd8485856 | af645fa777a408a8ff1b8ed89911971f5b537ac7 | refs/heads/master | 2023-02-01T19:57:04.117047 | 2020-12-19T00:56:10 | 2020-12-19T00:56:10 | 258,855,637 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,034 | py | # Functions
def vote(birth_year):
"""
    :param birth_year: int. The user's birth year.
    :return: A string containing the age and whether
            the vote is NOT ALLOWED, OPTIONAL or
            MANDATORY.
"""
from datetime import date
age = date.today().year - birth_year
return_str = f'Com {age} anos: '
if age < 16:
return_str += 'VOTO NÃO PERMITIDO'
elif age < 18 or age >= 70:
return_str += 'VOTO OPCIONAL'
else:
return_str += 'VOTO OBRIGATÓRIO'
return return_str
def get_birth_year():
"""
    Get a number string from user input; if the string is a
    valid int number, return it in number format.
:return: num. A number in number format.
"""
while True:
user_number_str = input('Ano de nascimento: ').strip()
if user_number_str.isnumeric():
return int(user_number_str)
else:
print('Valor inválido.')
# Main program
users_birth_year = get_birth_year()
print(vote(users_birth_year))
| [
"[email protected]"
] | |
cf90720c6752ac6cb6cac711bf8eec906f46ce2a | 4b07658528a035c558a6757c31f7f864b781e8e9 | /fluent/example-code/03-dict-set/support/container_perftest.py | e9fc80a4f38964d236c5231e778a8c22f743ac94 | [
"MIT"
] | permissive | ghjan/fluent-python | eae307c53d0382b91f2dd71f4ab1126f93d961c7 | 68e27072f43701710fc1f2e0ed69f3c558bc579b | refs/heads/master | 2020-03-24T07:55:02.293996 | 2018-07-29T14:08:24 | 2018-07-29T14:08:24 | 142,570,905 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,120 | py | """
Container ``in`` operator performance test
"""
import sys
import timeit
SETUP = '''
import array
selected = array.array('d')
with open('selected.arr', 'rb') as fp:
selected.fromfile(fp, {size})
if {container_type} is dict:
haystack = dict.fromkeys(selected, 1)
else:
haystack = {container_type}(selected)
if {verbose}:
print(type(haystack), end=' ')
print('haystack: %10d' % len(haystack), end=' ')
needles = array.array('d')
with open('not_selected.arr', 'rb') as fp:
needles.fromfile(fp, 500)
needles.extend(selected[::{size}//500])
if {container_type} is set:
needles = set(needles.tolist())
if {verbose}:
print(type(needles), end=' ')
print(' needles: %10d' % len(needles), end=' ')
'''
TEST = '''
found = 0
if {container_type} is dict or {container_type} is list:
for n in needles:
if n in haystack:
found += 1
else:
found = len(needles & haystack)
if {verbose}:
print(' found: %10d' % found)
'''
def test(container_type, verbose):
MAX_EXPONENT = 7
for n in range(3, MAX_EXPONENT + 1):
size = 10 ** n
setup = SETUP.format(container_type=container_type,
size=size, verbose=verbose)
test_part = TEST.format(container_type=container_type, verbose=verbose)
tt = timeit.repeat(stmt=test_part, setup=setup, repeat=5, number=1)
print('|{:{}d}|{:f}'.format(size, MAX_EXPONENT + 1, min(tt)))
if __name__ == '__main__':
if '-v' in sys.argv:
sys.argv.remove('-v')
verbose = True
else:
verbose = False
if len(sys.argv) != 2:
print('Usage: %s <container_type>' % sys.argv[0])
else:
test(sys.argv[1], verbose)
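# typical invocation, assuming the 'selected.arr' and 'not_selected.arr' data
# files have been generated beforehand:
#   python container_perftest.py dict -v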
'''
dict
| 1000|0.000137
| 10000|0.000149
| 100000|0.000284
| 1000000|0.000376
|10000000|0.000434
set
| 1000|0.000110
| 10000|0.000133
| 100000|0.000244
| 1000000|0.000348
|10000000|0.000386
list
| 1000|0.010700
| 10000|0.103838
| 100000|1.047780
| 1000000|10.561153
|10000000|105.547498
'''
| [
"[email protected]"
] | |
4748a443a829edd552558787816d88e5b52a9f9d | 9047aec2400933376e71fdc24d087d2ad35b4d45 | /minSteps_1347.py | 770c0d81af89a4f15a52f640b29b27fbd6c500d8 | [] | no_license | sasankyadavalli/leetcode | a8c3a4b63970cfa67a8bbec5d1fb7cca818f7ea9 | 555931bc5a74e0031726070be90c945da9cb3251 | refs/heads/master | 2021-02-07T12:12:06.938562 | 2020-07-28T18:25:10 | 2020-07-28T18:25:10 | 244,024,453 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 393 | py | class Solution:
def minSteps(self, s: str, t: str) -> int:
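        # count the characters of s, then consume matches while scanning t;
        # each character of t with no remaining match must be replaced (one step)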
d1 = {}
steps = 0
for ele in s:
if ele in d1.keys():
d1[ele] +=1
else:
d1[ele] = 1
for i in t:
if i in d1 and d1[i] > 0:
d1[i] -= 1
else:
steps += 1
return steps | [
"[email protected]"
] | |
8befc2b3af60d0d72bfc6445f068647e539b68e2 | 826ded51e15bf5c4e1f3a202b8f58764e56ee742 | /virtual/bin/confusable_homoglyphs | 1abf67144eb83ea41671d5a3125599be22e887ee | [] | no_license | LeoAmby/webly | 0473766476ec93f0bdb0ed09512c9045cc6948d2 | 85cf765c654324f5aea27562c3a2b90e665a88b3 | refs/heads/master | 2022-11-30T21:17:49.065335 | 2019-10-31T11:28:45 | 2019-10-31T11:28:45 | 216,031,732 | 0 | 0 | null | 2022-11-22T04:35:55 | 2019-10-18T13:35:48 | Python | UTF-8 | Python | false | false | 303 | #!/home/moringa/Documents/MoringaSchool-Projects/Core-Projects/Django/webly/virtual/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from confusable_homoglyphs.cli import cli
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(cli())
| [
"[email protected]"
] | ||
ad6cfbbb49e897d10a9a2e9d3b5c73a7dd860b8d | e6dab5aa1754ff13755a1f74a28a201681ab7e1c | /.parts/lib/django-1.4/tests/regressiontests/context_processors/urls.py | 00733253b4a73fce10bdfbc64b1b05f237247246 | [] | no_license | ronkagan/Euler_1 | 67679203a9510147320f7c6513eefd391630703e | 022633cc298475c4f3fd0c6e2bde4f4728713995 | refs/heads/master | 2021-01-06T20:45:52.901025 | 2014-09-06T22:34:16 | 2014-09-06T22:34:16 | 23,744,842 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 114 | py | /home/action/.parts/packages/googleappengine/1.9.4/lib/django-1.4/tests/regressiontests/context_processors/urls.py | [
"[email protected]"
] | |
384a851a30913f3137316f99735cf23dd303b3f8 | 54f352a242a8ad6ff5516703e91da61e08d9a9e6 | /Source Codes/AtCoder/abc055/B/4903050.py | 7731624b54ba064fa0b812064ee63aa67d499a16 | [] | no_license | Kawser-nerd/CLCDSA | 5cbd8a4c3f65173e4e8e0d7ed845574c4770c3eb | aee32551795763b54acb26856ab239370cac4e75 | refs/heads/master | 2022-02-09T11:08:56.588303 | 2022-01-26T18:53:40 | 2022-01-26T18:53:40 | 211,783,197 | 23 | 9 | null | null | null | null | UTF-8 | Python | false | false | 127 | py | N = int(input())
ans = 1
for i in range(1,N+1):
a = i % (10 ** 9 + 7)
ans *= a
ans = ans % (10 ** 9 + 7)
print(ans) | [
"[email protected]"
] | |
63b0012fef513a4036e954d58f9db81a958f3b1f | 31ab401afc2c99d85765bec73204e4009a80993d | /비밀지도.py | 987f993508357810019aadf595e373fa247d045a | [] | no_license | rheehot/Algorithm-coding_test | bdd700acda13ddbef2aa9c8c77d010b440c82964 | 3aca4ae1d60130515f7b9a0f85ef0625faf3c298 | refs/heads/master | 2023-02-20T01:21:20.015565 | 2021-01-24T03:47:45 | 2021-01-24T03:47:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 430 | py | # 나의 풀이
def solution(d, budget):
cnt = 0
num = 0
if sum(d) > budget:
d = sorted(d)
for i in d:
num += i
cnt += 1
if num > int(budget):
cnt -= 1
break
return cnt
else:
return len(d)
# Another person's solution
def solution(d, budget):
d.sort()
while budget < sum(d):
d.pop()
return len(d) | [
"[email protected]"
] | |
c2c65dbf9cb33392837f4936001f292cac812aa8 | 09c00563e44d0e4b62449f1648ba1c595a4fa455 | /Python_File_IO_Pty_Fork.py | 0743c2633d406ecd6af9216f4e7743dbfc60fddf | [] | no_license | VakinduPhilliam/Python_File_IO | 20da71688384fe145873ae5ade064b37be6c67d6 | 016d7d6d5315def35d82354e9d46a627afa2f271 | refs/heads/master | 2020-05-27T17:54:15.751253 | 2019-05-26T21:31:10 | 2019-05-26T21:31:10 | 188,731,817 | 5 | 0 | null | null | null | null | WINDOWS-1252 | Python | false | false | 1,685 | py | # Python File IO
# pty — Pseudo-terminal utilities.
# The pty module defines operations for handling the pseudo-terminal concept: starting another process and being able
# to write to and read from its controlling terminal programmatically.
# pty.fork()
# Fork. Connect the child’s controlling terminal to a pseudo-terminal.
# Return value is (pid, fd). Note that the child gets pid 0, and the fd is invalid.
# The parent’s return value is the pid of the child, and fd is a file descriptor connected to the child’s controlling
# terminal (and also to the child’s standard input and output).
#
# The following program acts like the Unix command script(1), using a pseudo-terminal to record all input and output of
# a terminal session in a “typescript”.
#
import argparse
import os
import pty
import sys
import time
parser = argparse.ArgumentParser()
parser.add_argument('-a', dest='append', action='store_true')
parser.add_argument('-p', dest='use_python', action='store_true')
parser.add_argument('filename', nargs='?', default='typescript')
options = parser.parse_args()
shell = sys.executable if options.use_python else os.environ.get('SHELL', 'sh')
filename = options.filename
mode = 'ab' if options.append else 'wb'
with open(filename, mode) as script:
def read(fd):
data = os.read(fd, 1024)
script.write(data)
return data
print('Script started, file is', filename)
script.write(('Script started on %s\n' % time.asctime()).encode())
pty.spawn(shell, read)
script.write(('Script done on %s\n' % time.asctime()).encode())
print('Script done, file is', filename)
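# example usage (Unix-only; the pty module is unavailable on Windows):
#   python Python_File_IO_Pty_Fork.py -a session_log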
| [
"[email protected]"
] | |
ad214cee844e8e823915ece27cc7713e3373e328 | 5a07828016e8bafbea5dac8f83c8bfd5d0bfd603 | /py_93w93/140318_eiz.py | 44744b47e9e56314ce840f16e1483c47051102c8 | [] | no_license | JJHopkins/rajter_compare | db5b88d2c6c1efc0fead9b6ed40fb3cce36bedb4 | 2ba52f4f16cf2aca350a82ea58d0aa8f8866c47c | refs/heads/master | 2020-06-04T23:53:57.089329 | 2014-04-08T18:02:30 | 2014-04-08T18:02:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,210 | py | #!/usr/bin/python
import matplotlib
#import pyreport
import numpy as np
from pylab import *
#from pylab import show
from matplotlib import pyplot as pl
x_x,y_x_unsc = np.loadtxt('data/CNT9_3_xe2_solid_30.txt',unpack=True, usecols = [0,1])
x_z,y_z_unsc = np.loadtxt('data/CNT9_3_ze2_solid_30.txt',unpack=True, usecols = [0,1])
x_w,y_w = np.loadtxt('data/water-L.txt',unpack=True, usecols = [0,1])
y_x = y_x_unsc*1.#4.949
y_z = y_z_unsc*1.#4.949
def Aiz(perp, par,med):
return (2.0*(perp-med)*med)/((perp+med)*(par-med))
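# a(i*zeta): relative anisotropy measure combining the perpendicular (x), parallel (z)
# and medium (water) responses; plotted in the inset of the eiz figure below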
## DEFINE FUNCTIONS FOR CALCULATING e(iz)
#-------------------------------------------------------------
# Matsubara frequencies: z_n at room temp is (2pikbT/hbar)*n (ie coeff*n)
coeff = 0.159 # in eV #(2.41*1e14) # in rad/s
#coeff = 2.41e14 # in (1 rad)*(1/s)=inverse seconds
T = 297.0
#kb_J = 1.3806488e-23 # in J/K
#hbar = 6.625e-34 # in J/s
#coeff_J = 2.0*np.pi*kb_J*T/hbar#1.602e-19*0.159e15 # in eV #(2.41*1e14) # in rad/s
n = arange(0,500)
z = n * coeff
#coeff_J = 1.602e-19*0.159e15 # in eV #(2.41*1e14) # in rad/s
#z = n * coeff
#z = n * coeff_J
eiz_x = empty(len(z))
eiz_z = empty(len(z))
eiz_w = empty(len(z))
eiz_x_arg=empty(len(x_x))
eiz_z_arg=empty(len(x_z))
eiz_w_arg=empty(len(x_w))
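# Kramers-Kronig transform: eps(i*zeta_n) = 1 + (2/pi)*Int[ w*eps''(w)/(w^2 + zeta_n^2) dw ],
# evaluated at each Matsubara frequency zeta_n for the x, z and water spectra loaded above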
for j in range(len(z)):
for i in range(len(x_x)):
eiz_x_arg[i]=x_x[i]*y_x[i] / (x_x[i]**2 + z[j]**2)
eiz_x[j] = 1 + (2./pi) * trapz(eiz_x_arg,x_x)
for m in range(len(x_z)):
eiz_z_arg[m]=x_z[m]*y_z[m] / (x_z[m]**2 + z[j]**2)
eiz_z[j] = 1 + (2./pi) * trapz(eiz_z_arg,x_z)
for p in range(len(x_w)):
eiz_w_arg[p]=x_w[p]*y_w[p] / (x_w[p]**2 + z[j]**2)
eiz_w[j] = 1 + (2./pi) * trapz(eiz_w_arg,x_w)
#
savetxt("data/eiz_x_output_eV.txt", eiz_x)
savetxt("data/eiz_z_output_eV.txt", eiz_z)
savetxt("data/eiz_w_output_eV.txt", eiz_w)
a = Aiz(eiz_x,eiz_z,eiz_w)
pl.figure()
pl.plot(x_x,y_x, color = 'b', label = r'$\varepsilon^{\prime\prime}_\hat{x}(\omega)$')
pl.plot(x_z,y_z, color = 'r', label = r'$\varepsilon^{\prime\prime}_\hat{z}(\omega)$')
pl.plot(x_z,y_z, color = 'r', label = r'$first\,peak:\,\,%6.2f$'%max(y_z))
pl.plot(x_w,y_w, color = 'c', label = r'$\varepsilon^{\prime\prime}_{H_{2}O}(\omega)$')
pl.axis([0,35,0,25])
pl.xlabel(r'$\hbar\omega\,\,\,[eV]$', size = 24)
pl.ylabel(r'$\varepsilon^{\prime\prime}(\omega)$', size = 24)
pl.legend()
pl.title(r'[9,3] and water eps2')
pl.savefig('plots/93w93_eps2.pdf')
pl.show()
#
fig = pl.figure()
ax = fig.add_axes([0.1,0.1,0.8,0.8])
ax.plot(n,eiz_x, color = 'b', label = r'$\varepsilon_{\hat{x}}(i\zeta_{N})$')
ax.plot(n,eiz_z, color = 'r', label = r'$\varepsilon_{\hat{z}}(i\zeta_{n})$')
ax.plot(n,eiz_z, color = 'r', label = r'$max\,%6.2f$'%max(eiz_z))
ax.plot(n,eiz_w, color = 'c', label = r'$\varepsilon_{\hat{w}}(i\zeta_{n})$')
pl.axis([0,500,0,10])
pl.xlabel(r'$N$', size = 24)
pl.ylabel(r'$\varepsilon(i\zeta)$', size = 24)
#pl.legend()
pl.title(r'[9,3] and water eiz')
ax_inset = fig.add_axes([0.53,0.50,0.36,0.36])
ax_inset.plot(n, a,'k-.', linewidth = 2)#,label=r'$a(i\xi_{N})$')
pl.tick_params(labelsize = 'small')
pl.xlabel(r'$N$', size = 14)
pl.ylabel(r'$a(i\xi)$', size = 14)
pl.savefig('plots/93w93_eiz.pdf')
pl.show()
| [
"[email protected]"
] | |
14476f6a4599c9269c4a8e5b073ec545726b44d2 | 2635edb96afa8117d4584a470061e447b79adc6e | /life/models.py | 37074983630a300a534a3e96598353345abd2c40 | [] | no_license | Mark-Seaman/Sensei-2018 | 673609731ecb5ebb782dab94b2cf3d7c22940424 | 06b02892cfe1bf1d25cb4224e86eb693c82b0f29 | refs/heads/master | 2022-02-18T19:14:10.343093 | 2022-01-15T20:06:21 | 2022-01-15T20:06:21 | 158,728,468 | 0 | 0 | null | 2022-01-16T21:06:09 | 2018-11-22T16:51:55 | HTML | UTF-8 | Python | false | false | 1,045 | py | from __future__ import unicode_literals
from django.db import models
class Year(models.Model):
age = models.IntegerField()
contribute = models.TextField()
relate = models.TextField()
learn = models.TextField()
enjoy = models.TextField()
def __unicode__(self):
return str(self.age)
class Aspect(models.Model):
name = models.CharField(max_length=100)
def __unicode__(self):
return str(self.name)
class Experience(models.Model):
age = models.IntegerField(editable=False)
aspect = models.ForeignKey(Aspect, on_delete=models.CASCADE, editable=False)
summary = models.TextField()
def __unicode__(self):
return "age %s - %s" % (self.age, self.aspect.name)
def get_absolute_url(self):
return '/life/year/%s' % self.age
def initialize():
Aspect.objects.create(name='Contribute')
Aspect.objects.create(name='Relate')
Aspect.objects.create(name='Learn')
Aspect.objects.create(name='Enjoy')
for a in Aspect.objects.all():
print(a)
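# initialize() is meant to be run once, e.g. from `python manage.py shell`:
#   >>> from life.models import initialize; initialize()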
| [
"[email protected]"
] | |
b1adf4597ffc1d7b98b6e5894e2b69610f88ec25 | de33091037128fe3feb5a6dad28be5b72aed86a0 | /g4g/Amazon/Easy/greater-on-right-side.py | ec15ed83e0616ea9bf406e0ca8fc1d1f4914e3e2 | [] | no_license | khannasarthak/codingPrep | 78fbf089c6f095f7ec8a5f5d9998593aea2942fe | 2b638ab284bf9fefa2259fd7aa4ca3905438b7ab | refs/heads/master | 2021-01-12T02:58:04.236524 | 2018-11-07T06:03:37 | 2018-11-07T06:03:37 | 78,140,645 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 463 | py | # USING MAX
t = int(input())
for p in (range(t)):
n = int(input())
a = list(map(int,input().split()))
op = []
for i in range(len(a)-1):
op.append(max(a[i+1:]))
op.append(-1)
print (*op)
# SECOND SOLUTION WITHOUT MAX
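# Traverse from the right carrying a running maximum, so each element is
# replaced in O(1) and each test case takes O(n) instead of O(n^2)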
t = int(input())
for p in (range(t)):
n = int(input())
a = list(map(int,input().split()))
l = len(a)
op = []
maxr = a[-1]
for i in range(l-2,-1,-1):
tmp = a[i]
a[i] = maxr
if maxr<tmp:
maxr = tmp
a[-1] = -1
print (*a)
| [
"[email protected]"
] | |
dfb5a65a4ca0886e97fb23f03b987514cc0edc0f | 4b68243d9db908945ee500174a8a12be27d150f9 | /pogoprotos/enums/variable_name_pb2.py | cd222f47297fec06b2cc4a740ef6ca9f93b9e07f | [] | no_license | ykram/pogoprotos-py | 7285c86498f57dcbbec8e6c947597e82b2518d80 | a045b0140740625d9a19ded53ece385a16c4ad4a | refs/heads/master | 2020-04-20T10:19:51.628964 | 2019-02-02T02:58:03 | 2019-02-02T02:58:03 | 168,787,721 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | true | 9,227 | py | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: pogoprotos/enums/variable_name.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='pogoprotos/enums/variable_name.proto',
package='pogoprotos.enums',
syntax='proto3',
serialized_pb=_b('\n$pogoprotos/enums/variable_name.proto\x12\x10pogoprotos.enums*\xa7\x07\n\x0cVariableName\x12\x17\n\x13UNSET_VARIABLE_NAME\x10\x00\x12\r\n\tCODE_NAME\x10\x01\x12\x08\n\x04TEAM\x10\x02\x12\t\n\x05LEVEL\x10\x03\x12\x0e\n\nEXPERIENCE\x10\x04\x12\x14\n\x10POKECOIN_BALANCE\x10\x05\x12\x14\n\x10STARDUST_BALANCE\x10\x06\x12\t\n\x05\x45MAIL\x10\x07\x12\x10\n\x0cLOGIN_METHOD\x10\x08\x12\x0b\n\x06GYM_ID\x10\xe8\x07\x12\r\n\x08GYM_NAME\x10\xe9\x07\x12\x14\n\x0fPOKEMON_DISPLAY\x10\xea\x07\x12\x19\n\x14POKEDEX_ENTRY_NUMBER\x10\xeb\x07\x12\x0f\n\nPOKEMON_ID\x10\xec\x07\x12\x15\n\x10POKEMON_NICKNAME\x10\xed\x07\x12\x1c\n\x17GYM_BADGE_EARNED_POINTS\x10\xee\x07\x12\x17\n\x12GYM_BADGE_PROGRESS\x10\xef\x07\x12\x13\n\x0eGYM_BADGE_RANK\x10\xf0\x07\x12\x18\n\x13GYM_BADGE_IMAGE_URL\x10\xf1\x07\x12\x17\n\x12GYM_BADGE_LEVEL_UP\x10\xf2\x07\x12\x15\n\x10POKECOIN_AWARDED\x10\xf3\x07\x12\x1b\n\x16POKECOIN_AWARDED_TODAY\x10\xf4\x07\x12\x17\n\x12MAX_DAILY_POKECOIN\x10\xf5\x07\x12\x10\n\x0b\x42\x41TTLES_WON\x10\xf6\x07\x12\x11\n\x0c\x42\x41TTLES_LOST\x10\xf7\x07\x12\x14\n\x0f\x44\x45PLOYED_MILLIS\x10\xf8\x07\x12\x0e\n\tRAID_SEED\x10\xf9\x07\x12%\n EXCLUSIVE_RAID_CANCELLATION_INFO\x10\xfa\x07\x12\x14\n\x0fGIFTBOX_DETAILS\x10\xfb\x07\x12\x12\n\rFRIEND_AVATAR\x10\xfc\x07\x12\x10\n\x0b\x46RIEND_TEAM\x10\xfd\x07\x12\x14\n\x0f\x46RIEND_CODENAME\x10\xfe\x07\x12\x14\n\x0fGIFT_LOOT_ITEMS\x10\xff\x07\x12\r\n\x08GIFT_EGG\x10\x80\x08\x12(\n#FRIENDSHIP_MILESTONE_REWARD_DETAILS\x10\x81\x08\x12\x1d\n\x18\x46RIENDSHIP_LEVEL_DISPLAY\x10\x82\x08\x12\"\n\x1d\x42GMODE_BUDDY_POKEMON_NICKNAME\x10\x83\x08\x12\x15\n\x10\x43OMBAT_CHALLENGE\x10\x84\x08\x12\x1f\n\x1a\x43OMBAT_CHALLENGER_CODENAME\x10\x85\x08\x12#\n\x1e\x42GMODE_OFF_SESSION_DISTANCE_KM\x10\x86\x08\x12\r\n\x08POI_NAME\x10\x87\x08\x62\x06proto3')
)
_VARIABLENAME = _descriptor.EnumDescriptor(
name='VariableName',
full_name='pogoprotos.enums.VariableName',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='UNSET_VARIABLE_NAME', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CODE_NAME', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='TEAM', index=2, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='LEVEL', index=3, number=3,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='EXPERIENCE', index=4, number=4,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='POKECOIN_BALANCE', index=5, number=5,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='STARDUST_BALANCE', index=6, number=6,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='EMAIL', index=7, number=7,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='LOGIN_METHOD', index=8, number=8,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='GYM_ID', index=9, number=1000,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='GYM_NAME', index=10, number=1001,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='POKEMON_DISPLAY', index=11, number=1002,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='POKEDEX_ENTRY_NUMBER', index=12, number=1003,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='POKEMON_ID', index=13, number=1004,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='POKEMON_NICKNAME', index=14, number=1005,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='GYM_BADGE_EARNED_POINTS', index=15, number=1006,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='GYM_BADGE_PROGRESS', index=16, number=1007,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='GYM_BADGE_RANK', index=17, number=1008,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='GYM_BADGE_IMAGE_URL', index=18, number=1009,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='GYM_BADGE_LEVEL_UP', index=19, number=1010,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='POKECOIN_AWARDED', index=20, number=1011,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='POKECOIN_AWARDED_TODAY', index=21, number=1012,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='MAX_DAILY_POKECOIN', index=22, number=1013,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='BATTLES_WON', index=23, number=1014,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='BATTLES_LOST', index=24, number=1015,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DEPLOYED_MILLIS', index=25, number=1016,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='RAID_SEED', index=26, number=1017,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='EXCLUSIVE_RAID_CANCELLATION_INFO', index=27, number=1018,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='GIFTBOX_DETAILS', index=28, number=1019,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='FRIEND_AVATAR', index=29, number=1020,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='FRIEND_TEAM', index=30, number=1021,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='FRIEND_CODENAME', index=31, number=1022,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='GIFT_LOOT_ITEMS', index=32, number=1023,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='GIFT_EGG', index=33, number=1024,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='FRIENDSHIP_MILESTONE_REWARD_DETAILS', index=34, number=1025,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='FRIENDSHIP_LEVEL_DISPLAY', index=35, number=1026,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='BGMODE_BUDDY_POKEMON_NICKNAME', index=36, number=1027,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='COMBAT_CHALLENGE', index=37, number=1028,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='COMBAT_CHALLENGER_CODENAME', index=38, number=1029,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='BGMODE_OFF_SESSION_DISTANCE_KM', index=39, number=1030,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='POI_NAME', index=40, number=1031,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=59,
serialized_end=994,
)
_sym_db.RegisterEnumDescriptor(_VARIABLENAME)
VariableName = enum_type_wrapper.EnumTypeWrapper(_VARIABLENAME)
UNSET_VARIABLE_NAME = 0
CODE_NAME = 1
TEAM = 2
LEVEL = 3
EXPERIENCE = 4
POKECOIN_BALANCE = 5
STARDUST_BALANCE = 6
EMAIL = 7
LOGIN_METHOD = 8
GYM_ID = 1000
GYM_NAME = 1001
POKEMON_DISPLAY = 1002
POKEDEX_ENTRY_NUMBER = 1003
POKEMON_ID = 1004
POKEMON_NICKNAME = 1005
GYM_BADGE_EARNED_POINTS = 1006
GYM_BADGE_PROGRESS = 1007
GYM_BADGE_RANK = 1008
GYM_BADGE_IMAGE_URL = 1009
GYM_BADGE_LEVEL_UP = 1010
POKECOIN_AWARDED = 1011
POKECOIN_AWARDED_TODAY = 1012
MAX_DAILY_POKECOIN = 1013
BATTLES_WON = 1014
BATTLES_LOST = 1015
DEPLOYED_MILLIS = 1016
RAID_SEED = 1017
EXCLUSIVE_RAID_CANCELLATION_INFO = 1018
GIFTBOX_DETAILS = 1019
FRIEND_AVATAR = 1020
FRIEND_TEAM = 1021
FRIEND_CODENAME = 1022
GIFT_LOOT_ITEMS = 1023
GIFT_EGG = 1024
FRIENDSHIP_MILESTONE_REWARD_DETAILS = 1025
FRIENDSHIP_LEVEL_DISPLAY = 1026
BGMODE_BUDDY_POKEMON_NICKNAME = 1027
COMBAT_CHALLENGE = 1028
COMBAT_CHALLENGER_CODENAME = 1029
BGMODE_OFF_SESSION_DISTANCE_KM = 1030
POI_NAME = 1031
DESCRIPTOR.enum_types_by_name['VariableName'] = _VARIABLENAME
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# @@protoc_insertion_point(module_scope)
| [
"[email protected]"
] | |
f8e843416fa7b2feb722ee1f940bc09e65143ea4 | 4e8dc479fbf28d34fa1678c14ef02f3aca31d46a | /Arrays_and_Strings.md/String_Compression(1).py | c52c58b4f06b85ecece2fd1f1664495ef14b09a1 | [] | no_license | fagan2888/Leetcode-2 | b0ace8e0695875bdcd61acdec33c45b5d1d52247 | 4175d14985172eabee0a49a821eaeaf57a5b6593 | refs/heads/master | 2020-12-02T23:08:23.519672 | 2019-02-06T22:20:03 | 2019-02-06T22:20:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 337 | py |
# coding: utf-8
# In[3]:
def compressed(s):
    # Run-length encode s, e.g. 'aabccc' -> 'a2b1c3'.
    count = 1
    new = []
    for i in range(len(s) - 1):
        if s[i] == s[i + 1]:
            count += 1
        else:
            new.extend([s[i], str(count)])
            count = 1
    if s:
        # Append the final run, which the loop above never emits.
        new.extend([s[-1], str(count)])
    return ''.join(new)
st = input('Enter string: ')
print('Compressed string is:', compressed(st))
| [
"[email protected]"
] | |
fb4a313bf56af0e92e006eb7f102755ba3bcbf98 | bdf3879b183611fef4239ece505f9f9a05fe49aa | /work/work1.py | 20963567d8a0b4d994237ce241bccfbf6f749825 | [] | no_license | Prabithapallat01/pythondjangoluminar | 9578fccd1628ed0810000cbec1ab97ac8d931bf9 | 3248939c0d454084326f9694ecb3094f599c0b6b | refs/heads/master | 2023-03-20T10:22:54.524782 | 2021-03-10T08:43:01 | 2021-03-10T08:43:01 | 327,939,255 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,044 | py | # # [1,2,3,4,5,6] [4,5,6,1,2,3]
# #lst=[1,2,3,4,5,6]
# def leftRotate(lst, d, n):
# for i in range(d):
# leftRotatebyOne(lst, n)
#
#
# # Function to left Rotate arr[] of size n by 1*/
# def leftRotatebyOne(lst, n):
# temp = lst[0]
# for i in range(n - 1):
# lst[i] = lst[i + 1]
# lst[n - 1] = temp
#
#
# # utility function to print an array */
# def printlist(lst, size):
# for i in range(size):
# print("% d" % lst[i], end=" ")
#
#
# # Driver program to test above functions */
# lst= [1, 2, 3, 4, 5, 6, ]
# leftRotate(lst, 3, 6)
# printlist(lst,6)
lst=[1,2,3,4,5,6]
print("Before Rotation:",lst)
d=3
n=6
def rotation(lst,d):
for i in range(0,d):
temp=lst[0] #TEMP=1
for j in range(0,n-1): #((0,5)j=0, j=1 j=2 j=3 j=4
lst[j]=lst[j+1] # lst[0]=lst[1], lst[0]=2 lst[1]=3 lst[2]=4 lst[3]=5 lst[4]=6
lst[n-1]=temp
return lst
rotatedlist=rotation(lst,d)
print("Before Rotation:",rotatedlist)
| [
"[email protected]"
] | |
fe21a650d7becd85577455403863b65cb3d04236 | 859ca4b957af0c3486ed11bbf87c1b18d6e589a0 | /src/rathings/notebooks/SpectralIndexCalc.py | 2027453412f7e7eb0bd5bc0b3c557644e9ae9810 | [
"Apache-2.0"
] | permissive | Joshuaalbert/RadioAstronomyThings | b5f84b15d8d787d38dab90b05b6905b1b74acf18 | c72d6e5af8b0831a2e0e86d2d99ca894ad7f43ff | refs/heads/master | 2020-04-12T06:40:28.737231 | 2018-07-24T09:44:30 | 2018-07-24T09:44:30 | 60,545,956 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,389 | py |
# coding: utf-8
# In[ ]:
'''Compute the posterior estimates of the spectral index, S1.4GHz, and P1.4GHz,
as well as the posterior estimates of the measured fluxes (S_i), using the Metropolis-Hastings algorithm.
We assume the priors: Gaussian measured fluxes, uniform spectral index, uniform S1.4, and uniform P1.4.
Detection is defined as 5*sigma_rms.
The detection mask can be defined to include non-detection measurements (a valid assumption for point sources).
The posterior density is then: prior x likelihood (with the priors described above).
The likelihood is an L2 on the spectral index and S1.4 due to the Gaussian prior on the observables:
Likelihood = exp(-1/2 * Sum (S_obs - g(alpha_i,S1.4))**2 / (Cd_i + Ct_i))
where S_obs are the measured fluxes,
g(alpha_i,S1.4) gives the model S_i,
Cd_i is the measurement variance of S_i,
Ct_i is a systematic term for g(...) taken to be (0.15*S_obs)**2.
Assuming z ~ 0.516 +- 0.002, we use the samples of alpha and S1.4 to Monte Carlo compute the means and variances of
the posterior S_i and P1.4 in lognormal space, as suggested by their posterior plots.
We find the posterior distributions to be:
    alpha: Gaussian
    S1.4: lognormal
    P1.4: lognormal
    S_i: lognormal
'''
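# A minimal, hedged sketch of the Metropolis-Hastings step described in the
# docstring above (illustrative only; the full sampler in MHSolveSpectealIndex
# below additionally uses a log-uniform proposal for S1.4, tracks the MAP point,
# and stops once enough proposals have been accepted):
#
#   alpha_prop = np.random.uniform(alpha_cur - 0.5, alpha_cur + 0.5)
#   L_prop = L(S_obs, alpha_prop, S14_cur, nu, Cd + Ct)
#   if np.random.uniform() < L_prop / L_cur:   # accept with prob min(1, L_prop/L_cur)
#       alpha_cur, L_cur = alpha_prop, L_prop
#
# Posterior summaries are then formed in log space, e.g. for S1.4:
#   mu = exp(mean(log S14_samples)); upper = exp(mean + std) - mu; lower = mu - exp(mean - std)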
import numpy as np
import pylab as plt
from matplotlib.patches import Polygon
from matplotlib.collections import PatchCollection
def g(alpha,S14,nu):
'''Forward equation, evaluate model at given nu array'''
out = S14*(nu/1400e6)**alpha
return out
def L(Sobs,alpha,S14,nu,CdCt):
    '''Likelihood for alpha and S14'''
#only as nu_obs
d = g(alpha,S14,nu)
L2 = np.sum((Sobs - d)**2/CdCt)
#L1 = np.sum(np.abs(Sobs - d)/np.sqrt(CdCt))
return np.exp(-L2/2.)
def P(nu,z,alpha,S14):
c = 3e8
h0 = 0.7
ch = 1.32151838
q0 = 0.5
D = ch*z*(1+z*(1-q0)/(np.sqrt(1+2*q0*z) + 1 + q0*z))
S = S14*(nu/1400e6)
out = 4*np.pi*S*D**2 / (1+z)**(1+alpha) * 1e26
return out/1e24
def MHSolveSpectealIndex(nu,S,Cd,Ct,name,z,dz,nuModel=None,plot=False,plotDir=None):
'''Assumes S in mJy'''
if nuModel is None:
nuModel = nu
if plotDir is not None:
import os
try:
os.makedirs(plotDir)
except:
pass
N = int(1e6)
alpha_ = np.zeros(N,dtype=np.double)
S14_ = np.zeros(N,dtype=np.double)
alpha_[0] = -0.8
S14_[0] = S[0]*(1400e6/nu[0])**-0.8
print("Working on source {}".format(name))
CdCt = Cd + Ct
Li = L(S,alpha_[0],S14_[0],nu,CdCt)
print("Initial L: {}".format(Li))
maxL = Li
alphaMAP = alpha_[0]
S14MAP = S14_[0]
accepted = 0
binning = 50
i = 1
while accepted < binning*binning and i < N:
#sample priors in uniform steps
alpha_j = np.random.uniform(low=alpha_[i-1] - 0.5,high=alpha_[i-1] + 0.5)
S14_j = 10**(np.random.uniform(low = np.log10(S14_[i-1]/100),high=np.log10(S14_[i-1]*100)))
Lj = L(S,alpha_j,S14_j,nu,CdCt)
if np.random.uniform() < Lj/Li:
alpha_[i] = alpha_j
S14_[i] = S14_j
Li = Lj
accepted += 1
else:
alpha_[i] = alpha_[i-1]
S14_[i] = S14_[i-1]
if Lj > maxL:
maxL = Lj
alphaMAP = alpha_j
S14MAP = S14_j
i += 1
if accepted == binning**2:
print("Converged in {} steps".format(i))
print("Acceptance: {}, rate : {}".format(accepted,float(accepted)/i))
else:
print("Acceptance: {}, rate : {}".format(accepted,float(accepted)/i))
alpha_ = alpha_[:i]
S14_ = S14_[:i]
    # integrate out the uncertainty using MC integration
logS_int = np.zeros([len(alpha_),len(nuModel)],dtype=np.double)
logP14_int = np.zeros(len(alpha_),dtype=np.double)
i = 0
while i < len(alpha_):
logS_int[i,:] = np.log(g(alpha_[i],S14_[i],nuModel))
logP14_int[i] = np.log(P(1400e6,np.random.normal(loc=z,scale=dz),alpha_[i],S14_[i]/1e3))
i += 1
logS_mu = np.mean(logS_int,axis=0)
logS_std = np.sqrt(np.mean(logS_int**2,axis=0) - logS_mu**2)
logP14_mu = np.mean(logP14_int)
logP14_std = np.sqrt(np.mean(logP14_int**2) - logP14_mu**2)
S_post_mu = np.exp(logS_mu)
S_post_up = np.exp(logS_mu + logS_std) - S_post_mu
S_post_low = S_post_mu - np.exp(logS_mu - logS_std)
P14_post_mu = np.exp(logP14_mu)
P14_post_up = np.exp(logP14_mu + logP14_std) - P14_post_mu
P14_post_low = P14_post_mu - np.exp(logP14_mu- logP14_std)
P14 = P14_post_mu
P14u = P14_post_up
P14l = P14_post_low
alpha = np.mean(alpha_)
std_alpha = np.std(alpha_)
mu = np.exp(np.mean(np.log(S14_)))
S14 = mu
S14u = np.exp(np.mean(np.log(S14_)) + np.std(np.log(S14_))) - mu
S14l = mu - np.exp(np.mean(np.log(S14_)) - np.std(np.log(S14_)))
if plot:
plt.hist(alpha_,bins=binning)
plt.xlabel(r"$\alpha$")
plt.ylabel(r"Count")
plt.title("alpha")
if plotDir is not None:
plt.savefig("{}/{}-alpha-posterior.png".format(plotDir,name),format='png')
plt.clf()
else:
plt.show()
plt.hist(S14_,bins=binning)
plt.xlabel(r"$S_{\rm 1.4GHz}[mJy]$")
plt.ylabel(r"Count")
plt.title("S14")
if plotDir is not None:
plt.savefig("{}/{}-S14-posterior.png".format(plotDir,name),format='png')
plt.clf()
else:
plt.show()
plt.hist(np.log10(S14_),bins=binning)
plt.xlabel(r"$\log_{10}{S_{\rm 1.4GHz}[mJy]}$")
plt.ylabel(r"Count")
plt.title("log(S14)")
if plotDir is not None:
plt.savefig("{}/{}-logS14-posterior.png".format(plotDir,name),format='png')
plt.clf()
else:
plt.show()
print("---------")
print("Results for source {}".format(name))
print("Max Likelihood: {}".format(maxL))
print("alpha: {} +- {}".format(alpha,std_alpha))
print("MAP alpha: {}".format(alphaMAP))
print("S14: {} + {} - {} mJy".format(S14,S14u,S14l))
print("MAP S14: {} mJy".format(S14MAP))
for fi in range(len(nuModel)):
mu = S_post_mu[fi]
up = S_post_up[fi]
low = S_post_low[fi]
print("(lognormal) S{}MHz: {} + {} - {} mJy".format(int(nuModel[fi]/1e6),mu,up,low))
print("(lognormal) P14: {} + {} - {} mJy".format(P14_post_mu,
P14_post_up,
P14_post_low))
#plot the Gassuan model and data
if plot:
plt.errorbar(nu, S, yerr=np.sqrt(CdCt), fmt='x',label='data')
plt.errorbar(nuModel, S_post_mu, yerr=[S_post_up,S_post_low], fmt='--o',label='model')
plt.xlabel(r"$\nu$ [Hz]")
plt.ylabel(r"$S(\nu)$ [mJy]")
#plt.plot(nu,S_map,label='map')
#plt.errorbar(nu, S_model, yerr=CdCt[idx,mask], fmt='--o')
plt.legend()
points = []
for j in range(len(nuModel)):
points.append((nuModel[j],S_post_mu[j] + S_post_up[j]))
#points.append((nuModel[j],S_post_mu[j] - S_post_low[j]))
for j in range(len(nuModel)):
#points.append((nuModel[j],S_post_mu[j] + S_post_up[j]))
points.append((nuModel[-j-1],S_post_mu[-j-1] - S_post_low[-j-1]))
plt.gca().add_collection(PatchCollection([Polygon(points,True)],alpha=0.4))
plt.yscale('log')
plt.xscale('log')
if plotDir is not None:
plt.savefig("{}/{}-fluxes-posterior.png".format(plotDir,name),format='png')
plt.clf()
else:
plt.show()
print("--------")
return alpha,std_alpha,S14,S14u,S14l,S_post_mu,S_post_up,S_post_low,P14_post_mu,P14_post_up,P14_post_low
if __name__ == '__main__':
names = ['C1+2','NW1','NW2','H','E','X1','X2','S']
nu = np.array([147.667e6,322.667e6,608.046e6])
rms = np.array([1.4e-3,120e-6,90e-6])*1e3
beams = np.array([43.3*18.9,17.5*9.5,7.2*4.9])*np.pi/4./np.log(2.)
print("Beams: {} (arcsec^2)".format(beams))
pixels = np.array([5.25**2,2**2,1.25**2])
print("px/beam: {} (pixels)".format(beams/pixels))
print("Uncertainty per px: {} mJy".format(rms*np.sqrt(pixels/beams)))
#measurement mask
detectionMask = np.bitwise_not(np.array([[0,0,0],
[0,0,0],
[0,0,0],
[0,0,0],
[0,0,0],
[0,0,0],
[0,0,0],
[0,0,0]],dtype=np.bool))
#measurements
S = np.array([[ 66.034 , 7.653 , 4.241 ],
[ 159.14 , 62.206 , 45.998 ],
[ 147.575 , 77.056 , 46.834 ],
[ 10.40630611, 3.98776452, 1.16477836],
[ 57.346 , 22.343 , 7.6797 ],
[ 40.672 , 4.556 , 0.48076422],
[ 9.45811655, 5.508 , 4.426 ],
[ 32.342 , 15.314 , 9.277 ]],dtype=np.double)
std_d = np.array([[ 6.58200000e+00, 2.94200000e-01, 3.12511200e-01],
[ 7.85100000e+00, 3.86200000e-01, 1.05200000e-01],
[ 8.11100000e+00, 3.54800000e-01, 3.58600000e-01],
[ 1.34408838e+00, 2.55608364e-01, 4.16152840e-01],
[ 7.16500000e+00, 3.11300000e-01, 2.90741019e-04],
[ 7.82100000e+00, 2.09200000e-01, 8.90090959e-02],
[ 1.40738833e+00, 2.27200000e-01, 2.34200000e-01],
[ 8.07500000e+00, 2.77200000e-01, 3.67694221e-01]],dtype=np.double)
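    # NOTE: the S and std_d arrays defined above are immediately superseded by
    # the definitions below, which are the ones actually used in the fits.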
S = np.array([[66.034,7.653,2.357 + 1.884],#c12
[159.140,62.206,45.998],#nw1
[147.575,77.056,46.834],#nw2
[648.7*pixels[0]/beams[0],324.8*pixels[1]/beams[1],76.31*pixels[2]/beams[2]],#h
[57.346,22.343,(7.619+6.07E-2)],#e
[40.672,4.556,12.3*pixels[2]/beams[2]],#x1
[318.2*pixels[0]/beams[0],5.508,4.426],#x2
[32.342,15.314,3.744+5.533]],dtype=np.double)#s
std_d = np.array([[6.582,2.942E-1,np.sqrt(2.086E-1**2 + 2.327E-1**2)],#c12
[7.851,3.862E-1,1.052E-1],#nw1
[8.111,3.548E-1,3.586E-1],#nw2
[rms[0]*np.sqrt(937.1/beams[0]),rms[1]*np.sqrt(928./beams[1]),rms[2]*np.sqrt(925./beams[2])],#h
[7.165,3.113E-1,np.sqrt(1.845E-4**2 + 2.247E-4**2)],#e
[7.821,2.092E-1,rms[2]*np.sqrt(39.1/beams[2])],#x1
[rms[0]*np.sqrt(937.1/beams[0]),2.272E-1,2.342E-1],#x2
[8.075,2.772E-1,np.sqrt(1.900E-1**2 + 3.148E-1**2)]],dtype=np.double)#s
Cd = std_d**2
Ct = (S*0.15)**2
CdCt = Cd + Ct
#previous estimates
#alpha0 = np.array([-2.5501,-0.8804,-0.8458, -1.4624, -0.1102, -0.8988, -0.3312, -0.7236],dtype=np.double)
#S140 = np.array([0.4034,20.5293, 23.2775, 0.113, 13.2874, 1.1842, 3.0169, 6.2674],dtype=np.double)
#P0 = np.array([0.4,13,15,2.1,5.7,0.9,2.3,3.7],dtype=np.double)
#samples
m = S.shape[0]
#posterior moments
alpha = np.zeros(m,dtype=np.double)
std_alpha = np.zeros(m,dtype=np.double)
S14 = np.zeros(m,dtype=np.double)
S14u = np.zeros(m,dtype=np.double)
S14l = np.zeros(m,dtype=np.double)
P14 = np.zeros(m,dtype=np.double)
P14u = np.zeros(m,dtype=np.double)
P14l = np.zeros(m,dtype=np.double)
S_post_mu = np.zeros([m,3],dtype=np.double)
S_post_up = np.zeros([m,3],dtype=np.double)
S_post_low = np.zeros([m,3],dtype=np.double)
idx = 0
while idx < m:
mask = detectionMask[idx,:]
alpha_,std_alpha_,S14_,S14u_,S14l_, S_post_mu_,S_post_up_,S_post_low_,P14_post_mu_,P14_post_up_,P14_post_low_ = MHSolveSpectealIndex(nu[mask],S[idx,mask],
Cd[idx,mask],Ct[idx,mask],
names[idx],0.516,0.002,nuModel=nu,plot=True,
plotDir='spectral-figs')
alpha[idx] = alpha_
std_alpha[idx] = std_alpha_
S14[idx] = S14_
S14u[idx] = S14u_
S14l[idx] = S14l_
S_post_mu[idx,:] = S_post_mu_
S_post_up[idx,:] = S_post_up_
S_post_low[idx,:] = S_post_low_
P14[idx] = P14_post_mu_
P14u[idx] = P14_post_up_
P14l[idx] = P14_post_low_
idx += 1
i = 0
while i < len(alpha):
print(r"{} & ${:.2g} \pm {:.2g}$ & ${:.2g} \pm {:.2g}$ & ${:.2g} \pm {:.2g}$ & ${:.2g} \pm {:.2g}$ & ${:.2g}^{{{:.2g}}}_{{{:.2g}}}$ & ${:.2g}^{{{:.2g}}}_{{{:.2g}}}$\\".format(names[i],
S[i,0],np.sqrt(CdCt[i,0]),
S[i,1],np.sqrt(CdCt[i,1]),
S[i,2],np.sqrt(CdCt[i,2]),
alpha[i],std_alpha[i],
S14[i],S14u[i],S14l[i],
P14[i],P14u[i],P14l[i]))
i += 1
f, axs = plt.subplots(4,2,sharex=True,figsize=(11,11))
cols= 2
rows = 4
i=0
while i < len(alpha):
#i = row*2 + col
col = i%cols
row = (i - col)//cols
if rows == 1:
if cols == 1:
ax = axs
else:
ax = axs[col]
else:
ax = axs[row][col]
mask = detectionMask[i,:]
mask[:] = True
ax.errorbar(nu[mask], S[i,mask], yerr=np.sqrt(CdCt[i,mask]), fmt='x',label='data')
ax.errorbar(nu, S_post_mu[i,:], yerr=[S_post_up[i,:],S_post_low[i,:]], fmt='--o',label='model')
points = []
for j in range(len(nu)):
points.append((nu[j],S_post_mu[i,j] + S_post_up[i,j]))
#points.append((nuModel[j],S_post_mu[j] - S_post_low[j]))
for j in range(len(nu)):
#points.append((nuModel[j],S_post_mu[j] + S_post_up[j]))
points.append((nu[-j-1],S_post_mu[i,-j-1] - S_post_low[i,-j-1]))
ax.add_collection(PatchCollection([Polygon(points,True)],alpha=0.4))
#ax.set_ylim([])
ax.set_yscale('log')
ax.set_xscale('log')
ylims = list(ax.get_ylim())
ylims[0] = 10**(np.floor(np.log10(ylims[0])))
ylims[1] = 10**(np.ceil(np.log10(ylims[1])))
print(ylims)
ax.set_ylim(ylims)
ax.set_xticks([])#('right')
ax.set_xticklabels([])
if col==1:
ax.yaxis.set_label_position('right')
i += 1
#axs[-1][0].set_xlabel(r'$\nu$ [Hz]')
#axs[-1][1].set_xlabel(r'$\nu$ [Hz]')
#axs[4>>1][0].set_ylabel(r'$S(\nu)$ [mJy]')
f.subplots_adjust(hspace=0,wspace=0)
plt.setp([ax.get_xticklabels() for ax in f.axes],visible=False)
plt.setp([ax.get_yticklabels() for ax in f.axes],visible=False)
plt.show()
# In[ ]:
def plotSpectrum(nu,S,CdCt,S_post_mu,S_post_up,S_post_low, mask, ax):
ax.errorbar(nu[mask], S[mask], yerr=np.sqrt(CdCt[mask]), fmt='x',label='data')
#ax.errorbar(nu, S_post_mu, yerr=[S_post_up,S_post_low], fmt='--o',label='model')
ax.plot(nu, S_post_mu, ls='--',label='model')
points = []
for j in range(len(nu)):
points.append((nu[j],S_post_mu[j] + S_post_up[j]))
#points.append((nuModel[j],S_post_mu[j] - S_post_low[j]))
for j in range(len(nu)):
#points.append((nuModel[j],S_post_mu[j] + S_post_up[j]))
points.append((nu[-j-1],S_post_mu[-j-1] - S_post_low[-j-1]))
ax.add_collection(PatchCollection([Polygon(points,True)],alpha=0.4))
#ax.set_ylim([])
ax.set_yscale('log')
ax.set_xscale('log')
ylims = list(ax.get_ylim())
ylims[0] = 10**(np.floor(np.log10(ylims[0])))
ylims[1] = 10**(np.ceil(np.log10(ylims[1])))
ax.set_ylim(ylims)
ax.set_xticks([])#('right')
ax.set_xticklabels([])
f, axs = plt.subplots(4,2,sharex=True,figsize=(11,11))
cols= 2
rows = 4
i=0
while i < len(alpha):
#i = row*2 + col
col = i%cols
row = (i - col)//cols
if rows == 1:
if cols == 1:
ax = axs
else:
ax = axs[col]
else:
ax = axs[row][col]
#plt.figure()
#ax = plt.subplot(111)
mask = detectionMask[i,:]
mask[:] = True
plotSpectrum(np.append(nu,1400e6),np.append(S[i,:],0),np.append(CdCt[i,:],0),np.append(S_post_mu[i,:],S14[i]),
np.append(S_post_up[i,:],S14u[i]),np.append(S_post_low[i,:],S14l[i]),np.append(mask,False), ax)
if col==1:
ax.yaxis.set_label_position('right')
i += 1
#axs[-1][0].set_xlabel(r'$\nu$ [Hz]')
#axs[-1][1].set_xlabel(r'$\nu$ [Hz]')
#axs[4>>1][0].set_ylabel(r'$S(\nu)$ [mJy]')
f.subplots_adjust(hspace=0,wspace=0)
#plt.setp([ax.get_xticklabels() for ax in f.axes],visible=False)
#plt.setp([ax.get_yticklabels() for ax in f.axes],visible=False)
plt.show()
# In[ ]:
| [
"[email protected]"
] | |
3c42675b2801430204bd563ea33ea0d5e27554ea | a9ca47eddea033e7d3ea530ee62dc3c70c07702e | /leet_code717.py | 86293c99210bdf282c0023bf9c2d4ddcf4177cab | [] | no_license | tejamupparaju/LeetCode_Python | 94d5eb80ea038dfdfc6ce5e8d833af9404215f01 | 6e4894c2d80413b13dc247d1783afd709ad984c8 | refs/heads/master | 2021-01-22T05:33:58.443569 | 2018-11-05T18:00:42 | 2018-11-05T18:00:42 | 81,676,499 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,270 | py | """
717. 1-bit and 2-bit Characters
We have two special characters. The first character can be represented by one bit 0. The second character can be represented by two bits (10 or 11).
Now given a string represented by several bits. Return whether the last character must be a one-bit character or not. The given string will always end with a zero.
Example 1:
Input:
bits = [1, 0, 0]
Output: True
Explanation:
The only way to decode it is two-bit character and one-bit character. So the last character is one-bit character.
Example 2:
Input:
bits = [1, 1, 1, 0]
Output: False
Explanation:
The only way to decode it is two-bit character and two-bit character. So the last character is NOT one-bit character.
Note:
1 <= len(bits) <= 1000.
bits[i] is always 0 or 1.
Companies
Quora
"""
# MOWN
# every time we hit a 1, we update the two-bit flag
class Solution(object):
def isOneBitCharacter(self, bits):
"""
:type bits: List[int]
:rtype: bool
"""
result = two = False
for num in bits:
if not two:
if num == 0:
result = True
else:
two = True
result = False
else:
two = False
return result
| [
"[email protected]"
] | |
8f9f30d6831cde1e4fcbf7f239ea70d1b594ea72 | d554b1aa8b70fddf81da8988b4aaa43788fede88 | /5 - Notebooks e Data/1 - Análises numéricas/Arquivos David/Atualizados/logDicas-master/data/2019-1/224/users/4378/codes/1734_2506.py | 469f436519bd3fef7d7fc8e98ac992cba7f1f7a7 | [] | no_license | JosephLevinthal/Research-projects | a3bc3ca3b09faad16f5cce5949a2279cf14742ba | 60d5fd6eb864a5181f4321e7a992812f3c2139f9 | refs/heads/master | 2022-07-31T06:43:02.686109 | 2020-05-23T00:24:26 | 2020-05-23T00:24:26 | 266,199,309 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 343 | py | q_inicial=int(input("quantidade inicial: "))
perc=float(input("percentual de crescimento: "))
quant=int(input("quantidade de pirarucus retirados: "))
perc=perc/100
t=0
while(0<=q_inicial <=12000):
q_inicial=(q_inicial+q_inicial*perc)-quant
t=t+1
if(q_inicial<=0):
print("EXTINCAO")
print(t)
if(q_inicial>=12000):
print("LIMITE")
print(t) | [
"[email protected]"
] | |
461f66f089b405e1c9d10bf004d48db54981b2e5 | 4ca44b7bdb470fcbbd60c2868706dbd42b1984c9 | /21.05.09/SWEA_1208.py | 739beb273d68e6696bc62fdfc2ef56bb16859b3f | [] | no_license | titiman1013/Algorithm | 3b3d14b3e2f0cbc4859029eb73ad959ec8778629 | 8a67e36931c42422779a4c90859b665ee468255b | refs/heads/master | 2023-06-29T17:04:40.015311 | 2021-07-06T01:37:29 | 2021-07-06T01:37:29 | 242,510,483 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 351 | py | import sys; sys.stdin = open('1208.txt', 'r')
for tc in range(1, 11):
N = int(input())
arr = list(map(int, input().split()))
for i in range(N):
if arr.count(max(arr)) >= len(arr) - 1: break
arr[arr.index(max(arr))] -= 1
arr[arr.index(min(arr))] += 1
answer = max(arr) - min(arr)
print(f'#{tc} {answer}') | [
"[email protected]"
] | |
f6c50cffa088f6323d15a6017b52e49948b80c58 | 00c9022edb984b68f0a6b1df9ba15d62d79ec62e | /src/chapter3/exercise7.py | 674b706737ef6f39bf866101b9d8972467bc2342 | [
"MIT"
] | permissive | Group3BCS1/BCS-2021 | c037bcc86193ac1c773fc9d402ef9d95a4d1276d | b8ee8f900e3fd23822844e10fb2c6475a4f3400a | refs/heads/main | 2023-06-03T08:19:24.035239 | 2021-06-18T07:42:49 | 2021-06-18T07:42:49 | 349,341,077 | 0 | 2 | MIT | 2021-03-19T07:45:39 | 2021-03-19T07:45:38 | null | UTF-8 | Python | false | false | 863 | py | try:
location = input("enter location: ")
location = location.upper() # this converts the user's string to uppercase
pay = float(input('enter pay: '))
if location == 'MBARARA' and pay > 4000000:
print('I WILL TAKE THE JOB')
elif location == 'MBARARA' and pay <= 4000000:
print('SORRY, I CAN NOT WORK FOR THAT')
elif location == 'KAMPALA' and pay > 10000000:
print('I WILL DEFINITELY WORK')
elif location == 'KAMPALA' and pay <= 10000000:
print('NO WAY !')
elif location == 'SPACE' and pay >= 0:
print('WITHOUT DOUBT, I WILL TAKE IT')
    elif pay >= 6000000:  # any other location with pay of at least 6000000
print('I will surely work')
else:
print('No thanks, I can find something better')
except:
print('invalid entry') # this is printed when the user enters an invalid input
| [
"[email protected]"
] | |
6f2d0035b27900c2f7fc843139b6b74c96b2bf51 | ae2c2bdf3aba6b5bd0753778be1991f968e689ec | /dotbiz/migrations/0031_uploadedfile_file_name.py | 86e1abecbb628ab31cbfd47120129aac6079d12d | [] | no_license | bintangx1902/clone_biz | 7cd5ce56073ebd7627c93bb1608de10177a33dfd | 80ee7b04aaff167001f5cbd9307bd3f487d3919c | refs/heads/main | 2023-08-24T10:53:18.590655 | 2021-10-25T06:32:28 | 2021-10-25T06:32:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 410 | py | # Generated by Django 3.1.4 on 2020-12-30 15:39
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('dotbiz', '0030_uploadedfile'),
]
operations = [
migrations.AddField(
model_name='uploadedfile',
name='file_name',
field=models.CharField(blank=True, max_length=255, null=True),
),
]
| [
"[email protected]"
] | |
6bbddf9120b6877356513aa49b8ece59f6e330f0 | f48778b953c76cfb0039735094eb2246d6e56177 | /graphs/enhanced_qos/experiment_rssi/plot_association.py | a8ac4e20efc2706cfee98114ffb05ca5c8729634 | [] | no_license | phisolani/wifi_monitoring | 36826c52a00cf79d71f93c4acd0dc8b6d56ff934 | 06c62e625d395e4f179b9fef95318a746fee1786 | refs/heads/master | 2021-06-07T02:53:59.832752 | 2021-04-09T09:36:37 | 2021-04-09T09:36:37 | 142,306,383 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 508 | py | #!/usr/bin/env python
__author__ = "Daniel Kulenkamp"
__copyright__ = "Copyright 2020, QoS-aware WiFi Slicing"
__license__ = "GPL"
__version__ = "1.0"
__maintainer__ = "Daniel Kulenkamp"
__email__ = "[email protected]"
__status__ = "Prototype"
" Python script for testing association plot"
from graphs.enhanced_qos.association_graph import *
make_association_graph(
experiment_path='workload/experiment_2/gomez/association/',
filenames=['ap_1_association', 'ap_2_association', 'ap_3_association']
) | [
"[email protected]"
] | |
da866e6ea6f4eb4518bbc5ec3a0d9a1479af131e | c3c8b7779381c37a97c7176947f175a1168b6149 | /instrument/generate_port_agent_config.py | 95920fcfc744e6aff2b2d762f9af8f11f88e0f0a | [] | no_license | ronkyo/ooi-tools | 9f7e51dc6fbc8fc5d77d91d3523f4342e957f8e1 | 36034ae3f950ff7c585363c6cc07f8a51549185d | refs/heads/master | 2021-01-21T03:54:24.522565 | 2015-04-27T23:22:23 | 2015-04-28T20:52:01 | 25,649,899 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,645 | py | #!/usr/bin/env python
"""
generate_port_agent_config.py
Generate port agent configuration files and a supervisord file from CSV input
Usage:
generate_port_agent_config.py <path> <name> <csv_file>
"""
import shutil
import jinja2
import os
import docopt
from csv import DictReader
loader = jinja2.FileSystemLoader(searchpath="templates")
env = jinja2.Environment(loader=loader, trim_blocks=True, lstrip_blocks=True)
pa_template = env.get_template('pa_config.jinja')
super_template = env.get_template('supervisord.jinja')
def prep_dir(path, name):
configdir = os.path.join(path, 'configs')
configfile = os.path.join(path, '%s.conf' % name)
if os.path.exists(path):
if not os.path.isdir(path):
raise Exception('path matches existing file, looking for directory')
if os.path.exists(configdir):
shutil.rmtree(configdir)
if os.path.exists(configfile):
os.remove(configfile)
os.makedirs(configdir)
def create_pa_dict(csv_file):
pa_dict = {}
with open(csv_file) as fh:
for row in DictReader(fh):
name = create_pa_name(row)
if name is not None:
pa_dict[name] = row
return pa_dict
def create_pa_name(pa_dict):
if any([pa_dict['instrument'] == '', pa_dict['refdes'] == '']):
return None
return '%s_%s' % (pa_dict['instrument'], pa_dict['refdes'])
def create_pa_config(pa_dict):
return pa_template.render(**pa_dict)
def create_pa_configs(pa_dict):
pa_configs = {}
for name in pa_dict:
config = create_pa_config(pa_dict[name])
pa_configs[name] = config
return pa_configs
def create_supervisord_config(name, pa_dict):
    groups = {}
    # Use a distinct loop variable so the supervisord config name passed in as
    # `name` is not shadowed before it is rendered below.
    for pa_name in pa_dict:
        each = pa_dict[pa_name]
        group = each['group']
        groups.setdefault(group, []).append(pa_name)
    return super_template.render(name=name, groups=groups)
def write(path, name, supervisord_config, pa_configs):
with open(os.path.join(path, '%s.conf' % name), 'wb') as fh:
fh.write(supervisord_config)
for pa_name in pa_configs:
with open(os.path.join(path, 'configs', '%s.conf' % pa_name), 'wb') as fh:
fh.write(pa_configs[pa_name])
def main():
options = docopt.docopt(__doc__)
path = options['<path>']
name = options['<name>']
csv_file = options['<csv_file>']
prep_dir(path, name)
pa_dict = create_pa_dict(csv_file)
pa_configs = create_pa_configs(pa_dict)
supervisord_config = create_supervisord_config(name, pa_dict)
write(path, name, supervisord_config, pa_configs)
if __name__ == '__main__':
main() | [
"[email protected]"
] | |
5b545d841152235d7e1f5f4957a3721084e203b0 | 8d6f73af697f56834e8cef88876be78a360190d9 | /flaskblog/users/forms.py | 73112b0e9cadba5151b9b28e081593b41fb3d9b2 | [] | no_license | Sukhrobjon/flask-blog | 7a837f3454263f69935d88cc3c73858a2848c22c | 357b311efd32f3537fce9f84ecbadd0835aac524 | refs/heads/master | 2020-04-18T10:49:58.829788 | 2019-02-25T23:28:14 | 2019-02-25T23:28:14 | 167,480,195 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,654 | py | from flask_wtf import FlaskForm
from flask_wtf.file import FileField, FileAllowed
from wtforms import StringField, PasswordField, SubmitField, BooleanField
from wtforms.validators import DataRequired, Length, Email, EqualTo, ValidationError
from flask_login import current_user
from flaskblog.models import User
class RegistrationForm(FlaskForm):
username = StringField('Username',
validators=[DataRequired(), Length(min=2, max=20)])
email = StringField('Email', validators=[DataRequired(), Email()])
password = PasswordField('Password', validators=[DataRequired()])
confirm_password = PasswordField('Confirm Password',
validators=[DataRequired(), EqualTo('password')])
submit = SubmitField('Sign Up')
def validate_username(self, username):
'''
        Raise ValidationError if the username already exists in the database.
'''
user = User.query.filter_by(username=username.data).first()
if user:
raise ValidationError(
                'This username is taken! Please choose a different one')
def validate_email(self, email):
'''
Raise ValidationError if email exists in database already
'''
user = User.query.filter_by(email=email.data).first()
if user:
raise ValidationError(
                'This email is taken! Please choose a different one')
class LoginForm(FlaskForm):
email = StringField('Email', validators=[DataRequired(), Email()])
password = PasswordField('Password', validators=[DataRequired()])
remember = BooleanField('Remember Me')
submit = SubmitField('Login')
class UpdateAccountForm(FlaskForm):
username = StringField('Username',
validators=[DataRequired(), Length(min=2, max=20)])
email = StringField('Email', validators=[DataRequired(), Email()])
picture = FileField('Update profile picture', validators=[
FileAllowed(['jpg', 'png'])])
submit = SubmitField('Update Account')
def validate_username(self, username):
'''
        Raise ValidationError if the username already exists in the database.
'''
if username.data != current_user.username:
user = User.query.filter_by(username=username.data).first()
if user:
raise ValidationError(
                    'This username is taken! Please choose a different one')
def validate_email(self, email):
'''
        Raise ValidationError if the email already exists in the database.
'''
if email.data != current_user.email:
user = User.query.filter_by(email=email.data).first()
if user:
raise ValidationError(
                    'This email is taken! Please choose a different one')
class RequestResetForm(FlaskForm):
email = StringField('Email', validators=[DataRequired(), Email()])
submit = SubmitField('Request Password Reset')
def validate_email(self, email):
'''
        Raise ValidationError if no account with this email exists in the database.
'''
user = User.query.filter_by(email=email.data).first()
if user is None:
raise ValidationError(
'There is no account associated with this email. You must register first.')
class ResetPasswordForm(FlaskForm):
password = PasswordField('Password', validators=[DataRequired()])
confirm_password = PasswordField('Confirm Password',
validators=[DataRequired(), EqualTo('password')])
submit = SubmitField('Reset Password')
| [
"[email protected]"
] | |
bd65812cfdd6ab3cd0e5706f3851f79bf3fe9fda | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02803/s706884747.py | 68d1a7bdccac1d1136b3fdc1c6c24530314347e6 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,575 | py | import numpy as np
H, W = map(int, input().split())
map_lis = [input() for i in range(H)]
ans = 0
def prob(x,y,h,w):
if 0<=x<h and 0<=y<w:
return True
else:
return False
def count(x,y,lis):
if not lis[x][y]:
return True
else:
return False
def maze(x,y,lis):
if lis[x][y] == ".":
return True
else:
return False
for i in range(H):
for j in range(W):
k = 0
count_lis = np.zeros((H,W))
num_lis = [[] for i in range(1000)]
if maze(i,j,map_lis):
count_lis[i][j] = 1
num_lis[0].append([i,j])
while True:
for l in num_lis[k]:
if prob(l[0]-1,l[1],H,W) and count(l[0]-1,l[1],count_lis) and maze(l[0]-1,l[1],map_lis):
num_lis[k+1].append([l[0]-1,l[1]])
count_lis[l[0]-1][l[1]] = 1
if prob(l[0],l[1]-1,H,W) and count(l[0],l[1]-1,count_lis) and maze(l[0],l[1]-1,map_lis):
num_lis[k+1].append([l[0],l[1]-1])
count_lis[l[0]][l[1]-1] = 1
if prob(l[0]+1,l[1],H,W) and count(l[0]+1,l[1],count_lis) and maze(l[0]+1,l[1],map_lis):
num_lis[k+1].append([l[0]+1,l[1]])
count_lis[l[0]+1][l[1]] = 1
if prob(l[0],l[1]+1,H,W) and count(l[0],l[1]+1,count_lis) and maze(l[0],l[1]+1,map_lis):
num_lis[k+1].append([l[0],l[1]+1])
count_lis[l[0]][l[1]+1] = 1
new_ans = 0
for m in num_lis[1:]:
if m != []:
new_ans += 1
if m == []:
break
ans = max(ans, new_ans)
k += 1
if num_lis[k] == []:
break
print(ans) | [
"[email protected]"
] | |
098e437846ce6912a30c7c1604db09a0b6f8e608 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/87/usersdata/190/59803/submittedfiles/contido.py | 665f253469da7a4df1c7e7b19b10513a1ca38f72 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 232 | py | # -*- coding: utf-8 -*-
n=int(input('digite o numero de elementos de a:'))
m=int(input('digite o numero de elementos de b:'))
a=[]
b=[]
for i in range(1,n+1,1):
valor=float(input('digite o valor:'))
a.append(valor)
print(a) | [
"[email protected]"
] | |
8e2190e81d771f4be02cbe29247c7364b863a649 | da1dbb0e1c8c323bbf7ba0eac43b5815ce075282 | /python/ccxt/btcmarkets.py | 17d7528793b07519dacb1addd93c125255c0cd09 | [
"MIT"
] | permissive | alexander-dev-hub/ccxt | d339662d527bdf0d99380c61ccce233c4475d1a1 | eba5dbe98cf106361c45cec9debda3d2722ea878 | refs/heads/master | 2022-07-10T05:03:35.809978 | 2019-09-02T19:10:10 | 2019-09-02T19:10:10 | 205,919,117 | 1 | 1 | MIT | 2022-06-22T15:56:21 | 2019-09-02T19:00:14 | JavaScript | UTF-8 | Python | false | false | 24,692 | py | # -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.base.exchange import Exchange
import base64
import hashlib
import math
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import ArgumentsRequired
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import OrderNotFound
from ccxt.base.errors import DDoSProtection
class btcmarkets (Exchange):
def describe(self):
return self.deep_extend(super(btcmarkets, self).describe(), {
'id': 'btcmarkets',
'name': 'BTC Markets',
'countries': ['AU'], # Australia
'rateLimit': 1000, # market data cached for 1 second(trades cached for 2 seconds)
'has': {
'CORS': False,
'fetchOHLCV': True,
'fetchOrder': True,
'fetchOrders': True,
'fetchClosedOrders': 'emulated',
'fetchOpenOrders': True,
'fetchMyTrades': True,
'cancelOrders': True,
},
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/29142911-0e1acfc2-7d5c-11e7-98c4-07d9532b29d7.jpg',
'api': {
'public': 'https://api.btcmarkets.net',
'private': 'https://api.btcmarkets.net',
'web': 'https://btcmarkets.net/data',
},
'www': 'https://btcmarkets.net',
'doc': 'https://github.com/BTCMarkets/API',
},
'api': {
'public': {
'get': [
'market/{id}/tick',
'market/{id}/orderbook',
'market/{id}/trades',
'v2/market/{id}/tickByTime/{timeframe}',
'v2/market/{id}/trades',
'v2/market/active',
],
},
'private': {
'get': [
'account/balance',
'account/{id}/tradingfee',
'fundtransfer/history',
'v2/order/open',
'v2/order/open/{id}',
'v2/order/history/{instrument}/{currency}/',
'v2/order/trade/history/{id}',
'v2/transaction/history/{currency}',
],
'post': [
'fundtransfer/withdrawCrypto',
'fundtransfer/withdrawEFT',
'order/create',
'order/cancel',
'order/history',
'order/open',
'order/trade/history',
'order/createBatch', # they promise it's coming soon...
'order/detail',
],
},
'web': {
'get': [
'market/BTCMarkets/{id}/tickByTime',
],
},
},
'timeframes': {
'1m': 'minute',
'1h': 'hour',
'1d': 'day',
},
'exceptions': {
'3': InvalidOrder,
'6': DDoSProtection,
},
})
def fetch_transactions(self, code=None, since=None, limit=None, params={}):
self.load_markets()
request = {}
if limit is not None:
request['limit'] = limit
if since is not None:
request['since'] = since
response = self.privateGetFundtransferHistory(self.extend(request, params))
transactions = response['fundTransfers']
return self.parseTransactions(transactions, None, since, limit)
def parse_transaction_status(self, status):
# todo: find more statuses
statuses = {
'Complete': 'ok',
}
return self.safe_string(statuses, status, status)
def parse_transaction(self, item, currency=None):
#
# {
# status: 'Complete',
# fundTransferId: 1904311906,
# description: 'ETH withdraw from [[email protected]] to Address: 0xF123aa44FadEa913a7da99cc2eE202Db684Ce0e3 amount: 8.28965701 fee: 0.00000000',
# creationTime: 1529418358525,
# currency: 'ETH',
# amount: 828965701,
# fee: 0,
# transferType: 'WITHDRAW',
# errorMessage: null,
# lastUpdate: 1529418376754,
# cryptoPaymentDetail: {
# address: '0xF123aa44FadEa913a7da99cc2eE202Db684Ce0e3',
# txId: '0x8fe483b6f9523559b9ebffb29624f98e86227d2660d4a1fd4785d45e51c662c2'
# }
# }
#
# {
# status: 'Complete',
# fundTransferId: 494077500,
# description: 'BITCOIN Deposit, B 0.1000',
# creationTime: 1501077601015,
# currency: 'BTC',
# amount: 10000000,
# fee: 0,
# transferType: 'DEPOSIT',
# errorMessage: null,
# lastUpdate: 1501077601133,
# cryptoPaymentDetail: null
# }
#
# {
# "fee": 0,
# "amount": 56,
# "status": "Complete",
# "currency": "BCHABC",
# "lastUpdate": 1542339164044,
# "description": "BitcoinCashABC Deposit, P 0.00000056",
# "creationTime": 1542339164003,
# "errorMessage": null,
# "transferType": "DEPOSIT",
# "fundTransferId": 2527326972,
# "cryptoPaymentDetail": null
# }
#
timestamp = self.safe_integer(item, 'creationTime')
lastUpdate = self.safe_integer(item, 'lastUpdate')
transferType = self.safe_string(item, 'transferType')
cryptoPaymentDetail = self.safe_value(item, 'cryptoPaymentDetail', {})
address = self.safe_string(cryptoPaymentDetail, 'address')
txid = self.safe_string(cryptoPaymentDetail, 'txId')
type = None
if transferType == 'DEPOSIT':
type = 'deposit'
elif transferType == 'WITHDRAW':
type = 'withdrawal'
else:
type = transferType
fee = self.safe_float(item, 'fee')
status = self.parse_transaction_status(self.safe_string(item, 'status'))
ccy = self.safe_string(item, 'currency')
code = self.safe_currency_code(ccy)
# todo: self logic is duplicated below
amount = self.safe_float(item, 'amount')
if amount is not None:
amount = amount * 1e-8
return {
'id': self.safe_string(item, 'fundTransferId'),
'txid': txid,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'address': address,
'tag': None,
'type': type,
'amount': amount,
'currency': code,
'status': status,
'updated': lastUpdate,
'fee': {
'currency': code,
'cost': fee,
},
'info': item,
}
def fetch_markets(self, params={}):
response = self.publicGetV2MarketActive(params)
result = []
markets = self.safe_value(response, 'markets')
for i in range(0, len(markets)):
market = markets[i]
baseId = self.safe_string(market, 'instrument')
quoteId = self.safe_string(market, 'currency')
id = baseId + '/' + quoteId
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
symbol = base + '/' + quote
# todo: refactor self
fee = 0.0085 if (quote == 'AUD') else 0.0022
pricePrecision = 2
amountPrecision = 4
minAmount = 0.001 # where does it come from?
minPrice = None
if quote == 'AUD':
if (base == 'XRP') or (base == 'OMG'):
pricePrecision = 4
amountPrecision = -math.log10(minAmount)
minPrice = math.pow(10, -pricePrecision)
precision = {
'amount': amountPrecision,
'price': pricePrecision,
}
limits = {
'amount': {
'min': minAmount,
'max': None,
},
'price': {
'min': minPrice,
'max': None,
},
'cost': {
'min': None,
'max': None,
},
}
result.append({
'info': market,
'id': id,
'symbol': symbol,
'base': base,
'quote': quote,
'baseId': baseId,
'quoteId': quoteId,
'active': None,
'maker': fee,
'taker': fee,
'limits': limits,
'precision': precision,
})
return result
def fetch_balance(self, params={}):
self.load_markets()
balances = self.privateGetAccountBalance(params)
result = {'info': balances}
for i in range(0, len(balances)):
balance = balances[i]
currencyId = self.safe_string(balance, 'currency')
code = self.safe_currency_code(currencyId)
multiplier = 100000000
total = self.safe_float(balance, 'balance')
if total is not None:
total /= multiplier
used = self.safe_float(balance, 'pendingFunds')
if used is not None:
used /= multiplier
account = self.account()
account['used'] = used
account['total'] = total
result[code] = account
return self.parse_balance(result)
def parse_ohlcv(self, ohlcv, market=None, timeframe='1m', since=None, limit=None):
multiplier = 100000000 # for price and volume
return [
ohlcv[0],
float(ohlcv[1]) / multiplier,
float(ohlcv[2]) / multiplier,
float(ohlcv[3]) / multiplier,
float(ohlcv[4]) / multiplier,
float(ohlcv[5]) / multiplier,
]
def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'id': market['id'],
'timeWindow': self.timeframes[timeframe],
}
if since is not None:
request['since'] = since
response = self.webGetMarketBTCMarketsIdTickByTime(self.extend(request, params))
return self.parse_ohlcvs(response['ticks'], market, timeframe, since, limit)
def fetch_order_book(self, symbol, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'id': market['id'],
}
response = self.publicGetMarketIdOrderbook(self.extend(request, params))
timestamp = self.safe_timestamp(response, 'timestamp')
return self.parse_order_book(response, timestamp)
def parse_ticker(self, ticker, market=None):
timestamp = self.safe_timestamp(ticker, 'timestamp')
symbol = None
if market is not None:
symbol = market['symbol']
last = self.safe_float(ticker, 'lastPrice')
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': None,
'low': None,
'bid': self.safe_float(ticker, 'bestBid'),
'bidVolume': None,
'ask': self.safe_float(ticker, 'bestAsk'),
'askVolume': None,
'vwap': None,
'open': None,
'close': last,
'last': last,
'previousClose': None,
'change': None,
'percentage': None,
'average': None,
'baseVolume': self.safe_float(ticker, 'volume24h'),
'quoteVolume': None,
'info': ticker,
}
def fetch_ticker(self, symbol, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'id': market['id'],
}
response = self.publicGetMarketIdTick(self.extend(request, params))
return self.parse_ticker(response, market)
def parse_trade(self, trade, market=None):
timestamp = self.safe_timestamp(trade, 'timestamp')
symbol = None
if market is not None:
symbol = market['symbol']
id = self.safe_string(trade, 'tid')
price = self.safe_float(trade, 'price')
amount = self.safe_float(trade, 'amount')
cost = None
if amount is not None:
if price is not None:
cost = amount * price
return {
'info': trade,
'id': id,
'order': None,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'type': None,
'side': None,
'takerOrMaker': None,
'price': price,
'amount': amount,
'cost': cost,
'fee': None,
}
def fetch_trades(self, symbol, since=None, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
request = {
# 'since': 59868345231,
'id': market['id'],
}
response = self.publicGetMarketIdTrades(self.extend(request, params))
return self.parse_trades(response, market, since, limit)
def create_order(self, symbol, type, side, amount, price=None, params={}):
self.load_markets()
market = self.market(symbol)
multiplier = 100000000 # for price and volume
orderSide = 'Bid' if (side == 'buy') else 'Ask'
request = self.ordered({
'currency': market['quote'],
})
request['currency'] = market['quote']
request['instrument'] = market['base']
request['price'] = int(price * multiplier)
request['volume'] = int(amount * multiplier)
request['orderSide'] = orderSide
request['ordertype'] = self.capitalize(type)
request['clientRequestId'] = str(self.nonce())
response = self.privatePostOrderCreate(self.extend(request, params))
id = self.safe_string(response, 'id')
return {
'info': response,
'id': id,
}
def cancel_orders(self, ids, symbol=None, params={}):
self.load_markets()
for i in range(0, len(ids)):
ids[i] = int(ids[i])
request = {
'orderIds': ids,
}
return self.privatePostOrderCancel(self.extend(request, params))
def cancel_order(self, id, symbol=None, params={}):
self.load_markets()
return self.cancel_orders([id])
def calculate_fee(self, symbol, type, side, amount, price, takerOrMaker='taker', params={}):
market = self.markets[symbol]
rate = market[takerOrMaker]
currency = None
cost = None
if market['quote'] == 'AUD':
currency = market['quote']
cost = float(self.cost_to_precision(symbol, amount * price))
else:
currency = market['base']
cost = float(self.amount_to_precision(symbol, amount))
return {
'type': takerOrMaker,
'currency': currency,
'rate': rate,
'cost': float(self.fee_to_precision(symbol, rate * cost)),
}
def parse_my_trade(self, trade, market):
multiplier = 100000000
timestamp = self.safe_integer(trade, 'creationTime')
side = self.safe_float(trade, 'side')
side = 'buy' if (side == 'Bid') else 'sell'
# BTCMarkets always charge in AUD for AUD-related transactions.
feeCurrencyCode = None
symbol = None
if market is not None:
feeCurrencyCode = market['quote'] if (market['quote'] == 'AUD') else market['base']
symbol = market['symbol']
id = self.safe_string(trade, 'id')
price = self.safe_float(trade, 'price')
if price is not None:
price /= multiplier
amount = self.safe_float(trade, 'volume')
if amount is not None:
amount /= multiplier
feeCost = self.safe_float(trade, 'fee')
if feeCost is not None:
feeCost /= multiplier
cost = None
if price is not None:
if amount is not None:
cost = price * amount
orderId = self.safe_string(trade, 'orderId')
return {
'info': trade,
'id': id,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'order': orderId,
'symbol': symbol,
'type': None,
'side': side,
'price': price,
'amount': amount,
'cost': cost,
'fee': {
'currency': feeCurrencyCode,
'cost': feeCost,
},
}
def parse_my_trades(self, trades, market=None, since=None, limit=None):
result = []
for i in range(0, len(trades)):
trade = self.parse_my_trade(trades[i], market)
result.append(trade)
return result
def parse_order(self, order, market=None):
multiplier = 100000000
side = 'buy' if (order['orderSide'] == 'Bid') else 'sell'
type = 'limit' if (order['ordertype'] == 'Limit') else 'market'
timestamp = self.safe_integer(order, 'creationTime')
if market is None:
market = self.market(order['instrument'] + '/' + order['currency'])
status = 'open'
if order['status'] == 'Failed' or order['status'] == 'Cancelled' or order['status'] == 'Partially Cancelled' or order['status'] == 'Error':
status = 'canceled'
elif order['status'] == 'Fully Matched' or order['status'] == 'Partially Matched':
status = 'closed'
price = self.safe_float(order, 'price') / multiplier
amount = self.safe_float(order, 'volume') / multiplier
remaining = self.safe_float(order, 'openVolume', 0.0) / multiplier
filled = amount - remaining
trades = self.parse_my_trades(order['trades'], market)
numTrades = len(trades)
cost = filled * price
average = None
lastTradeTimestamp = None
if numTrades > 0:
cost = 0
for i in range(0, numTrades):
trade = trades[i]
cost = self.sum(cost, trade['cost'])
if filled > 0:
average = cost / filled
lastTradeTimestamp = trades[numTrades - 1]['timestamp']
id = self.safe_string(order, 'id')
return {
'info': order,
'id': id,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'lastTradeTimestamp': lastTradeTimestamp,
'symbol': market['symbol'],
'type': type,
'side': side,
'price': price,
'cost': cost,
'amount': amount,
'filled': filled,
'remaining': remaining,
'average': average,
'status': status,
'trades': trades,
'fee': None,
}
def fetch_order(self, id, symbol=None, params={}):
self.load_markets()
ids = [int(id)]
request = {
'orderIds': ids,
}
response = self.privatePostOrderDetail(self.extend(request, params))
numOrders = len(response['orders'])
if numOrders < 1:
raise OrderNotFound(self.id + ' No matching order found: ' + id)
order = response['orders'][0]
return self.parse_order(order)
def create_paginated_request(self, market, since=None, limit=None):
limit = 100 if (limit is None) else limit
since = 0 if (since is None) else since
request = self.ordered({
'currency': market['quoteId'],
'instrument': market['baseId'],
'limit': limit,
'since': since,
})
return request
def fetch_orders(self, symbol=None, since=None, limit=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ': fetchOrders requires a `symbol` argument.')
self.load_markets()
market = self.market(symbol)
request = self.create_paginated_request(market, since, limit)
response = self.privatePostOrderHistory(self.extend(request, params))
return self.parse_orders(response['orders'], market)
def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ': fetchOpenOrders requires a `symbol` argument.')
self.load_markets()
market = self.market(symbol)
request = self.create_paginated_request(market, since, limit)
response = self.privatePostOrderOpen(self.extend(request, params))
return self.parse_orders(response['orders'], market)
def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
orders = self.fetch_orders(symbol, since, limit, params)
return self.filter_by(orders, 'status', 'closed')
def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ': fetchMyTrades requires a `symbol` argument.')
self.load_markets()
market = self.market(symbol)
request = self.create_paginated_request(market, since, limit)
response = self.privatePostOrderTradeHistory(self.extend(request, params))
return self.parse_my_trades(response['trades'], market)
def nonce(self):
return self.milliseconds()
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
uri = '/' + self.implode_params(path, params)
url = self.urls['api'][api] + uri
if api == 'private':
self.check_required_credentials()
nonce = str(self.nonce())
auth = None
headers = {
'apikey': self.apiKey,
'timestamp': nonce,
}
if method == 'POST':
headers['Content-Type'] = 'application/json'
auth = uri + "\n" + nonce + "\n" # eslint-disable-line quotes
body = self.json(params)
auth += body
else:
query = self.keysort(self.omit(params, self.extract_params(path)))
queryString = ''
if query:
queryString = self.urlencode(query)
url += '?' + queryString
queryString += "\n" # eslint-disable-line quotes
auth = uri + "\n" + queryString + nonce + "\n" # eslint-disable-line quotes
secret = base64.b64decode(self.secret)
signature = self.hmac(self.encode(auth), secret, hashlib.sha512, 'base64')
headers['signature'] = self.decode(signature)
else:
if params:
url += '?' + self.urlencode(params)
return {'url': url, 'method': method, 'body': body, 'headers': headers}
def handle_errors(self, code, reason, url, method, headers, body, response, requestHeaders, requestBody):
if response is None:
return # fallback to default error handler
if 'success' in response:
if not response['success']:
error = self.safe_string(response, 'errorCode')
message = self.id + ' ' + self.json(response)
if error in self.exceptions:
ExceptionClass = self.exceptions[error]
raise ExceptionClass(message)
else:
raise ExchangeError(message)
| [
"[email protected]"
] | |
7f66de5ad7a64720048a36fbb2fe4e9553f4d762 | cb57a9ea4622b94207d12ea90eab9dd5b13e9e29 | /lc/python/0404_sum_of_left_leaves.py | 8933e5d4e7cc50241ba77f57829793df8661ba7e | [] | no_license | boknowswiki/mytraning | b59585e1e255a7a47c2b28bf2e591aef4af2f09a | 5e2f6ceacf5dec8260ce87e9a5f4e28e86ceba7a | refs/heads/master | 2023-08-16T03:28:51.881848 | 2023-08-10T04:28:54 | 2023-08-10T04:28:54 | 124,834,433 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,263 | py |
# binary tree and dfs
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
def sumOfLeftLeaves(self, root: Optional[TreeNode]) -> int:
if not root:
return 0
self.ret = 0
self.helper(root.left, True)
self.helper(root.right, False)
return self.ret
def helper(self, node, is_left):
if not node:
return
if is_left and node.left is None and node.right is None:
self.ret += node.val
return
self.helper(node.left, True)
self.helper(node.right, False)
return
# binary tree and bfs
# time O(n)
# space O(n)
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
def sumOfLeftLeaves(self, root: Optional[TreeNode]) -> int:
ret = 0
if not root:
return ret
q = collections.deque([root])
while q:
cur = q.popleft()
if cur.left and cur.left.left is None and cur.left.right is None:
ret += cur.left.val
if cur.left:
q.append(cur.left)
if cur.right:
q.append(cur.right)
return ret
# binary tree and dfs
# time O(n)
# space O(h) for the recursion stack (h = tree height)
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
def sumOfLeftLeaves(self, root: Optional[TreeNode]) -> int:
self.ret = 0
self.helper(root, False)
return self.ret
def helper(self, node, is_left):
if not node:
return
if node.left == None and node.right == None and is_left:
self.ret += node.val
self.helper(node.left, True)
self.helper(node.right, False)
return
| [
"[email protected]"
] | |
a5693d14d44e83141c8b81ec18c896a2cc1e4a0d | 5842d17a6c85f7e135609fc9e9951978ad42e6a5 | /app/migrations/0015_userprofile_tz.py | cfecfa463d489995a488b33d945bc7b61d423689 | [] | no_license | projectdata8thsem/lctva | ca95f56b8f3ceaffe6ef724fddcde2f3edb37e30 | 71522cd65451cc52f65eed2033f4ae2bac52040f | refs/heads/master | 2020-04-07T06:03:45.380412 | 2016-10-11T20:05:34 | 2016-10-11T20:05:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 458 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-01-07 03:21
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('app', '0014_auto_20160102_1636'),
]
operations = [
migrations.AddField(
model_name='userprofile',
name='tz',
field=models.CharField(blank=True, max_length=100),
),
]
| [
"[email protected]"
] | |
e647def5e76581edc66f86e284a7583b0c68c1e2 | c32809922bbdb8bef698f9979999187a30823d8f | /setup.py | a5da1976c5aad387385a747d3276c2692a26520b | [
"MIT"
] | permissive | mattdennewitz/pat | 6f98cc19ed7d647dac27ffee6d434fac0be63107 | 782ebf29a75ab9b35d45fc11d2eb7a50fa2f611b | refs/heads/master | 2020-04-01T18:45:35.721013 | 2016-07-27T05:35:20 | 2016-07-27T05:35:20 | 64,279,791 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 741 | py | #!/usr/bin/env python
import os
from setuptools import setup, find_packages
import pip.download
from pip.req import parse_requirements
reqs_txt = os.path.join(os.path.dirname(__file__), 'requirements.txt')
pip_reqs = parse_requirements(reqs_txt, session=pip.download.PipSession())
pip_reqs = [str(obj.req) for obj in pip_reqs]
setup(
name = 'pat',
version = '0.1.0',
description = (
'CLI for testing Xpath and CSS queries on HTML documents'
),
author = 'Matt Dennewitz',
author_email = '[email protected]',
url = 'https://github.com/mattdennewitz/pat',
install_requires = pip_reqs,
include_package_data=True,
packages = find_packages(),
scripts = [
'bin/pat'
],
)
| [
"[email protected]"
] | |
0182379e6cb385585af008abaae27b6f33a00cb1 | cbd2f3db68ec311e0d40f5281111bc3a8f200115 | /modelo/bookings/api/urls.py | ef95c12cefbd96832bd4ab68b6f9dfb1c5b4e6b3 | [] | no_license | CoutinhoElias/fullcalendar | fceff2dfae07508f02ceabf0247bac2da4528224 | 4553828cd1dd20f15a139bf535060b9cee27d3f1 | refs/heads/master | 2020-03-09T20:28:53.020188 | 2018-04-25T20:35:37 | 2018-04-25T20:35:37 | 128,986,130 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,033 | py | from django.conf.urls import url
from django.contrib import admin
from modelo.bookings.api.serializers import BookingListFeriadoSerializer
app_name = 'bookings-api'
from .views import (
PostListAPIView,
PostListFeriadoAPIView,
PostDetailAPIView,
PostDeleteAPIView,
PostUpdateAPIView,
PostCreateAPIView,
PostList2APIView, PostUpdate2APIView)
urlpatterns = [
url(r'^$', PostListAPIView.as_view(), name='listapi'),
url(r'^(?P<pk>\d+)/$',PostDetailAPIView.as_view(),name='detail'),
url(r'^(?P<pk>\d+)/delete/$',PostDeleteAPIView.as_view(),name='delete'),
url(r'^(?P<pk>\d+)/edit/$',PostUpdateAPIView.as_view(),name='update'),
url(r'^create/$', PostCreateAPIView.as_view(), name='create'),
url(r'^feriado/$', PostListFeriadoAPIView.as_view(), name='feriado'),
#url(r'^(?P<slug>[\W-]+)/$',PostDetailAPIView.as_view(),name='detail')
url(r'^list/$', PostList2APIView.as_view(), name='listapi2'),
url(r'^(?P<pk>\d+)/edita/$', PostUpdate2APIView.as_view(), name='update2'),
] | [
"[email protected]"
] | |
5e259794546b32d33fda459110047e2905c88230 | 382e1f0fe6349c9c65aeb369114f739ac7b099c6 | /apps/launchie/admin.py | 8c7d468ff4b88caa0c6a5c0c4d5425f0b32acd71 | [] | no_license | iambibhas/lpad | e2350af49ab049aa9627750a8921eceee770446f | 7205791c743cd27f5f7cdb7bb091e4bcde42b0d4 | refs/heads/master | 2020-05-16T22:23:46.528044 | 2012-03-01T21:38:45 | 2012-03-01T21:38:45 | 3,564,279 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 99 | py | from django.contrib import admin
from launchie.models import Project
admin.site.register(Project)
| [
"[email protected]"
] | |
0266435bf4f57fd88ee473e431c5d9eff7fce795 | 49536aafb22a77a6caf249c7fadef46d63d24dfe | /tensorflow/tensorflow/contrib/learn/python/learn/estimators/__init__.py | bb9f7f894f49a611d744418418bbce85167cbf4c | [
"Apache-2.0"
] | permissive | wangzhi01/deeplearning-1 | 4e5ad93f0d9ecd302b74352f80fe1fa6ae70bf0d | 46ab82253d956953b8aa98e97ceb6cd290e82288 | refs/heads/master | 2020-05-28T03:14:55.687567 | 2018-09-12T16:52:09 | 2018-09-12T16:52:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,817 | py | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""An estimator is a rule for calculating an estimate of a given quantity.
# Estimators
* **Estimators** are used to train and evaluate TensorFlow models.
They support regression and classification problems.
* **Classifiers** are functions that have discrete outcomes.
* **Regressors** are functions that predict continuous values.
## Choosing the correct estimator
* For **Regression** problems use one of the following:
* `LinearRegressor`: Uses linear model.
* `DNNRegressor`: Uses DNN.
* `DNNLinearCombinedRegressor`: Uses Wide & Deep.
* `TensorForestEstimator`: Uses RandomForest.
See tf.contrib.tensor_forest.client.random_forest.TensorForestEstimator.
* `Estimator`: Use when you need a custom model.
* For **Classification** problems use one of the following:
* `LinearClassifier`: Multiclass classifier using Linear model.
* `DNNClassifier`: Multiclass classifier using DNN.
* `DNNLinearCombinedClassifier`: Multiclass classifier using Wide & Deep.
* `TensorForestEstimator`: Uses RandomForest.
See tf.contrib.tensor_forest.client.random_forest.TensorForestEstimator.
* `SVM`: Binary classifier using linear SVMs.
* `LogisticRegressor`: Use when you need custom model for binary
classification.
* `Estimator`: Use when you need custom model for N class classification.
## Pre-canned Estimators
Pre-canned estimators are machine learning estimators premade for general
purpose problems. If you need more customization, you can always write your
own custom estimator as described in the section below.
Pre-canned estimators are tested and optimized for speed and quality.
### Define the feature columns
Here are some possible types of feature columns used as inputs to a pre-canned
estimator.
Feature columns may vary based on the estimator used. So you can see which
feature columns are fed to each estimator in the below section.
```python
sparse_feature_a = sparse_column_with_keys(
column_name="sparse_feature_a", keys=["AB", "CD", ...])
embedding_feature_a = embedding_column(
sparse_id_column=sparse_feature_a, dimension=3, combiner="sum")
sparse_feature_b = sparse_column_with_hash_bucket(
column_name="sparse_feature_b", hash_bucket_size=1000)
embedding_feature_b = embedding_column(
sparse_id_column=sparse_feature_b, dimension=16, combiner="sum")
crossed_feature_a_x_b = crossed_column(
columns=[sparse_feature_a, sparse_feature_b], hash_bucket_size=10000)
real_feature = real_valued_column("real_feature")
real_feature_buckets = bucketized_column(
source_column=real_feature,
boundaries=[18, 25, 30, 35, 40, 45, 50, 55, 60, 65])
```
### Create the pre-canned estimator
DNNClassifier, DNNRegressor, and DNNLinearCombinedClassifier are all pretty
similar to each other in how you use them. You can easily plug in an
optimizer and/or regularization to those estimators.
#### DNNClassifier
A classifier for TensorFlow DNN models.
```python
my_features = [embedding_feature_a, embedding_feature_b]
estimator = DNNClassifier(
feature_columns=my_features,
hidden_units=[1024, 512, 256],
optimizer=tf.train.ProximalAdagradOptimizer(
learning_rate=0.1,
l1_regularization_strength=0.001
))
```
#### DNNRegressor
A regressor for TensorFlow DNN models.
```python
my_features = [embedding_feature_a, embedding_feature_b]
estimator = DNNRegressor(
feature_columns=my_features,
hidden_units=[1024, 512, 256])
# Or estimator using the ProximalAdagradOptimizer optimizer with
# regularization.
estimator = DNNRegressor(
feature_columns=my_features,
hidden_units=[1024, 512, 256],
optimizer=tf.train.ProximalAdagradOptimizer(
learning_rate=0.1,
l1_regularization_strength=0.001
))
```
#### DNNLinearCombinedClassifier
A classifier for TensorFlow Linear and DNN joined training models.
* Wide and deep model
* Multi class (2 by default)
```python
my_linear_features = [crossed_feature_a_x_b]
my_deep_features = [embedding_feature_a, embedding_feature_b]
estimator = DNNLinearCombinedClassifier(
# Common settings
n_classes=n_classes,
weight_column_name=weight_column_name,
# Wide settings
linear_feature_columns=my_linear_features,
linear_optimizer=tf.train.FtrlOptimizer(...),
# Deep settings
dnn_feature_columns=my_deep_features,
dnn_hidden_units=[1000, 500, 100],
dnn_optimizer=tf.train.AdagradOptimizer(...))
```
#### LinearClassifier
Train a linear model to classify instances into one of multiple possible
classes. When number of possible classes is 2, this is binary classification.
```python
my_features = [sparse_feature_b, crossed_feature_a_x_b]
estimator = LinearClassifier(
feature_columns=my_features,
optimizer=tf.train.FtrlOptimizer(
learning_rate=0.1,
l1_regularization_strength=0.001
))
```
#### LinearRegressor
Train a linear regression model to predict a label value given observation of
feature values.
```python
my_features = [sparse_feature_b, crossed_feature_a_x_b]
estimator = LinearRegressor(
feature_columns=my_features)
```
### LogisticRegressor
Logistic regression estimator for binary classification.
```python
# See tf.contrib.learn.Estimator(...) for details on model_fn structure
def my_model_fn(...):
pass
estimator = LogisticRegressor(model_fn=my_model_fn)
# Input builders
def input_fn_train():
pass
estimator.fit(input_fn=input_fn_train)
estimator.predict(x=x)
```
#### SVM - Support Vector Machine
Support Vector Machine (SVM) model for binary classification.
Currently only linear SVMs are supported.
```python
my_features = [real_feature, sparse_feature_a]
estimator = SVM(
example_id_column='example_id',
feature_columns=my_features,
l2_regularization=10.0)
```
#### DynamicRnnEstimator
An `Estimator` that uses a recurrent neural network with dynamic unrolling.
```python
problem_type = ProblemType.CLASSIFICATION # or REGRESSION
prediction_type = PredictionType.SINGLE_VALUE # or MULTIPLE_VALUE
estimator = DynamicRnnEstimator(problem_type,
prediction_type,
my_feature_columns)
```
### Use the estimator
There are two main functions for using estimators, one of which is for
training, and one of which is for evaluation.
You can specify different data sources for each one in order to use different
datasets for train and eval.
```python
# Input builders
def input_fn_train(): # returns x, Y
...
estimator.fit(input_fn=input_fn_train)
def input_fn_eval(): # returns x, Y
...
estimator.evaluate(input_fn=input_fn_eval)
estimator.predict(x=x)
```
## Creating Custom Estimator
To create a custom `Estimator`, provide a function to `Estimator`'s
constructor that builds your model (`model_fn`, below):
```python
estimator = tf.contrib.learn.Estimator(
model_fn=model_fn,
model_dir=model_dir) # Where the model's data (e.g., checkpoints)
# are saved.
```
Here is a skeleton of this function, with descriptions of its arguments and
return values in the accompanying tables:
```python
def model_fn(features, targets, mode, params):
# Logic to do the following:
# 1. Configure the model via TensorFlow operations
# 2. Define the loss function for training/evaluation
# 3. Define the training operation/optimizer
# 4. Generate predictions
return predictions, loss, train_op
```
You may use `mode` and check against
`tf.contrib.learn.ModeKeys.{TRAIN, EVAL, INFER}` to parameterize `model_fn`.
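For example, `model_fn` can branch on the mode so the training op is only
built during training (a sketch in the same spirit as the snippets above, not
part of the original guide):
```python
def model_fn(features, targets, mode, params):
  # ... configure the model and compute `predictions` and `loss` as above ...
  train_op = None
  if mode == tf.contrib.learn.ModeKeys.TRAIN:
    train_op = tf.contrib.layers.optimize_loss(
        loss, tf.contrib.framework.get_global_step(),
        learning_rate=params["learning_rate"], optimizer="SGD")
  return predictions, loss, train_op
```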
In the Further Reading section below, there is an end-to-end TensorFlow
tutorial for building a custom estimator.
## Additional Estimators
There is an additional estimators under
`tensorflow.contrib.factorization.python.ops`:
* Gaussian mixture model (GMM) clustering
## Further reading
For further reading, there are several tutorials with relevant topics,
including:
* [Overview of linear models](../../../tutorials/linear/overview.md)
* [Linear model tutorial](../../../tutorials/wide/index.md)
* [Wide and deep learning tutorial](../../../tutorials/wide_and_deep/index.md)
* [Custom estimator tutorial](../../../tutorials/estimators/index.md)
* [Building input functions](../../../tutorials/input_fn/index.md)
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.learn.python.learn.estimators._sklearn import NotFittedError
from tensorflow.contrib.learn.python.learn.estimators.constants import ProblemType
from tensorflow.contrib.learn.python.learn.estimators.dnn import DNNClassifier
from tensorflow.contrib.learn.python.learn.estimators.dnn import DNNEstimator
from tensorflow.contrib.learn.python.learn.estimators.dnn import DNNRegressor
from tensorflow.contrib.learn.python.learn.estimators.dnn_linear_combined import DNNLinearCombinedClassifier
from tensorflow.contrib.learn.python.learn.estimators.dnn_linear_combined import DNNLinearCombinedEstimator
from tensorflow.contrib.learn.python.learn.estimators.dnn_linear_combined import DNNLinearCombinedRegressor
from tensorflow.contrib.learn.python.learn.estimators.dynamic_rnn_estimator import DynamicRnnEstimator
from tensorflow.contrib.learn.python.learn.estimators.estimator import BaseEstimator
from tensorflow.contrib.learn.python.learn.estimators.estimator import Estimator
from tensorflow.contrib.learn.python.learn.estimators.estimator import GraphRewriteSpec
from tensorflow.contrib.learn.python.learn.estimators.estimator import infer_real_valued_columns_from_input
from tensorflow.contrib.learn.python.learn.estimators.estimator import infer_real_valued_columns_from_input_fn
from tensorflow.contrib.learn.python.learn.estimators.estimator import SKCompat
from tensorflow.contrib.learn.python.learn.estimators.head import binary_svm_head
from tensorflow.contrib.learn.python.learn.estimators.head import Head
from tensorflow.contrib.learn.python.learn.estimators.head import loss_only_head
from tensorflow.contrib.learn.python.learn.estimators.head import multi_class_head
from tensorflow.contrib.learn.python.learn.estimators.head import multi_head
from tensorflow.contrib.learn.python.learn.estimators.head import multi_label_head
from tensorflow.contrib.learn.python.learn.estimators.head import no_op_train_fn
from tensorflow.contrib.learn.python.learn.estimators.head import poisson_regression_head
from tensorflow.contrib.learn.python.learn.estimators.head import regression_head
from tensorflow.contrib.learn.python.learn.estimators.kmeans import KMeansClustering
from tensorflow.contrib.learn.python.learn.estimators.linear import LinearClassifier
from tensorflow.contrib.learn.python.learn.estimators.linear import LinearEstimator
from tensorflow.contrib.learn.python.learn.estimators.linear import LinearRegressor
from tensorflow.contrib.learn.python.learn.estimators.logistic_regressor import LogisticRegressor
from tensorflow.contrib.learn.python.learn.estimators.metric_key import MetricKey
from tensorflow.contrib.learn.python.learn.estimators.model_fn import ModeKeys
from tensorflow.contrib.learn.python.learn.estimators.model_fn import ModelFnOps
from tensorflow.contrib.learn.python.learn.estimators.prediction_key import PredictionKey
from tensorflow.contrib.learn.python.learn.estimators.rnn_common import PredictionType
from tensorflow.contrib.learn.python.learn.estimators.run_config import ClusterConfig
from tensorflow.contrib.learn.python.learn.estimators.run_config import Environment
from tensorflow.contrib.learn.python.learn.estimators.run_config import RunConfig
from tensorflow.contrib.learn.python.learn.estimators.run_config import TaskType
from tensorflow.contrib.learn.python.learn.estimators.svm import SVM
| [
"[email protected]"
] | |
0c7322d41077b64b7c875f3f2f1d39845ebd37f3 | d93159d0784fc489a5066d3ee592e6c9563b228b | /Calibration/EcalAlCaRecoProducers/python/ALCARECOEcalESAlign_Output_cff.py | 4dd5cc22d3f4dbedd90a21eaf5229cad92c545c6 | [] | permissive | simonecid/cmssw | 86396e31d41a003a179690f8c322e82e250e33b2 | 2559fdc9545b2c7e337f5113b231025106dd22ab | refs/heads/CAallInOne_81X | 2021-08-15T23:25:02.901905 | 2016-09-13T08:10:20 | 2016-09-13T08:53:42 | 176,462,898 | 0 | 1 | Apache-2.0 | 2019-03-19T08:30:28 | 2019-03-19T08:30:24 | null | UTF-8 | Python | false | false | 1,006 | py | import FWCore.ParameterSet.Config as cms
OutALCARECOEcalESAlign_noDrop = cms.PSet(
# put this if you have a filter
SelectEvents = cms.untracked.PSet(
SelectEvents = cms.vstring('pathALCARECOEcalESAlign')
),
outputCommands = cms.untracked.vstring(
'keep ESDCCHeaderBlocksSorted_ecalPreshowerDigis_*_*',
'keep ESDigiCollection_ecalPreshowerDigis_*_*',
'keep ESKCHIPBlocksSorted_ecalPreshowerDigis_*_*',
'keep SiPixelClusteredmNewDetSetVector_ecalAlCaESAlignTrackReducer_*_*',
'keep SiStripClusteredmNewDetSetVector_ecalAlCaESAlignTrackReducer_*_*',
'keep TrackingRecHitsOwned_ecalAlCaESAlignTrackReducer_*_*',
'keep recoTrackExtras_ecalAlCaESAlignTrackReducer_*_*',
'keep recoTracks_ecalAlCaESAlignTrackReducer_*_*',
'keep recoBeamSpot_offlineBeamSpot_*_*'
)
)
import copy
OutALCARECOEcalESAlign=copy.deepcopy(OutALCARECOEcalESAlign_noDrop)
OutALCARECOEcalESAlign.outputCommands.insert(0,"drop *")
| [
"[email protected]"
] | |
0f277c60902e8603255da3dbd08294738084831d | 4fbd844113ec9d8c526d5f186274b40ad5502aa3 | /algorithms/python3/minimum_area_rectangle_ii.py | 708813731bf76371e270627e1c38eb0d4c3a71c5 | [] | no_license | capric8416/leetcode | 51f9bdc3fa26b010e8a1e8203a7e1bcd70ace9e1 | 503b2e303b10a455be9596c31975ee7973819a3c | refs/heads/master | 2022-07-16T21:41:07.492706 | 2020-04-22T06:18:16 | 2020-04-22T06:18:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,363 | py | # !/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Given a set of points in the xy-plane, determine the minimum area of any rectangle formed from these points, with sides not necessarily parallel to the x and y axes.
If there isn't any rectangle, return 0.
Example 1:
Input: [[1,2],[2,1],[1,0],[0,1]]
Output: 2.00000
Explanation: The minimum area rectangle occurs at [1,2],[2,1],[1,0],[0,1], with an area of 2.
Example 2:
Input: [[0,1],[2,1],[1,1],[1,0],[2,0]]
Output: 1.00000
Explanation: The minimum area rectangle occurs at [1,0],[1,1],[2,1],[2,0], with an area of 1.
Example 3:
Input: [[0,3],[1,2],[3,1],[1,3],[2,1]]
Output: 0
Explanation: There is no possible rectangle to form from these points.
Example 4:
Input: [[3,1],[1,1],[0,1],[2,1],[3,3],[3,2],[0,2],[2,3]]
Output: 2.00000
Explanation: The minimum area rectangle occurs at [2,1],[2,3],[3,3],[3,1], with an area of 2.
Note:
1 <= points.length <= 50
0 <= points[i][0] <= 40000
0 <= points[i][1] <= 40000
All points are distinct.
Answers within 10^-5 of the actual value will be accepted as correct.
"""
""" ==================== body ==================== """
class Solution:
def minAreaFreeRect(self, points):
"""
:type points: List[List[int]]
:rtype: float
"""
""" ==================== body ==================== """
| [
"[email protected]"
] | |
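# --- A possible completion of the minAreaFreeRect stub above (not part of the
# original submission). One O(n^2) approach: group unordered point pairs by
# (midpoint, squared diagonal length); two pairs sharing both values are the
# diagonals of one rectangle, and its area is the product of the two sides
# measured from any corner. The helper name `min_area_free_rect` is
# illustrative only.
from collections import defaultdict
from itertools import combinations
import math
def min_area_free_rect(points):
    by_diagonal = defaultdict(list)
    for (x1, y1), (x2, y2) in combinations(points, 2):
        mid = (x1 + x2, y1 + y2)  # doubled midpoint, stays integral
        d2 = (x1 - x2) ** 2 + (y1 - y2) ** 2  # squared diagonal length
        by_diagonal[(mid, d2)].append((x1, y1))
    best = float('inf')
    for (mid, _), corners in by_diagonal.items():
        for (x1, y1), (x2, y2) in combinations(corners, 2):
            x3, y3 = mid[0] - x2, mid[1] - y2  # corner opposite (x2, y2)
            best = min(best, math.hypot(x1 - x2, y1 - y2) * math.hypot(x1 - x3, y1 - y3))
    return 0.0 if best == float('inf') else best
# min_area_free_rect([[1, 2], [2, 1], [1, 0], [0, 1]]) -> 2.0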
b90f6e7917af6e75b2d7465f1ec61af936e8ad06 | a08f5a2a75dcbe8e6871759d6b7a89e0e2f3519e | /pypodman/pypodman/lib/actions/rmi_action.py | 7c3d0bd795b1c9a13a592f15020303f704fb5c37 | [
"Apache-2.0"
] | permissive | 4383/python-podman | 99a7d8906e13811059743feba005a7550ff9f604 | 94a46127cb0db2b6187186788a941ec72af476dd | refs/heads/master | 2020-04-22T10:47:05.035526 | 2019-01-14T14:30:35 | 2019-01-14T14:30:35 | 170,317,428 | 0 | 0 | Apache-2.0 | 2019-02-12T12:52:13 | 2019-02-12T12:52:12 | null | UTF-8 | Python | false | false | 1,339 | py | """Remote client command for deleting images."""
import sys
import podman
from pypodman.lib import AbstractActionBase
class Rmi(AbstractActionBase):
"""Class for removing images from storage."""
@classmethod
def subparser(cls, parent):
"""Add Rmi command to parent parser."""
parser = parent.add_parser('rmi', help='delete image(s)')
parser.add_flag(
'--force',
'-f',
help='force delete of image(s) and associated containers.')
parser.add_argument('targets', nargs='+', help='image id(s) to delete')
parser.set_defaults(class_=cls, method='remove')
def remove(self):
"""Remove image(s)."""
for ident in self._args.targets:
try:
img = self.client.images.get(ident)
img.remove(self._args.force)
print(ident)
except podman.ImageNotFound as e:
sys.stdout.flush()
print(
'Image {} not found.'.format(e.name),
file=sys.stderr,
flush=True)
except podman.ErrorOccurred as e:
sys.stdout.flush()
print(
'{}'.format(e.reason).capitalize(),
file=sys.stderr,
flush=True)
| [
"[email protected]"
] | |
5bedb6acfb11c0720e53b72916dc4b2a3fe2290b | 1843fd5ccb4377240e664acd21ba5a9369eca2ab | /bluebottle/utils/exchange_rates.py | 5a13a2e89fbde1ff2d96b0f6451e16a83362c4b3 | [
"BSD-2-Clause"
] | permissive | raux/bluebottle | ba2e576cebcb6835065004c410b22bd8a6b9ee29 | 49d92b5deb289c1539f99122abc20f845577b879 | refs/heads/master | 2020-03-27T03:20:11.465491 | 2018-08-23T13:09:25 | 2018-08-23T13:09:25 | 145,854,614 | 0 | 0 | null | 2018-08-23T13:05:00 | 2018-08-23T13:04:59 | null | UTF-8 | Python | false | false | 198 | py | from djmoney_rates.utils import convert_money
def convert(money, currency):
""" Convert money object `money` to `currency`."""
return convert_money(money.amount, money.currency, currency)
| [
"[email protected]"
] | |
36b6bc2a275ccfba0a769e03a047842003034ffa | 8d1d1e7677e1a18c00fb295971211d4e29d10896 | /vocal_synthesis/experiments/22feb_testing_b2_rmsprop_rectify.py | 5bbb31566fa7981f6bc49ff20056e209698369f4 | [] | no_license | christopher-beckham/ift6266h16 | 8296d1529f6ce3e209af371283f816a4c6d63ea9 | f141fb0a320c20c2c7b43b46f06b1c68cde183f0 | refs/heads/master | 2021-01-10T13:38:40.733180 | 2016-04-17T02:22:52 | 2016-04-17T02:22:52 | 49,399,600 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,177 | py | import numpy as np
from scipy.io import wavfile
import os
import sys
sys.path.append( os.pardir )
import cPickle as pickle
from lasagne.updates import *
import rnn_experiment as experiment
if __name__ == "__main__":
# e.g. 1000_60sec.pkl
in_pkl = sys.argv[1]
out_pkl = sys.argv[2]
with open(in_pkl) as f:
dat = pickle.load(f)
X_train, X_valid, X_test = dat[0]
sys.stderr.write("X_train shape = %s\n" % str(X_train.shape))
sys.stderr.write("X_valid shape = %s\n" % str(X_valid.shape))
sys.stderr.write("X_test shape = %s\n" % str(X_test.shape))
args = dict()
args["seed"] = 0
args["batch_size"] = 16
args["learning_rate"] = 0.01
args["momentum"] = 0.9
args["num_epochs"] = 5000
args["X_train"] = X_train
args["X_valid"] = X_valid
args["X_test"] = X_test
#args["update_method"] = nesterov_momentum
args["update_method"] = adagrad
args["config"] = "../configurations/19feb_testing_e2_relu.py"
model = experiment.train(args)
sys.stderr.write( "writing to file: %s\n" % (out_pkl) )
with open(out_pkl, "wb") as f:
pickle.dump(model, f, pickle.HIGHEST_PROTOCOL)
| [
"[email protected]"
] | |
b665f56e5dff437feeb254d063d46896215f3942 | bad686ba27539a3d3286418cc3ebf2aa80ae4958 | /src/kits/maker-pi-rp2040-robots/time-of-flight-display-test.py | f633d990bc842f9f8617dc32d4679a845184fb29 | [] | no_license | AaryaBatchu/micropython | f0a31b579b3a998586f26b92036875c93588eca7 | aef7d33937352e9ab6f9615bfc5bf9aa1a9bee57 | refs/heads/main | 2023-08-19T13:33:15.006432 | 2021-10-23T19:06:26 | 2021-10-23T19:06:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 657 | py | # Test program for VL53L0X
import time
from machine import Pin, I2C
from ssd1306 import SSD1306_I2C
import VL53L0X
I2C0_SDA_PIN = 0
I2C0_SCL_PIN = 1
I2C1_SDA_PIN = 2
I2C1_SCL_PIN = 3
# use the Pin and I2C names imported from machine above; the bare `machine`
# module itself was never imported, so machine.I2C(...) would raise NameError
i2c0 = I2C(0, sda=Pin(I2C0_SDA_PIN), scl=Pin(I2C0_SCL_PIN))
i2c1 = I2C(1, sda=Pin(I2C1_SDA_PIN), scl=Pin(I2C1_SCL_PIN), freq=400000)
oled = SSD1306_I2C(128, 64, i2c0)
tof = VL53L0X.VL53L0X(i2c1)
tof.start()
while True:
    distance = tof.read()  # read once so the console and the OLED show the same value
    print(distance)
    oled.fill(0)
    oled.text("CoderDojo Robot", 0, 0)
    oled.text("P1:", 0, 20)
    oled.text(str(distance), 40, 20)
oled.show()
time.sleep(0.05)
# tof.stop() | [
"[email protected]"
] | |
105c0808de6a0d4f0fca7819c1b26ac7d1fd97cf | 1cd83cf06ff7ae750ab419ac867d475d38342388 | /play-with-numbers/lychrel.py | ee4e3ef1b0281f78eebe6e5db760bed2cd2e7ead | [
"MIT"
] | permissive | AnuragAnalog/maths | 28e402a980075e0201a699528387a7ca223737e8 | b8fd5398dd8fce657ba5c3c0170b5e493eb9d341 | refs/heads/master | 2022-10-31T14:50:52.114560 | 2022-10-15T19:19:36 | 2022-10-15T19:19:36 | 146,905,452 | 0 | 3 | MIT | 2022-10-15T19:19:37 | 2018-08-31T14:52:56 | C | UTF-8 | Python | false | false | 1,074 | py | #!/usr/bin/python3.6
""" If we take 47, reverse and add, 47 + 74 = 121, which is palindromic.
It is thought that some numbers, like 196, never produce a palindrome.
A number that never forms a palindrome through the reverse and add process
is called a Lychrel number. Due to the theoretical nature of these numbers,
and for the purpose of this problem, we shall assume that a number is Lychrel
until proven otherwise. In addition you are given that for every number below
ten-thousand, it will either (i) become a palindrome in less than fifty
iterations, or, (ii) no one, with all the computing power that exists, has
managed so far to map it to a palindrome. """
def ispalindrome(s):
return s[:] == s[::-1]
def lychrel(n):
tot = n
for i in range(50):
temp = list(str(tot))
temp.reverse()
tot += int("".join(temp))
if ispalindrome(str(tot)):
print(f"Given {n} is not a lychrel number")
return None
print(f"Given {n} is a lychrel number")
n = int(input("Enter the number: "))
lychrel(n)
| [
"[email protected]"
] | |
d0d2170beaa496a695545f31c255a9f64e877438 | 3c38febb2f7a42fb72b543eb23caa9f46580f06b | /Uva Problems/Python/713.py | e95e6f573511da20421cca9b7150a6881a4d1807 | [] | no_license | joyonto51/Programming_Practice | 479b8e84862e35f29a3904bd3556edb6532efb4b | 1d6ff08cf63011cfde6b73988bc1686a12e70a79 | refs/heads/master | 2020-03-09T05:34:39.502942 | 2018-10-01T14:34:17 | 2018-10-01T14:34:17 | 128,617,218 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 175 | py | T = int(input())
for i in range(T):
a,b=map(str,input().split())
x = int(a[::-1])
y = int(b[::-1])
w = str(x+y)
z = int(w[::-1])
print("{}".format(z))
| [
"meghla1609"
] | meghla1609 |
09d70c0aefbd48906980a784bebbb7489af1e443 | b6d3ff62c8baa193226414e0a4ee7e8c242340cc | /crf-seq/sets/sets_2/7/seq_detect_1i.py | 3f0c0ed2646bac57d171b6c4b4cdd9046f430e70 | [
"Apache-2.0"
] | permissive | roma-patel/seq | a0c8025c6629f06157d7530f1f21ca6d9c9c146f | 25012b1218b60090f467fe5ed5a15d7a28b3134c | refs/heads/master | 2021-09-16T15:21:53.842958 | 2018-06-21T20:21:16 | 2018-06-21T20:21:16 | 110,298,977 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,349 | py | import pycrfsuite
import sklearn
from itertools import chain
from sklearn.metrics import classification_report, confusion_matrix
from sklearn.preprocessing import LabelBinarizer
import re
import json
annotypes = ['Participants', 'Intervention', 'Outcome']
annotype = annotypes[1]
path = '/nlp/data/romap/crf/'
#path = '/Users/romapatel/Desktop/crf/'
def run():
train_sents, test_sents = get_train_test_sets()
print len(test_sents)
indwords_list = get_ind_words()
patterns_list = get_patterns()
X_train = [sent_features(train_sents[docid], indwords_list, patterns_list) for docid in train_sents.keys()]
y_train = [sent_labels(train_sents[docid]) for docid in train_sents.keys()]
X_test = [sent_features(test_sents[docid], indwords_list, patterns_list) for docid in test_sents.keys()]
y_test = [sent_labels(test_sents[docid]) for docid in test_sents.keys()]
trainer = pycrfsuite.Trainer(verbose=False)
for xseq, yseq in zip(X_train, y_train):
trainer.append(xseq, yseq)
trainer.set_params({'c1': 1.0,'c2': 1e-3, 'max_iterations': 50, 'feature.possible_transitions': True})
trainer.train('PICO.crfsuite')
tagger = pycrfsuite.Tagger()
tagger.open('PICO.crfsuite')
get_results(test_sents, tagger, indwords_list, patterns_list)
def get_results(test_sents, tagger, indwords_list, patterns_list):
f1 = open(path + 'sets_2/7/' + annotype + '-test_pred.json', 'w+')
f2 = open(path + 'sets_2/7/' + annotype + '-test_correct.json', 'w+')
pred_dict, correct_dict = {}, {}
for docid in test_sents:
pred, correct = tagger.tag(sent_features(test_sents[docid], indwords_list, patterns_list)), sent_labels(test_sents[docid])
spans, span, outside = [], [], True
for i in range(len(pred)):
if pred[i] == '0' and outside is True: continue
elif pred[i] == '0' and outside is False:
span.append(i+1)
spans.append(span)
span, outside = [], True
elif pred[i] == '1' and outside is False: continue
elif pred[i] == '1' and outside is True:
outside = False
span.append(i)
pred_dict[docid] = spans
spans, span, outside = [], [], True
for i in range(len(correct)):
if correct[i] == '0' and outside is True: continue
elif correct[i] == '0' and outside is False:
span.append(i+1)
spans.append(span)
span, outside = [], True
elif correct[i] == '1' and outside is False: continue
elif correct[i] == '1' and outside is True:
outside = False
span.append(i)
correct_dict[docid] = spans
f1.write(json.dumps(pred_dict))
f2.write(json.dumps(correct_dict))
def get_ind_words():
fin_list = []
for annotype in annotypes:
list = []
#filename = annotype.lower() + '_words.txt'
filename = annotype.lower() + '_unigrams.tsv'
f = open(path + 'crf_files/' + filename, 'r')
for line in f:
#word = line[:-1]
items = line.split('\t')
word = items[1][:-1]
if word not in list:
list.append(word)
if annotype == 'Intervention':
f = open(path + 'crf_files/drug_names.txt', 'r')
for line in f:
word = line[:-1]
if word not in list:
list.append(word)
fin_list.append(list)
indwords = [fin_list[0], fin_list[1], fin_list[2]]
return indwords
#all lowercased
def get_patterns():
fin_list = []
for annotype in annotypes:
list = []
#filename = annotype.lower() + '_pattern_copy.txt'
filename = annotype.lower() + '_trigrams.tsv'
f = open(path + 'crf_files/' + filename, 'r')
for line in f:
#word = line[:-1].lower()
word = line[:-1].split('\t')
word = word[1]
if word not in list:
list.append(word)
fin_list.append(list)
patterns = [fin_list[0], fin_list[1], fin_list[2]]
return patterns
def isindword(word, annotype, indwords_list):
if annotype == annotypes[0]: list = indwords_list[0]
elif annotype == annotypes[1]: list = indwords_list[1]
else: list = indwords_list[2]
if word.lower() in list or word.lower()[:-1] in list or word.lower()[-3:] in list: return True
else: return False
def ispattern(word, pos, annotype, pattern_list):
if annotype == annotypes[0]: list = pattern_list[0]
elif annotype == annotypes[1]: list = pattern_list[1]
else: list = pattern_list[2]
    # scan every pattern for the selected annotation type before giving up;
    # the original looped over pattern_list and returned on the first miss
    for pattern in list:
        if word.lower() in pattern or pos.lower() in pattern: return True
    return False
def word_features(sent, i, indwords_list, pattern_list):
word = sent[i][0]
postag = sent[i][2]
features = ['bias', 'word.lower=' + word.lower(),'word[-3:]=' + word[-3:],
'word[-4:]=' + word[-4:],'word.isupper=%s' % word.isupper(),
'word.istitle=%s' % word.istitle(), 'word.isdigit=%s' % word.isdigit(),
'postag=' + postag, 'isindword=%s' % isindword(word, annotype, indwords_list),
'word[0:4]=' + word[0:4], 'ispattern=%s' % ispattern(word, postag, annotype, pattern_list)]
#prev previous word
if i > 1:
word1 = sent[i-2][0]
postag1 = sent[i-2][2]
features.extend(['-1:word.lower=' + word1.lower(), '-1:word.istitle=%s' % word1.istitle(),
'-1:word.isupper=%s' % word1.isupper(), '-1:postag=' + postag1,
'isindword=%s' % isindword(word1, annotype, indwords_list), 'word[0:4]=' + word[0:4],
'word[-3:]=' + word[-3:]])
#previous word
if i > 0:
word1 = sent[i-1][0]
postag1 = sent[i-1][2]
features.extend(['-1:word.lower=' + word1.lower(), '-1:word.istitle=%s' % word1.istitle(),
'-1:word.isupper=%s' % word1.isupper(), '-1:postag=' + postag1,
'isindword=%s' % isindword(word1, annotype, indwords_list), 'word[0:4]=' + word[0:4],
'word[-3:]=' + word[-3:], 'ispattern=%s' % ispattern(word, postag, annotype, pattern_list)])
else:
features.append('BOS')
#next to next word
if i < len(sent)-2:
word1 = sent[i+2][0]
postag1 = sent[i+2][2]
features.extend(['+1:word.lower=' + word1.lower(), '+1:word.istitle=%s' % word1.istitle(),
'+1:word.isupper=%s' % word1.isupper(), '+1:postag=' + postag1,
'isindword=%s' % isindword(word1, annotype, indwords_list), 'word[0:4]=' + word[0:4],
'word[-3:]=' + word[-3:]])
#next word
if i < len(sent)-1:
word1 = sent[i+1][0]
postag1 = sent[i+1][2]
features.extend(['+1:word.lower=' + word1.lower(), '+1:word.istitle=%s' % word1.istitle(),
'+1:word.isupper=%s' % word1.isupper(), '+1:postag=' + postag1,
'isindword=%s' % isindword(word1, annotype, indwords_list), 'word[0:4]=' + word[0:4],
'word[-3:]=' + word[-3:], 'ispattern=%s' % ispattern(word, postag, annotype, pattern_list)])
else:
features.append('EOS')
return features
def sent_features(sent, indwords_list, patterns_list):
return [word_features(sent, i, indwords_list, patterns_list) for i in range(len(sent))]
def sent_labels(sent):
return [str(i_label) for token, ner, postag, p_label, i_label, o_label in sent]
def sent_tokens(sent):
return [token for token, ner, postag, p_label, i_label, o_label in sent]
def print_results(example_sent, tagger, indwords_list, docid, dict):
pred, correct = tagger.tag(sent_features(example_sent, indwords_list)), sent_labels(example_sent)
spans, span, outside = [], [], True
for i in range(len(pred)):
if pred[i] == '0' and outside is True: continue
elif pred[i] == '0' and outside is False:
span.append(i+1)
spans.append(span)
span, outside = [], True
elif pred[i] == '1' and outside is False: continue
elif pred[i] == '1' and outside is True:
outside = False
span.append(i)
f = open(path + annotype + '-test.json', 'w+')
print '\n\nPredicted: ' + str(spans)
for span in spans:
s = ' '
for i in range(span[0], span[1]):
s += example_sent[i][0] + ' '
print s
spans, span, outside = [], [], True
for i in range(len(correct)):
if correct[i] == '0' and outside is True: continue
elif correct[i] == '0' and outside is False:
span.append(i+1)
spans.append(span)
span, outside = [], True
elif correct[i] == '1' and outside is False: continue
elif correct[i] == '1' and outside is True:
outside = False
span.append(i)
print '\n\nCorrect: ' + str(spans)
for span in spans:
s = ' '
for i in range(span[0], span[1]):
s += example_sent[i][0] + ' '
print s
def get_training_data():
f = open(path + 'crf_files/difficulty_crf_mv.json', 'r')
for line in f:
dict = json.loads(line)
return dict
def get_train_test_sets():
test_docids = []
f = open(path + 'crf_files/gold_docids.txt', 'r')
for line in f:
test_docids.append(line[:-1])
doc_dict = get_training_data()
test_sents, train_sents = {}, {}
count = 0
for docid in doc_dict:
sents = doc_dict[docid]
if len(sents) == 0: continue
count += 1
#if count >= 100: break
if docid not in test_docids:
train_sents[docid] = sents
else:
test_sents[docid] = sents
f = open(path + 'difficulty_new.json', 'r')
for line in f:
doc_dict_new = json.loads(line)
count = 1
for docid in doc_dict_new:
if docid in train_sents.keys(): continue
if count < 4741:
count += 1
continue
if count == 9481: break
train_sents[docid] = doc_dict_new[docid]
count += 1
#added 5 times
docid_new = 0
for docid in doc_dict:
sents = doc_dict[docid]
if len(sents) == 0: continue
if docid not in test_docids:
train_sents[str(docid_new)] = sents
docid_new += 1
for docid in doc_dict:
sents = doc_dict[docid]
if len(sents) == 0: continue
if docid not in test_docids:
train_sents[str(docid_new)] = sents
docid_new += 1
for docid in doc_dict:
sents = doc_dict[docid]
if len(sents) == 0: continue
if docid not in test_docids:
train_sents[str(docid_new)] = sents
docid_new += 1
for docid in doc_dict:
sents = doc_dict[docid]
if len(sents) == 0: continue
if docid not in test_docids:
train_sents[str(docid_new)] = sents
docid_new += 1
for docid in doc_dict:
sents = doc_dict[docid]
if len(sents) == 0: continue
if docid not in test_docids:
train_sents[str(docid_new)] = sents
docid_new += 1
return train_sents, test_sents
if __name__ == '__main__':
run()
| [
"[email protected]"
] | |
91225add6aa7e75a6cef7c47459470a6ff209b39 | 58d2b60989a83142286273c1d3bdd66a77842c76 | /Wind/Train/RunConfig.py | 0220ddff4b85e2aa74b7fa6c181234275d0f4756 | [] | no_license | castorgit/wind_code | 8193c7439eb459e58a55b8ff107df6032f77ad7d | 6f25302e9ba581f769d22d47b2c86aa2d60de393 | refs/heads/master | 2022-09-24T13:14:59.280976 | 2020-06-06T17:13:58 | 2020-06-06T17:13:58 | 270,047,772 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,846 | py | """
.. module:: RunConfig
RunConfig
*************
:Description: RunConfig
Object to store information from the flags passed to the script for training
:Authors: HPAI-BSC
:Version:
:Created on: 06/07/2018 8:09
"""
__author__ = 'HPAI-BSC'
class RunConfig:
"""Class Runconfig
Stores information from the flags of the script and are not in the configuration file of the experiment
"""
## Implementation to use for RNN
impl = 1
## Activates TF verbose output and other information
verbose = False
## Generate output for tensorboard
tboard = False
## Keep the model with best validation accuracy
best = True
## Early stopping
early = True
## Multi GPU training
multi = False
## Get experiment configuration using the proxy
proxy = False
## Save the final model
save = False
## Get the data from the remote server
remote = False
## Print info of dataset and model at the end of training
info = False
## Not yet used
log = None
def __init__(self, impl=1, verbose=False, tboard=False, best=True, early=True, multi=False, proxy=False, save=False,
remote=False, info=False, log=False):
""" Constructor
Stores the parameters in the object attributes
:param impl:
:param verbose:
:param tboard:
:param best:
:param early:
:param multi:
:param proxy:
:param save:
:param remote:
:param info:
:param log:
"""
self.impl = impl
self.verbose = verbose
self.tboard = tboard
self.best = best
self.early = early
self.multi = multi
self.proxy = proxy
self.save = save
self.remote = remote
self.info = info
self.log = log
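# A minimal usage sketch (not part of the original module): the training
# script is assumed to build one RunConfig from its command-line flags and
# the training code then only reads these attributes.
if __name__ == '__main__':
    run_config = RunConfig(impl=2, verbose=True, save=True)
    print(run_config.impl, run_config.verbose, run_config.best)  # -> 2 True True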
| [
"[email protected]"
] | |
927db656a17a58d4953f22fbdc57fd16d812b724 | 27ff7fec0ae3f29f58089a2acab0aa3bc4e6e1f7 | /RIDE-python3/utest/controller/test_tcuk_copy.py | 4d4c8ab42b7c9cad385b512f4c7fb6b0a38d324f | [
"Apache-2.0"
] | permissive | zhangsong1417/xx | 01435d6057364991b649c1acc00b36ab13debe5a | c40cfdede194daf3bdf91b36c1936150577128b9 | refs/heads/master | 2020-04-06T14:06:23.011363 | 2019-07-09T02:38:02 | 2019-07-09T02:38:02 | 157,528,207 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,828 | py | import time
import unittest
from robotide.robotapi import TestCaseFile
from robotide.controller.filecontrollers import TestCaseFileController
from resources import COMPLEX_SUITE_PATH
from nose.tools import assert_equal, assert_true
class TestCaseAndUserKeywordCopyingTest(unittest.TestCase):
controller = TestCaseFileController(
TestCaseFile(source=COMPLEX_SUITE_PATH).populate())
def test_test_case_copy(self):
test = self.controller.tests[0]
copy = test.copy('New Name')
assert_equal(copy.name, 'New Name')
for orig, copied in zip(test.settings, copy.settings):
assert_equal(orig.value, copied.value)
assert_true(copied is not orig)
assert_equal(test.steps, copy.steps)
assert_true(test.steps is not copy.steps)
def test_keyword_copy(self):
test = self.controller.keywords[0]
copy = test.copy('New Name')
assert_equal(copy.name, 'New Name')
for orig, copied in zip(test.settings, copy.settings):
assert_equal(orig.value, copied.value)
assert_true(copied is not orig)
assert_equal(test.steps, copy.steps)
assert_true(test.steps is not copy.steps)
def test_test_copy_performance(self):
self._run_copy_test(self.controller.tests[0])
def test_keyword_copy_performance(self):
self._run_copy_test(self.controller.keywords[0])
def _run_copy_test(self, item):
self._test_copy(item, 10)
self._test_copy(item, 200)
def _test_copy(self, item, count):
start_time = time.time()
for i in range(0, count):
item.copy(str(i))
self.assertTrue(time.time() < (start_time + 2),
"Copy operation takes too long time")
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
e09bfea83d8f83c4c3596a4e1384efbdb19d67e1 | f5a1fcc57ba6d1bdd946158d269f861747023977 | /google_problems/problem_19.py | ba9c6737ede40571563275798458611485c593dd | [
"MIT"
] | permissive | younes38/Daily-Coding-Problem | 64f8c4137b86fc44d6202585f009a3a5e05f4eac | ba2b48fbd4d86b2130a396b9d464f2395b9983b6 | refs/heads/master | 2021-07-19T01:41:20.665277 | 2019-11-30T21:00:50 | 2019-11-30T21:00:50 | 225,072,874 | 2 | 0 | MIT | 2019-11-30T21:38:13 | 2019-11-30T21:38:13 | null | UTF-8 | Python | false | false | 463 | py | """This problem was asked by Google.
Implement an LRU (Least Recently Used) cache. It should be able to
be initialized with a cache size n, and contain the following methods:
• set(key, value): sets key to value. If there are already n items in the
cache and we are adding a new item, then it should also
remove the least recently used item.
• get(key): gets the value at key. If no such key exists, return null.
Each operation should run in O(1) time.
""" | [
"[email protected]"
] | |
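# A minimal sketch of one way to meet the O(1) requirement stated above (not
# part of the original file, which only contains the problem statement):
# Python 3's collections.OrderedDict keeps keys in recency order, so both
# operations stay constant time.
from collections import OrderedDict
class LRUCache:
    def __init__(self, n):
        self.n = n
        self.items = OrderedDict()
    def get(self, key):
        if key not in self.items:
            return None
        self.items.move_to_end(key)  # mark key as most recently used
        return self.items[key]
    def set(self, key, value):
        if key in self.items:
            self.items.move_to_end(key)
        self.items[key] = value
        if len(self.items) > self.n:
            self.items.popitem(last=False)  # evict the least recently used key
# cache = LRUCache(2); cache.set('a', 1); cache.set('b', 2); cache.set('c', 3)
# cache.get('a') is None  (True: 'a' was evicted as least recently used)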
8ceb1e40fd1074a3ca829be6b58a977dfba353b3 | 828e58fff5f3779b6fddbc029f332345adb97c4f | /yoohalal/apps/dashboard/promotions/app.py | 9a86e30efe48de93a9f59df745206195afa7b01e | [] | no_license | sofyandamha/Django-Oscar-Marketplace-yoohalal | 5c33ac77834dcbc199d5f39971b0f527e9627167 | 8bc1f11b1fe301f1dc5f9f9edfd6c24402effeb3 | refs/heads/master | 2020-03-27T20:52:37.031477 | 2018-09-02T16:03:43 | 2018-09-02T16:03:43 | 147,099,611 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,564 | py | from django.conf.urls import url
from oscar.apps.dashboard.promotions.app import PromotionsDashboardApplication as CorePromotionsDashboardApplication
from oscar.core.loading import get_class
from apps.promotions.conf import PROMOTION_CLASSES
class PromotionsDashboardApplication(CorePromotionsDashboardApplication):
# Dynamically set the CRUD views for all promotion classes
view_names = (
('create_%s_view', 'Create%sView'),
('update_%s_view', 'Update%sView'),
('delete_%s_view', 'Delete%sView')
)
for klass in PROMOTION_CLASSES:
if klass.classname() == 'categoryproductlist':
for attr_name, view_name in view_names:
full_attr_name = attr_name % klass.classname()
full_view_name = view_name % klass.__name__
view = get_class('apps.dashboard.promotions.views', full_view_name)
locals()[full_attr_name] = view
def get_urls(self):
urls = super(PromotionsDashboardApplication, self).get_urls()
for klass in PROMOTION_CLASSES:
if klass.classname() == 'categoryproductlist':
code = klass.classname()
urls += [
url(r'create/%s/' % code,
getattr(self, 'create_%s_view' % code).as_view(),
name='promotion-create-%s' % code),
url(r'^update/(?P<ptype>%s)/(?P<pk>\d+)/$' % code,
getattr(self, 'update_%s_view' % code).as_view(),
name='promotion-update'),
url(r'^delete/(?P<ptype>%s)/(?P<pk>\d+)/$' % code,
getattr(self, 'delete_%s_view' % code).as_view(),
name='promotion-delete')]
return self.post_process_urls(urls)
application = PromotionsDashboardApplication()
| [
"[email protected]"
] | |
2e1034143ccd6580c372cfbb8d6a5d8edcd31a92 | 11c94a2c46a337293a35866788895634dee9c0c6 | /third_party/typ/typ/expectations_parser.py | 09bb7e0d85234aa720ccbf9c569293925145ee83 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause"
] | permissive | tobeqj/catapult | cce63e58b0fcf5713b942ed73708581491ac2f69 | 4d850033271b29cd51e6343747c75631d335f14c | refs/heads/master | 2020-05-24T14:16:13.287182 | 2019-05-17T22:27:20 | 2019-05-17T23:36:44 | 187,305,972 | 1 | 0 | BSD-3-Clause | 2019-05-18T02:42:59 | 2019-05-18T02:42:59 | null | UTF-8 | Python | false | false | 13,436 | py | # Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# TODO(dpranke): Rename this to 'expectations.py' to remove the 'parser'
# part and make it a bit more generic. Consider if we can reword this to
# also not talk about 'expectations' so much (i.e., to find a clearer way
# to talk about them that doesn't have quite so much legacy baggage), but
# that might not be possible.
import fnmatch
import re
from collections import OrderedDict
from collections import defaultdict
from typ.json_results import ResultType
_EXPECTATION_MAP = {
'Crash': ResultType.Crash,
'Failure': ResultType.Failure,
'Pass': ResultType.Pass,
'Timeout': ResultType.Timeout,
'Skip': ResultType.Skip
}
def _group_to_string(group):
msg = ', '.join(group)
k = msg.rfind(', ')
return msg[:k] + ' and ' + msg[k+2:] if k != -1 else msg
class ParseError(Exception):
def __init__(self, lineno, msg):
super(ParseError, self).__init__('%d: %s' % (lineno, msg))
class Expectation(object):
def __init__(self, reason, test, tags, results, lineno,
retry_on_failure=False):
"""Constructor for expectations.
Args:
reason: String that indicates the reason for the expectation.
test: String indicating which test is being affected.
tags: List of tags that the expectation applies to. Tags are combined
using a logical and, i.e., all of the tags need to be present for
the expectation to apply. For example, if tags = ['Mac', 'Debug'],
then the test must be running with the 'Mac' and 'Debug' tags
set; just 'Mac', or 'Mac' and 'Release', would not qualify.
results: List of outcomes for test. Example: ['Skip', 'Pass']
"""
assert isinstance(reason, basestring) or reason is None
assert isinstance(test, basestring)
self._reason = reason
self._test = test
self._tags = frozenset(tags)
self._results = frozenset(results)
self._lineno = lineno
self.should_retry_on_failure = retry_on_failure
def __eq__(self, other):
return (self.reason == other.reason and self.test == other.test
and self.tags == other.tags and self.results == other.results
and self.lineno == other.lineno)
@property
def reason(self):
return self._reason
@property
def test(self):
return self._test
@property
def tags(self):
return self._tags
@property
def results(self):
return self._results
@property
def lineno(self):
return self._lineno
class TaggedTestListParser(object):
"""Parses lists of tests and expectations for them.
This parser covers the 'tagged' test lists format in:
bit.ly/chromium-test-list-format
Takes raw expectations data as a string read from the expectation file
in the format:
# This is an example expectation file.
#
# tags: [
# Mac Mac10.1 Mac10.2
# Win Win8
# ]
# tags: [ Release Debug ]
crbug.com/123 [ Win ] benchmark/story [ Skip ]
...
"""
TAG_TOKEN = '# tags: ['
# The bug field (optional), including optional subproject.
_MATCH_STRING = r'^(?:(crbug.com/(?:[^/]*/)?\d+) )?'
_MATCH_STRING += r'(?:\[ (.+) \] )?' # The label field (optional).
_MATCH_STRING += r'(\S+) ' # The test path field.
_MATCH_STRING += r'\[ ([^\[.]+) \]' # The expectation field.
_MATCH_STRING += r'(\s+#.*)?$' # End comment (optional).
MATCHER = re.compile(_MATCH_STRING)
def __init__(self, raw_data):
self.tag_sets = []
self.expectations = []
self._tag_to_tag_set = {}
self._parse_raw_expectation_data(raw_data)
def _parse_raw_expectation_data(self, raw_data):
lines = raw_data.splitlines()
lineno = 1
num_lines = len(lines)
tag_sets_intersection = set()
first_tag_line = None
while lineno <= num_lines:
line = lines[lineno - 1].strip()
if line.startswith(self.TAG_TOKEN):
# Handle tags.
if self.expectations:
raise ParseError(lineno,
'Tag found after first expectation.')
if not first_tag_line:
first_tag_line = lineno
right_bracket = line.find(']')
if right_bracket == -1:
# multi-line tag set
tag_set = set(line[len(self.TAG_TOKEN):].split())
lineno += 1
while lineno <= num_lines and right_bracket == -1:
line = lines[lineno - 1].strip()
if line[0] != '#':
raise ParseError(
lineno,
'Multi-line tag set missing leading "#"')
right_bracket = line.find(']')
if right_bracket == -1:
tag_set.update(line[1:].split())
lineno += 1
else:
tag_set.update(line[1:right_bracket].split())
if line[right_bracket+1:]:
raise ParseError(
lineno,
'Nothing is allowed after a closing tag '
'bracket')
else:
if line[right_bracket+1:]:
raise ParseError(
lineno,
'Nothing is allowed after a closing tag '
'bracket')
tag_set = set(
line[len(self.TAG_TOKEN):right_bracket].split())
tag_sets_intersection.update(
(t for t in tag_set if t.lower() in self._tag_to_tag_set))
self.tag_sets.append(tag_set)
self._tag_to_tag_set.update(
{tg.lower(): id(tag_set) for tg in tag_set})
elif line.startswith('#') or not line:
# Ignore, it is just a comment or empty.
lineno += 1
continue
elif not tag_sets_intersection:
self.expectations.append(
self._parse_expectation_line(lineno, line))
else:
break
lineno += 1
if tag_sets_intersection:
is_multiple_tags = len(tag_sets_intersection) > 1
tag_tags = 'tags' if is_multiple_tags else 'tag'
was_were = 'were' if is_multiple_tags else 'was'
error_msg = 'The {0} {1} {2} found in multiple tag sets'.format(
tag_tags, _group_to_string(
sorted(list(tag_sets_intersection))), was_were)
raise ParseError(first_tag_line, error_msg)
def _parse_expectation_line(self, lineno, line):
match = self.MATCHER.match(line)
if not match:
raise ParseError(lineno, 'Syntax error: %s' % line)
# Unused group is optional trailing comment.
reason, raw_tags, test, raw_results, _ = match.groups()
tags = [raw_tag.lower() for raw_tag in raw_tags.split()] if raw_tags else []
tag_set_ids = set()
if '*' in test[:-1]:
raise ParseError(lineno,
'Invalid glob, \'*\' can only be at the end of the pattern')
for t in tags:
if not t in self._tag_to_tag_set:
raise ParseError(lineno, 'Unknown tag "%s"' % t)
else:
tag_set_ids.add(self._tag_to_tag_set[t])
if len(tag_set_ids) != len(tags):
error_msg = ('The tag group contains tags that are '
'part of the same tag set')
tags_by_tag_set_id = defaultdict(list)
for t in tags:
tags_by_tag_set_id[self._tag_to_tag_set[t]].append(t)
for tag_intersection in tags_by_tag_set_id.values():
error_msg += ('\n - Tags %s are part of the same tag set' %
_group_to_string(sorted(tag_intersection)))
raise ParseError(lineno, error_msg)
results = []
retry_on_failure = False
for r in raw_results.split():
try:
# The test expectations may contain expected results and
# the RetryOnFailure tag
if r in _EXPECTATION_MAP:
results.append(_EXPECTATION_MAP[r])
elif r == 'RetryOnFailure':
retry_on_failure = True
else:
raise KeyError
except KeyError:
raise ParseError(lineno, 'Unknown result type "%s"' % r)
# Tags from tag groups will be stored in lower case in the Expectation
# instance. These tags will be compared to the tags passed in to
# the Runner instance which are also stored in lower case.
return Expectation(
reason, test, tags, results, lineno, retry_on_failure)
class TestExpectations(object):
def __init__(self, tags):
self.tags = [tag.lower() for tag in tags]
# Expectations may either refer to individual tests, or globs of
# tests. Each test (or glob) may have multiple sets of tags and
# expected results, so we store these in dicts ordered by the string
# for ease of retrieve. glob_exps use an OrderedDict rather than
# a regular dict for reasons given below.
self.individual_exps = {}
self.glob_exps = OrderedDict()
def parse_tagged_list(self, raw_data):
try:
parser = TaggedTestListParser(raw_data)
except ParseError as e:
return 1, e.message
# TODO(crbug.com/83560) - Add support for multiple policies
# for supporting multiple matching lines, e.g., allow/union,
# reject, etc. Right now, you effectively just get a union.
glob_exps = []
for exp in parser.expectations:
if exp.test.endswith('*'):
glob_exps.append(exp)
else:
self.individual_exps.setdefault(exp.test, []).append(exp)
# Each glob may also have multiple matching lines. By ordering the
# globs by decreasing length, this allows us to find the most
# specific glob by a simple linear search in expected_results_for().
glob_exps.sort(key=lambda exp: len(exp.test), reverse=True)
for exp in glob_exps:
self.glob_exps.setdefault(exp.test, []).append(exp)
return 0, None
def expectations_for(self, test):
# Returns a tuple of (expectations, should_retry_on_failure)
#
# A given test may have multiple expectations, each with different
# sets of tags that apply and different expected results, e.g.:
#
# [ Mac ] TestFoo.test_bar [ Skip ]
# [ Debug Win ] TestFoo.test_bar [ Pass Failure ]
#
# To determine the expected results for a test, we have to loop over
# all of the failures matching a test, find the ones whose tags are
# a subset of the ones in effect, and return the union of all of the
# results. For example, if the runner is running with {Debug, Mac, Mac10.12}
# then lines with no tags, {Mac}, or {Debug, Mac} would all match, but
# {Debug, Win} would not. We also have to set the should_retry_on_failure
# boolean variable to True if any of the expectations have the
# should_retry_on_failure flag set to true
#
# The longest matching test string (name or glob) has priority.
results = set()
should_retry_on_failure = False
# First, check for an exact match on the test name.
for exp in self.individual_exps.get(test, []):
if exp.tags.issubset(self.tags):
results.update(exp.results)
should_retry_on_failure |= exp.should_retry_on_failure
if results or should_retry_on_failure:
return (results or {ResultType.Pass}), should_retry_on_failure
# If we didn't find an exact match, check for matching globs. Match by
# the most specific (i.e., longest) glob first. Because self.globs is
# ordered by length, this is a simple linear search.
for glob, exps in self.glob_exps.items():
if fnmatch.fnmatch(test, glob):
for exp in exps:
if exp.tags.issubset(self.tags):
results.update(exp.results)
should_retry_on_failure |= exp.should_retry_on_failure
# if *any* of the exps matched, results will be non-empty,
# and we're done. If not, keep looking through ever-shorter
# globs.
if results or should_retry_on_failure:
return ((results or {ResultType.Pass}),
should_retry_on_failure)
# Nothing matched, so by default, the test is expected to pass.
return {ResultType.Pass}, False
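# A small usage sketch (not part of the original module); the raw text below
# follows the tag and expectation syntax documented in TaggedTestListParser.
if __name__ == '__main__':
    _raw = ('# tags: [ Mac Win ]\n'
            '# tags: [ Debug Release ]\n'
            'crbug.com/123 [ Mac Debug ] TestFoo.test_bar [ Skip ]\n')
    _expectations = TestExpectations(tags=['Mac', 'Debug'])
    _ret, _err = _expectations.parse_tagged_list(_raw)  # -> (0, None)
    _results, _retry = _expectations.expectations_for('TestFoo.test_bar')
    assert _results == {ResultType.Skip} and not _retry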
| [
"[email protected]"
] | |
44d13ab9183844ac238550a00eb7aa2a9725fc68 | c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c | /cases/synthetic/tree-big-7417.py | ea541f93983bdad42dfad5f037191888021db2c1 | [] | no_license | Virtlink/ccbench-chocopy | c3f7f6af6349aff6503196f727ef89f210a1eac8 | c7efae43bf32696ee2b2ee781bdfe4f7730dec3f | refs/heads/main | 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 23,276 | py | # Binary-search trees
class TreeNode(object):
value:int = 0
left:"TreeNode" = None
right:"TreeNode" = None
def insert(self:"TreeNode", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode(x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode(x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class TreeNode2(object):
value:int = 0
value2:int = 0
left:"TreeNode2" = None
left2:"TreeNode2" = None
right:"TreeNode2" = None
right2:"TreeNode2" = None
def insert(self:"TreeNode2", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode2(x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode2(x, x)
return True
else:
return self.right.insert(x)
return False
def insert2(self:"TreeNode2", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode2(x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode2(x, x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode2", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains2(self:"TreeNode2", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class TreeNode3(object):
value:int = 0
value2:int = 0
value3:int = 0
left:"TreeNode3" = None
left2:"TreeNode3" = None
left3:"TreeNode3" = None
right:"TreeNode3" = None
right2:"TreeNode3" = None
right3:"TreeNode3" = None
def insert(self:"TreeNode3", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode3(x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode3(x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert2(self:"TreeNode3", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode3(x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode3(x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert3(self:"TreeNode3", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode3(x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode3(x, x, x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode3", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains2(self:"TreeNode3", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains3(self:"TreeNode3", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class TreeNode4(object):
value:int = 0
value2:int = 0
value3:int = 0
value4:int = 0
left:"TreeNode4" = None
left2:"TreeNode4" = None
left3:"TreeNode4" = None
left4:"TreeNode4" = None
right:"TreeNode4" = None
right2:"TreeNode4" = None
right3:"TreeNode4" = None
right4:"TreeNode4" = None
def insert(self:"TreeNode4", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode4(x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode4(x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert2(self:"TreeNode4", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode4(x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode4(x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert3(self:"TreeNode4", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode4(x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode4(x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert4(self:"TreeNode4", x:int, x2:int, x3:int, x4:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode4(x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode4(x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode4", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains2(self:"TreeNode4", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains3(self:"TreeNode4", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains4(self:"TreeNode4", x:int, x2:int, x3:int, x4:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class TreeNode5(object):
value:int = 0
value2:int = 0
value3:int = 0
value4:int = 0
value5:int = 0
left:"TreeNode5" = None
left2:"TreeNode5" = None
left3:"TreeNode5" = None
left4:"TreeNode5" = None
left5:"TreeNode5" = None
right:"TreeNode5" = None
right2:"TreeNode5" = None
right3:"TreeNode5" = None
right4:"TreeNode5" = None
right5:"TreeNode5" = None
def insert(self:"TreeNode5", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert2(self:"TreeNode5", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert3(self:"TreeNode5", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert4(self:"TreeNode5", x:int, x2:int, x3:int, x4:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert5(self:"TreeNode5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode5", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains2(self:"TreeNode5", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains3(self:"TreeNode5", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains4(self:"TreeNode5", x:int, x2:int, x3:int, x4:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains5(self:"TreeNode5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class Tree(object):
root:TreeNode = None
size:int = 0
def insert(self:"Tree", x:int) -> object:
if self.root is None:
self.root = makeNode(x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
class Tree2(object):
root:TreeNode2 = None
root2:TreeNode2 = None
size:int = 0
size2:int = 0
def insert(self:"Tree2", x:int) -> object:
if self.root is None:
self.root = makeNode2(x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert2(self:"Tree2", x:int, x2:int) -> object:
if self.root is None:
self.root = makeNode2(x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree2", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains2(self:"Tree2", x:int, x2:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
class Tree3(object):
root:TreeNode3 = None
root2:TreeNode3 = None
root3:TreeNode3 = None
size:int = 0
size2:int = 0
size3:int = 0
def insert(self:"Tree3", x:int) -> object:
if self.root is None:
self.root = makeNode3(x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert2(self:"Tree3", x:int, x2:int) -> object:
if self.root is None:
self.root = makeNode3(x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert3(self:"Tree3", x:int, x2:int, x3:int) -> object:
if self.root is None:
self.root = makeNode3(x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree3", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains2(self:"Tree3", x:int, x2:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains3(self:"Tree3", x:int, x2:int, x3:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
class Tree4(object):
root:TreeNode4 = None
root2:TreeNode4 = None
root3:TreeNode4 = None
root4:TreeNode4 = None
size:int = 0
size2:int = 0
size3:int = 0
size4:int = 0
def insert(self:"Tree4", x:int) -> object:
if self.root is None:
self.root = makeNode4(x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert2(self:"Tree4", x:int, x2:int) -> object:
if self.root is None:
self.root = makeNode4(x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert3(self:"Tree4", x:int, x2:int, x3:int) -> object:
if self.root is None:
self.root = makeNode4(x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert4(self:"Tree4", x:int, x2:int, x3:int, x4:int) -> object:
if self.root is None:
self.root = makeNode4(x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree4", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains2(self:"Tree4", x:int, x2:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains3(self:"Tree4", x:int, x2:int, x3:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains4(self:"Tree4", x:int, x2:int, x3:int, x4:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
class Tree5(object):
root:TreeNode5 = None
root2:TreeNode5 = None
root3:TreeNode5 = None
root4:TreeNode5 = None
root5:TreeNode5 = None
size:int = 0
size2:int = 0
size3:int = 0
size4:int = 0
size5:int = 0
def insert(self:"Tree5", x:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert2(self:"Tree5", x:int, x2:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert3(self:"Tree5", x:int, x2:int, x3:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert4(self:"Tree5", x:int, x2:int, x3:int, x4:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert5(self:"Tree5", x:int, x2:int, x3:int, x4:int, x5:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree5", x:int) -> bool:
        if self.root is None:
return False
else:
return self.root.contains(x)
def contains2(self:"Tree5", x:int, x2:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains3(self:"Tree5", x:int, x2:int, x3:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains4(self:"Tree5", x:int, x2:int, x3:int, x4:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains5(self:"Tree5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def makeNode(x: int) -> TreeNode:
b:TreeNode = None
b = TreeNode()
b.value = x
return b
def makeNode2(x: int, x2: int) -> TreeNode2:
b:TreeNode2 = None
b2:TreeNode2 = None
b = TreeNode2()
b.value = x
return b
def makeNode3(x: int, x2: int, x3: int) -> TreeNode3:
b:TreeNode3 = None
b2:TreeNode3 = None
b3:TreeNode3 = None
b = TreeNode3()
b.value = x
return b
def makeNode4(x: int, x2: int, x3: int, x4: int) -> TreeNode4:
b:TreeNode4 = None
b2:TreeNode4 = None
b3:TreeNode4 = None
b4:TreeNode4 = None
b = TreeNode4()
b.value = x
return b
def makeNode5(x: int, x2: int, x3: int, x4: int, x5: int) -> TreeNode5:
b:TreeNode5 = None
b2:TreeNode5 = None
b3:TreeNode5 = None
b4:TreeNode5 = None
b5:TreeNode5 = None
b = TreeNode5()
b.value = x
return b
# Input parameters
n:int = 100
n2:int = 100
n3:int = 100
n4:int = 100
n5:int = 100
c:int = 4
c2:int = 4
c3:int = 4
c4:int = 4
c5:int = 4
# Data
t:Tree = None
t2:Tree = None
t3:Tree = None
t4:Tree = None
t5:Tree = None
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
k:int = 37813
k2:int = 37813
k3:int = 37813
k4:int = 37813
k5:int = 37813
# Crunch
t = Tree()
while i < n:
t.insert(k)
k = (k * 37813) % 37831
if i % c != 0:
t.insert(i)
i = i + 1
print(t.size)
for i in [4, 8, 15, 16, 23, 42]:
if t.contains(i):
print(i)
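
# Note (added for clarity): the loop above inserts a deterministic pseudo-random
# key sequence generated by k = (k * 37813) % 37831 (plus every i not divisible
# by c), then prints the resulting tree size and which of the probe values
# [4, 8, 15, 16, 23, 42] ended up in the tree.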
| [
"[email protected]"
] | |
e0ed0dfefb138ca3c8fd3e937468c210583a4899 | ace717292aec2cbff637da1455e265b2c4f5894b | /azdev/operations/tests/test_break_change.py | 6a81837caf7c11c4c8bb9ba49d0e3b8e9f1cb7b8 | [
"MIT"
] | permissive | Azure/azure-cli-dev-tools | b62b936327c1129d970e7b836b7f926e7c277cce | d4e9490071a5161c3f663e41a99f16b7a091feea | refs/heads/dev | 2023-08-24T12:36:36.386669 | 2023-08-10T05:57:09 | 2023-08-10T05:57:09 | 158,465,443 | 78 | 106 | MIT | 2023-09-14T02:55:18 | 2018-11-20T23:46:08 | Python | UTF-8 | Python | false | false | 2,621 | py | # -----------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# -----------------------------------------------------------------------------
import unittest
import os
from azdev.operations.command_change import export_command_meta, cmp_command_meta
from azdev.operations.command_change.util import get_command_tree
class MyTestCase(unittest.TestCase):
def test_cmd_meta_generation(self):
if os.path.exists("./jsons/az_monitor_meta.json"):
os.remove("./jsons/az_monitor_meta.json")
module_list = ["monitor"]
export_command_meta(modules=module_list, meta_output_path="./jsons/")
self.assertTrue(os.path.exists("./jsons/az_monitor_meta.json"), "new monitor meta generation failed")
def test_parse_cmd_tree(self):
cmd_name = "monitor log-profiles create"
ret = get_command_tree(cmd_name)
self.assertTrue(ret["is_group"], "group parse failed")
self.assertFalse(ret["sub_info"]["sub_info"]["is_group"], "group parse failed")
self.assertTrue(ret["sub_info"]["sub_info"]["cmd_name"] == "monitor log-profiles create", "group parse failed")
def test_diff_meta(self):
if not os.path.exists("./jsons/az_monitor_meta_before.json") \
or not os.path.exists("./jsons/az_monitor_meta_after.json"):
return
result = cmp_command_meta(base_meta_file="./jsons/az_monitor_meta_before.json",
diff_meta_file="./jsons/az_monitor_meta_after.json",
output_type="text")
target_message = [
"please confirm cmd `monitor private-link-scope scoped-resource show` removed",
"sub group `monitor private-link-scope private-endpoint-connection cust` removed",
]
for mes in target_message:
found = False
for line in result:
if line.find(mes) > -1:
found = True
break
self.assertTrue(found, "target message not found")
ignored_message = [
"updated property `is_aaz` from `False` to `True`"
]
for mes in ignored_message:
ignored = True
for line in result:
if line.find(mes) > -1:
ignored = False
break
self.assertTrue(ignored, "ignored message found")
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
989ffac9bbb35c98fef0e0e921d47fbd26eac1d9 | 4e02d5b0b1b0739553fd40bbbdfb0d02c9830350 | /0279_Perfect_Squares.py | a4f7a1558f81bae7ae146a21635bc67ee099f516 | [] | no_license | bingli8802/leetcode | b039ab6af62f0c8992463393f561caafd21056e6 | a509b383a42f54313970168d9faa11f088f18708 | refs/heads/master | 2023-03-29T03:11:45.801090 | 2021-03-23T22:55:16 | 2021-03-23T22:55:16 | 279,321,659 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,738 | py | class Solution(object):
def numSquares(self, n):
"""
:type n: int
:rtype: int
"""
dp = [0] * (n+1)
for i in range(1, n+1):
dp[i] = i
j = 1
while i - j*j >= 0:
dp[i] = min(dp[i], dp[i-j*j] + 1)
j += 1
return dp[n]
    # Second solution (this definition overrides the one above): same idea, but easier to follow and more efficient.
def numSquares(self, n):
"""
:type n: int
:rtype: int
"""
        # lst stores all the perfect squares that are smaller than or equal to n
        lst = [i*i for i in range(1,n) if i*i <= n]
        print(lst)
        # dp[i] stores the minimum number of perfect squares needed to sum to i, for i from 0 to n
dp = [0] * (n+1)
for num in range(1,n+1):
            # Every number num can be written as num 1's, so initialize the current
            # minimum count to num; e.g. 4 can be split into four 1's, giving min_num = 4.
min_num = num
            # tmp_lst holds the perfect squares that are smaller than or equal to the
            # current number, e.g. [1, 4] when num is 4.
tmp_lst = [c for c in lst if c <= num]
for j in tmp_lst:
                # dp[num-j] is the count of perfect squares needed for num-j, e.g. dp[4-4] + 1 = 1
perfectSquares = dp[num-j] + 1
if perfectSquares < min_num:
min_num = perfectSquares
dp[num] = min_num
return dp[n]
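
# Illustrative usage sketch (added for clarity; not part of the original
# solution). It only assumes the Solution class defined above; the sample
# inputs are arbitrary.
if __name__ == '__main__':
    s = Solution()
    print(s.numSquares(12))  # expected 3, since 12 = 4 + 4 + 4
    print(s.numSquares(13))  # expected 2, since 13 = 4 + 9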
| [
"[email protected]"
] | |
b85bd0e003e0af4800ebefdd62eaedba1d847289 | 83cbb5554f488f78b9cc8fddda5749e4a77cd9d7 | /corona_chan/app_celery.py | d2f74a0925bf8a3b6fdd875f3bcee3262aaa42f5 | [
"WTFPL"
] | permissive | dem4ply/corona_chan | 5273beef3bb859314a39bf6948d983db1f9ead38 | e422a6ddd2ea7628d83f9f7eb09dc8b39984971b | refs/heads/master | 2022-01-21T17:34:56.458016 | 2020-03-28T08:12:40 | 2020-03-28T08:12:40 | 252,572,781 | 0 | 0 | WTFPL | 2022-01-06T22:43:34 | 2020-04-02T21:53:46 | Python | UTF-8 | Python | false | false | 398 | py | from __future__ import absolute_import
import os
from celery import Celery
from django.conf import settings
__all__ = [ 'corona_chan_task' ]
os.environ.setdefault( 'DJANGO_SETTINGS_MODULE', 'corona_chan.settings' )
corona_chan_task = Celery( 'corona_chan' )
corona_chan_task.config_from_object( 'django.conf:settings' )
corona_chan_task.autodiscover_tasks( lambda: settings.INSTALLED_APPS )
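
# Illustrative example (added; not part of the original project): a trivial task
# registered on the app configured above, showing the usual decorator pattern.
# The task name 'corona_chan.debug_ping' is a hypothetical placeholder.
@corona_chan_task.task( name='corona_chan.debug_ping' )
def debug_ping():
    return 'pong'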
| [
"[email protected]"
] | |
d9a61d0432914bf06dd735f4f255e00fbb172535 | 6a3db7061752a7dc7466f3965e1d7aef116b2adc | /core/manager/threadmanager.py | 0a24f226af42726d5611b3b10dedc9d19d0f664f | [
"BSD-2-Clause"
] | permissive | bwbj/imagepy | f3b68b320a82bbb3e0641539a244046e9dc6fa73 | e2ee389f1003fc297a541968dcfd7fd883b91e01 | refs/heads/master | 2021-01-25T09:25:45.077798 | 2017-06-04T16:02:19 | 2017-06-04T16:02:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 153 | py | # -*- coding: utf-8 -*-
"""
Created on Sat Jan 14 23:24:32 2017
@author: yxl
"""
import numpy as np, os
import IPy
class ThreadManager:
threads = [] | [
"[email protected]"
] | |
c64c92384af399fffd97ca2e1811f3c53183049a | ad20495c8df427211dba51c93c507365f9fce319 | /tilejetserver/source/models.py | 99a2588f534876baed6957523db4f391849ae42f | [
"LicenseRef-scancode-public-domain"
] | permissive | tilejet/tilejet-server | 779398257c65138c906f3989c63e029dfe45587e | 7bd0caa18cde98a8fd80aeea6e06bbe8aa2fa1be | refs/heads/master | 2021-01-10T02:41:23.553939 | 2015-12-06T07:18:56 | 2015-12-06T07:19:59 | 43,448,267 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 5,709 | py | import datetime
import logging
import os
import io
import sys
import uuid
from base64 import b64encode
from optparse import make_option
import json
import argparse
import time
import subprocess
import binascii
import re
from django.db import models
from django.db.models import signals
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
from django.core.urlresolvers import reverse
from tilejetserver.utils import TYPE_TMS, TYPE_TMS_FLIPPED, TYPE_BING, TYPE_WMS, TYPE_CHOICES, IMAGE_EXTENSION_CHOICES
def parse_url(url):
if (url is None) or len(url) == 0:
return None
index = url.rfind('/')
if index != (len(url)-1):
url += '/'
return url
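
# Illustrative behaviour of parse_url (derived from the implementation above;
# the example host is a placeholder):
#   parse_url('http://tiles.example.com/v1')  -> 'http://tiles.example.com/v1/'  (slash appended)
#   parse_url('http://tiles.example.com/v1/') -> 'http://tiles.example.com/v1/'  (unchanged)
#   parse_url('') -> None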
class TileOrigin(models.Model):
TYPE_CHOICES = [
(TYPE_TMS, _("TMS")),
(TYPE_TMS_FLIPPED, _("TMS - Flipped")),
(TYPE_BING, _("Bing")),
(TYPE_WMS, _("WMS"))
]
name = models.CharField(max_length=100)
description = models.CharField(max_length=400, help_text=_('Human-readable description of the services provided by this tile origin.'))
type = models.IntegerField(choices=TYPE_CHOICES, default=TYPE_TMS)
cacheable = models.BooleanField(default=True, help_text=_('If true, tiles from the origin might be cached given other constraints. If false, tiles from the origin will never be cached.'))
multiple = models.BooleanField(default=True, help_text=_('If true, make sure to include {slug} in the url to be replaced by each source.'))
auto = models.BooleanField(default=True, help_text=_('Should the proxy automatically create tile sources for this origin?'))
url = models.CharField(max_length=400, help_text=_('Used to generate url for new tilesource. For example, http://c.tile.openstreetmap.org/{z}/{x}/{y}.png.'))
extensions = models.CharField(max_length=400,null=True,blank=True)
pattern = models.CharField(max_length=400,null=True,blank=True)
auth = models.CharField(max_length=400, blank=True, null=True, help_text=_('Authentication or access token. Dynamically replaced in downstream sources by replacing {auth}.'))
def __unicode__(self):
return self.name
class Meta:
ordering = ("name","type")
verbose_name_plural = _("Tile Origins")
def type_title(self):
return unicode([v for i, v in enumerate(TYPE_CHOICES) if v[0] == self.type][0][1]);
    def match(self, url):
        match = None
        # If matches primary pattern, then check secondary patterns/filters.
        if self.pattern:
            match = re.match(self.pattern, url, re.M|re.I)
        #patterns = TileOriginPattern.objects.filter(origin__pk=self.pk)
        #for pattern in patterns:
        #    match = pattern.match(url)
        #    if match:
        #        break
        return match
class TileOriginPattern(models.Model):
origin = models.ForeignKey(TileOrigin,null=True,blank=True,help_text=_('The origin.'))
includes = models.CharField(max_length=400,null=True,blank=True)
excludes = models.CharField(max_length=400,null=True,blank=True)
def __unicode__(self):
return self.origin.name + " - "+str(self.pk)
class Meta:
ordering = ("origin", "includes", "excludes")
verbose_name_plural = _("Tile Origin Patterns")
def match(self,url):
#print "matching includes: "+str(self.includes)
#print "matching excludes: "+str(self.excludes)
#print "matching url: "+str(url)
match = None
if self.includes:
match = re.match(self.includes, url, re.M|re.I)
if self.excludes:
if re.match(self.excludes, url, re.M|re.I):
match = None
#print "match: "+str(match)
return match
class TileSource(models.Model):
name = models.CharField(max_length=100)
description = models.CharField(max_length=400, null=True, blank=True, help_text=_('Human-readable description of this tile source.'))
type = models.IntegerField(choices=TYPE_CHOICES, default=TYPE_TMS)
auto = models.BooleanField(default=True, help_text=_('Was the tile source created automatically by the proxy or manually by a user?'))
cacheable = models.BooleanField(default=True, help_text=_('If true, tiles from this source might be cached given other constraints. If false, tiles from this source will never be cached.'))
origin = models.ForeignKey(TileOrigin,null=True,blank=True,help_text=_('The Tile Origin, if there is one.'))
url = models.CharField(max_length=400, help_text=_('Standard Tile URL. If applicable, replace {slug} from origin. For example, http://c.tile.openstreetmap.org/{z}/{x}/{y}.{ext}. If url includes {auth}, it is dynamically replaced with the relevant auth token stored with origin.'))
#extensions = models.CharField(max_length=400,null=True,blank=True,choices=IMAGE_EXTENSION_CHOICES)
extensions = models.CharField(max_length=400,null=True,blank=True)
pattern = models.CharField(max_length=400,null=True,blank=True)
extents = models.CharField(max_length=800,blank=True,null=True)
minZoom = models.IntegerField(default=0,null=True,blank=True)
maxZoom = models.IntegerField(default=None,null=True,blank=True)
def __unicode__(self):
return self.name
class Meta:
ordering = ("name",)
verbose_name_plural = _("Tile Sources")
@property
def tileservices(self):
return self.tileservice_set
def type_title(self):
return unicode([v for i, v in enumerate(TYPE_CHOICES) if v[0] == self.type][0][1]);
| [
"[email protected]"
] | |
d046dfadb9f91a4651d440ee1c6c211841633686 | 51f887286aa3bd2c3dbe4c616ad306ce08976441 | /pybind/slxos/v17r_1_01a/routing_system/router/router_bgp/address_family/ipv6/ipv6_unicast/default_vrf/af_ipv6_uc_and_vrf_cmds_call_point_holder/redistribute/static/__init__.py | 0bb0151f518a6c84ba89cf606dd97f1974d8b36b | [
"Apache-2.0"
] | permissive | b2220333/pybind | a8c06460fd66a97a78c243bf144488eb88d7732a | 44c467e71b2b425be63867aba6e6fa28b2cfe7fb | refs/heads/master | 2020-03-18T09:09:29.574226 | 2018-04-03T20:09:50 | 2018-04-03T20:09:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,312 | py |
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
class static(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module brocade-common-def - based on the path /routing-system/router/router-bgp/address-family/ipv6/ipv6-unicast/default-vrf/af-ipv6-uc-and-vrf-cmds-call-point-holder/redistribute/static. Each member element of
the container is represented as a class variable - with a specific
YANG type.
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__redistribute_static','__unicast_static_metric','__static_route_map',)
_yang_name = 'static'
_rest_name = 'static'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
path_helper_ = kwargs.pop("path_helper", None)
if path_helper_ is False:
self._path_helper = False
elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
self._path_helper = path_helper_
elif hasattr(self, "_parent"):
path_helper_ = getattr(self._parent, "_path_helper", False)
self._path_helper = path_helper_
else:
self._path_helper = False
extmethods = kwargs.pop("extmethods", None)
if extmethods is False:
self._extmethods = False
elif extmethods is not None and isinstance(extmethods, dict):
self._extmethods = extmethods
elif hasattr(self, "_parent"):
extmethods = getattr(self._parent, "_extmethods", None)
self._extmethods = extmethods
else:
self._extmethods = False
self.__unicast_static_metric = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..4294967295']}), is_leaf=True, yang_name="unicast-static-metric", rest_name="metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Metric for redistributed routes', u'alt-name': u'metric'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='conn-metric', is_config=True)
self.__static_route_map = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..63']}), is_leaf=True, yang_name="static-route-map", rest_name="route-map", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Route map reference', u'alt-name': u'route-map'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='rmap-type', is_config=True)
self.__redistribute_static = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="redistribute-static", rest_name="redistribute-static", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-run-template': u'$(.?$(../unicast-static-metric?\\r:$(../static-route-map?\\r:redistribute static\n)):\\r)', u'cli-drop-node-name': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'routing-system', u'router', u'router-bgp', u'address-family', u'ipv6', u'ipv6-unicast', u'default-vrf', u'af-ipv6-uc-and-vrf-cmds-call-point-holder', u'redistribute', u'static']
def _rest_path(self):
if hasattr(self, "_parent"):
if self._rest_name:
return self._parent._rest_path()+[self._rest_name]
else:
return self._parent._rest_path()
else:
return [u'router', u'bgp', u'address-family', u'ipv6', u'unicast', u'redistribute', u'static']
def _get_redistribute_static(self):
"""
Getter method for redistribute_static, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv6/ipv6_unicast/default_vrf/af_ipv6_uc_and_vrf_cmds_call_point_holder/redistribute/static/redistribute_static (empty)
"""
return self.__redistribute_static
def _set_redistribute_static(self, v, load=False):
"""
Setter method for redistribute_static, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv6/ipv6_unicast/default_vrf/af_ipv6_uc_and_vrf_cmds_call_point_holder/redistribute/static/redistribute_static (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_redistribute_static is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_redistribute_static() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="redistribute-static", rest_name="redistribute-static", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-run-template': u'$(.?$(../unicast-static-metric?\\r:$(../static-route-map?\\r:redistribute static\n)):\\r)', u'cli-drop-node-name': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """redistribute_static must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="redistribute-static", rest_name="redistribute-static", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-run-template': u'$(.?$(../unicast-static-metric?\\r:$(../static-route-map?\\r:redistribute static\n)):\\r)', u'cli-drop-node-name': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)""",
})
self.__redistribute_static = t
if hasattr(self, '_set'):
self._set()
def _unset_redistribute_static(self):
self.__redistribute_static = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="redistribute-static", rest_name="redistribute-static", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-run-template': u'$(.?$(../unicast-static-metric?\\r:$(../static-route-map?\\r:redistribute static\n)):\\r)', u'cli-drop-node-name': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
def _get_unicast_static_metric(self):
"""
Getter method for unicast_static_metric, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv6/ipv6_unicast/default_vrf/af_ipv6_uc_and_vrf_cmds_call_point_holder/redistribute/static/unicast_static_metric (conn-metric)
"""
return self.__unicast_static_metric
def _set_unicast_static_metric(self, v, load=False):
"""
Setter method for unicast_static_metric, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv6/ipv6_unicast/default_vrf/af_ipv6_uc_and_vrf_cmds_call_point_holder/redistribute/static/unicast_static_metric (conn-metric)
If this variable is read-only (config: false) in the
source YANG file, then _set_unicast_static_metric is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_unicast_static_metric() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..4294967295']}), is_leaf=True, yang_name="unicast-static-metric", rest_name="metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Metric for redistributed routes', u'alt-name': u'metric'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='conn-metric', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """unicast_static_metric must be of a type compatible with conn-metric""",
'defined-type': "brocade-bgp:conn-metric",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..4294967295']}), is_leaf=True, yang_name="unicast-static-metric", rest_name="metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Metric for redistributed routes', u'alt-name': u'metric'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='conn-metric', is_config=True)""",
})
self.__unicast_static_metric = t
if hasattr(self, '_set'):
self._set()
def _unset_unicast_static_metric(self):
self.__unicast_static_metric = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..4294967295']}), is_leaf=True, yang_name="unicast-static-metric", rest_name="metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Metric for redistributed routes', u'alt-name': u'metric'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='conn-metric', is_config=True)
def _get_static_route_map(self):
"""
Getter method for static_route_map, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv6/ipv6_unicast/default_vrf/af_ipv6_uc_and_vrf_cmds_call_point_holder/redistribute/static/static_route_map (rmap-type)
"""
return self.__static_route_map
def _set_static_route_map(self, v, load=False):
"""
Setter method for static_route_map, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv6/ipv6_unicast/default_vrf/af_ipv6_uc_and_vrf_cmds_call_point_holder/redistribute/static/static_route_map (rmap-type)
If this variable is read-only (config: false) in the
source YANG file, then _set_static_route_map is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_static_route_map() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..63']}), is_leaf=True, yang_name="static-route-map", rest_name="route-map", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Route map reference', u'alt-name': u'route-map'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='rmap-type', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """static_route_map must be of a type compatible with rmap-type""",
'defined-type': "brocade-bgp:rmap-type",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..63']}), is_leaf=True, yang_name="static-route-map", rest_name="route-map", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Route map reference', u'alt-name': u'route-map'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='rmap-type', is_config=True)""",
})
self.__static_route_map = t
if hasattr(self, '_set'):
self._set()
def _unset_static_route_map(self):
self.__static_route_map = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..63']}), is_leaf=True, yang_name="static-route-map", rest_name="route-map", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Route map reference', u'alt-name': u'route-map'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='rmap-type', is_config=True)
redistribute_static = __builtin__.property(_get_redistribute_static, _set_redistribute_static)
unicast_static_metric = __builtin__.property(_get_unicast_static_metric, _set_unicast_static_metric)
static_route_map = __builtin__.property(_get_static_route_map, _set_static_route_map)
_pyangbind_elements = {'redistribute_static': redistribute_static, 'unicast_static_metric': unicast_static_metric, 'static_route_map': static_route_map, }
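
# Illustrative note (added; not generated code): values are normally assigned
# through the exposed properties, which route through the _set_* methods above
# and enforce the YANG restrictions, e.g. given a hypothetical object `redist`
# holding this container:
#   redist.static.unicast_static_metric = 100         # uint32, range 1..4294967295
#   redist.static.static_route_map = 'rmap-to-bgp'    # string, length 1..63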
| [
"[email protected]"
] | |
804ae630fba94c13e8ca0673bfb3b16caf9c116b | e7451193592aaee2536924ef03846eee920bcf94 | /ucscentralsdk/mometa/ls/LsIssues.py | ed90bd3be818f6f0f3d288ee7ced886d4a637c0a | [
"Apache-2.0"
] | permissive | vinayravish/ucscentralsdk | eb33191f3c7675561298af8cef9b30f6e220b7b2 | 809a3782d26c69f50cf7237700e107f1a9857870 | refs/heads/master | 2021-01-18T01:51:57.275207 | 2016-07-20T05:37:26 | 2016-07-20T05:37:26 | 62,137,219 | 0 | 0 | null | 2016-06-28T12:00:34 | 2016-06-28T12:00:34 | null | UTF-8 | Python | false | false | 12,494 | py | """This module contains the general information for LsIssues ManagedObject."""
from ...ucscentralmo import ManagedObject
from ...ucscentralcoremeta import UcsCentralVersion, MoPropertyMeta, MoMeta
from ...ucscentralmeta import VersionMeta
class LsIssuesConsts():
pass
class LsIssues(ManagedObject):
"""This is LsIssues class."""
consts = LsIssuesConsts()
naming_props = set([])
mo_meta = MoMeta("LsIssues", "lsIssues", "config-issue", VersionMeta.Version112a, "InputOutput", 0xf, [], ["admin", "ls-compute", "ls-config", "ls-server"], [u'computeInstance', u'lsServer'], [u'faultInst'], ["Get"])
prop_meta = {
"child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version112a, MoPropertyMeta.INTERNAL, None, None, None, r"""((deleteAll|ignore|deleteNonPresent),){0,2}(deleteAll|ignore|deleteNonPresent){0,1}""", [], []),
"dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version112a, MoPropertyMeta.READ_ONLY, 0x2, 0, 256, None, [], []),
"iscsi_config_issues": MoPropertyMeta("iscsi_config_issues", "iscsiConfigIssues", "string", VersionMeta.Version112a, MoPropertyMeta.READ_ONLY, None, None, None, r"""((defaultValue|not-applicable|static-target-mix|native-vlan|auto-target-auth|iscsi-config|missing-vlan|invalid-target-params|invalid-target-name|initiator-name|ip-addr-dhcp|init-target-passwd|auto-target-init|iscsi-initiator-ip-address|iqn-pool-name|vnic-name|no-luns|target-iscsilif-static-ip|unclassified|init-identity|target-name|no-vlan-ip|invalid-mac|iscsi-cardinality|allowed-vlan|internal-cfg-error|unresolvable-managed-target|auth-profile-same),){0,27}(defaultValue|not-applicable|static-target-mix|native-vlan|auto-target-auth|iscsi-config|missing-vlan|invalid-target-params|invalid-target-name|initiator-name|ip-addr-dhcp|init-target-passwd|auto-target-init|iscsi-initiator-ip-address|iqn-pool-name|vnic-name|no-luns|target-iscsilif-static-ip|unclassified|init-identity|target-name|no-vlan-ip|invalid-mac|iscsi-cardinality|allowed-vlan|internal-cfg-error|unresolvable-managed-target|auth-profile-same){0,1}""", [], []),
"network_config_issues": MoPropertyMeta("network_config_issues", "networkConfigIssues", "string", VersionMeta.Version112a, MoPropertyMeta.READ_ONLY, None, None, None, r"""((defaultValue|not-applicable|network-feature-capability-mismatch|switch-virtual-if-capacity|conflicting-vlan-access|unsupported-usnic-config|named-vlan-inaccessible|unsupported-multicast-policy|permit-unresolved|unsupported-vmq-config|vlan-port-capacity|pinning-invalid),){0,11}(defaultValue|not-applicable|network-feature-capability-mismatch|switch-virtual-if-capacity|conflicting-vlan-access|unsupported-usnic-config|named-vlan-inaccessible|unsupported-multicast-policy|permit-unresolved|unsupported-vmq-config|vlan-port-capacity|pinning-invalid){0,1}""", [], []),
"rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version112a, MoPropertyMeta.READ_ONLY, 0x4, 0, 256, None, [], []),
"server_config_issues": MoPropertyMeta("server_config_issues", "serverConfigIssues", "string", VersionMeta.Version112a, MoPropertyMeta.READ_ONLY, None, None, None, r"""((defaultValue|not-applicable|insufficient-power-budget|server-position-requirement|boot-configuration-unsupported|processor-type-bios-downgrade-restriction|compute-undiscovered|power-group-requirement|imgsec-policy-invalid|soft-pinning-vlan-mismatch|embedded-controller-not-supported|missing-firmware-image|resource-ownership-conflict|boot-order-pxe|vmedia-policy-unsupported|on-next-boot-unsupported|insufficient-resources|mac-address-assignment|remote-service-profile|server-feature-capability-mismatch|boot-order-jbod-image-path|incompat-bios-for-sriov-vnics|processor-requirement|provsrv-policy-invalid|cimc-downgrade-restriction|unsupported-vic-slot|board-controller-update-unsupported|non-interrupt-fsm-running|boot-order-san-image-path|bootip-policy-invalid|boot-policy-vmedia-invalid|server-type-bios-downgrade-restriction|memory-requirement|system-uuid-assignment|domain-requirement|qualified-pool-without-binding|boot-configuration-invalid|incompatible-bios-image|remote-policy|qos-policy-invalid|processor-type-cimc-downgrade-restriction|compute-unavailable|physical-requirement|hostimg-policy-invalid|vmedia-mount-config-invalid|server-type-cimc-downgrade-restriction|migration|wwnn-derivation-from-vhba|duplicate-address-conflict|unsupported-bios-for-vnic-cdn|boot-order-iscsi),){0,50}(defaultValue|not-applicable|insufficient-power-budget|server-position-requirement|boot-configuration-unsupported|processor-type-bios-downgrade-restriction|compute-undiscovered|power-group-requirement|imgsec-policy-invalid|soft-pinning-vlan-mismatch|embedded-controller-not-supported|missing-firmware-image|resource-ownership-conflict|boot-order-pxe|vmedia-policy-unsupported|on-next-boot-unsupported|insufficient-resources|mac-address-assignment|remote-service-profile|server-feature-capability-mismatch|boot-order-jbod-image-path|incompat-bios-for-sriov-vnics|processor-requirement|provsrv-policy-invalid|cimc-downgrade-restriction|unsupported-vic-slot|board-controller-update-unsupported|non-interrupt-fsm-running|boot-order-san-image-path|bootip-policy-invalid|boot-policy-vmedia-invalid|server-type-bios-downgrade-restriction|memory-requirement|system-uuid-assignment|domain-requirement|qualified-pool-without-binding|boot-configuration-invalid|incompatible-bios-image|remote-policy|qos-policy-invalid|processor-type-cimc-downgrade-restriction|compute-unavailable|physical-requirement|hostimg-policy-invalid|vmedia-mount-config-invalid|server-type-cimc-downgrade-restriction|migration|wwnn-derivation-from-vhba|duplicate-address-conflict|unsupported-bios-for-vnic-cdn|boot-order-iscsi){0,1}""", [], []),
"status": MoPropertyMeta("status", "status", "string", VersionMeta.Version112a, MoPropertyMeta.READ_WRITE, 0x8, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []),
"storage_config_issues": MoPropertyMeta("storage_config_issues", "storageConfigIssues", "string", VersionMeta.Version112a, MoPropertyMeta.READ_ONLY, None, None, None, r"""((defaultValue|not-applicable|flexflash-metadata|unsupported-disk-controller-config|unsupported-expand-to-available|unsupported-use-remaining-disks|zone-capacity|duplicated-lun-name|unsupported-hotspare-change|unsupported-vd-modification|disk-role-mismatch|missing-raid-key|orphaned-lun-ref-missing|virtual-drive-access-denied|destructive-local-disk-config|storage-feature-capability-mismatch|insufficient-disks|conflicting-lun-config|unsupported-global-hotspares|drive-cache-not-supported|flexflash-controller|unsupported-controller|invalid-storage-profile-binding|lun-in-use|incompatible-disk-types|storage-path-configuration-error|disk-type-mismatch|orphaned-lun-ref-access-denied|virtual-drive-deletion-in-progress|unsupported-raid-level|incomplete-lun-config|unsupported-order|embedded-controller-not-supported|incompatible-number-of-local-disks|flexflash-card|unsupported-destructive-change|invalid-local-lun-disk-policy-reference|set-proper-order|wwnn-assignment|unsupported-orphan-lun-modification|unsupported-lun-map-modification|unsupported-write-cache-policy|unsupported-io-policy|insufficient-storage-space|order-should-be-unique|virtual-drive-capacity|incompatible-raid-level),){0,46}(defaultValue|not-applicable|flexflash-metadata|unsupported-disk-controller-config|unsupported-expand-to-available|unsupported-use-remaining-disks|zone-capacity|duplicated-lun-name|unsupported-hotspare-change|unsupported-vd-modification|disk-role-mismatch|missing-raid-key|orphaned-lun-ref-missing|virtual-drive-access-denied|destructive-local-disk-config|storage-feature-capability-mismatch|insufficient-disks|conflicting-lun-config|unsupported-global-hotspares|drive-cache-not-supported|flexflash-controller|unsupported-controller|invalid-storage-profile-binding|lun-in-use|incompatible-disk-types|storage-path-configuration-error|disk-type-mismatch|orphaned-lun-ref-access-denied|virtual-drive-deletion-in-progress|unsupported-raid-level|incomplete-lun-config|unsupported-order|embedded-controller-not-supported|incompatible-number-of-local-disks|flexflash-card|unsupported-destructive-change|invalid-local-lun-disk-policy-reference|set-proper-order|wwnn-assignment|unsupported-orphan-lun-modification|unsupported-lun-map-modification|unsupported-write-cache-policy|unsupported-io-policy|insufficient-storage-space|order-should-be-unique|virtual-drive-capacity|incompatible-raid-level){0,1}""", [], []),
"vnic_config_issues": MoPropertyMeta("vnic_config_issues", "vnicConfigIssues", "string", VersionMeta.Version112a, MoPropertyMeta.READ_ONLY, None, None, None, r"""((defaultValue|not-applicable|adaptor-protected-eth-capability|vif-resources-overprovisioned|ungrouped-domain|unsupported-nvgre|unsupported-adaptor-for-vnic-cdn|unresolved-remote-vlan-name|invalid-wwn|service-profile-virtualization-conflict|unsupported-roce-netflow|unsupported-vxlan-netflow|fcoe-capacity|wwpn-derivation-virtualized-port|unresolved-vlan-name|vnic-virtualization-netflow-conflict|unsupported-vxlan-usnic|unsupported-roce-properties|pinning-vlan-mismatch|adaptor-requirement|vnic-not-ha-ready|missing-ipv4-inband-mgmt-addr|unsupported-nvgre-dynamic-vnic|duplicate-vnic-cdn-name|unresolved-remote-vsan-name|mac-derivation-virtualized-port|vnic-virtualization-conflict|unsupported-roce|unsupported-nvgre-netflow|vnic-vlan-assignment-error|insufficient-vhba-capacity|inaccessible-vlan|unable-to-update-ucsm|soft-pinning-vlan-mismatch|unsupported-roce-usnic|unsupported-nvgre-vmq|connection-placement|vnic-vcon-provisioning-change|missing-ipv6-inband-mgmt-addr|unsupported-nvgre-usnic|insufficient-roce-resources|missing-primary-vlan|adaptor-fcoe-capability|vfc-vnic-pvlan-conflict|virtualization-not-supported|unsupported-vxlan|unsupported-roce-nvgre|unresolved-vsan-name|insufficient-vnic-capacity|unassociated-vlan|unsupported-roce-vmq|unsupported-roce-vxlan|unsupported-vxlan-vmq|dynamic-vf-vnic|wwpn-assignment|missing-ipv4-addr|unsupported-vxlan-dynamic-vnic|pinned-target-misconfig|unsupported-vmq-resources),){0,58}(defaultValue|not-applicable|adaptor-protected-eth-capability|vif-resources-overprovisioned|ungrouped-domain|unsupported-nvgre|unsupported-adaptor-for-vnic-cdn|unresolved-remote-vlan-name|invalid-wwn|service-profile-virtualization-conflict|unsupported-roce-netflow|unsupported-vxlan-netflow|fcoe-capacity|wwpn-derivation-virtualized-port|unresolved-vlan-name|vnic-virtualization-netflow-conflict|unsupported-vxlan-usnic|unsupported-roce-properties|pinning-vlan-mismatch|adaptor-requirement|vnic-not-ha-ready|missing-ipv4-inband-mgmt-addr|unsupported-nvgre-dynamic-vnic|duplicate-vnic-cdn-name|unresolved-remote-vsan-name|mac-derivation-virtualized-port|vnic-virtualization-conflict|unsupported-roce|unsupported-nvgre-netflow|vnic-vlan-assignment-error|insufficient-vhba-capacity|inaccessible-vlan|unable-to-update-ucsm|soft-pinning-vlan-mismatch|unsupported-roce-usnic|unsupported-nvgre-vmq|connection-placement|vnic-vcon-provisioning-change|missing-ipv6-inband-mgmt-addr|unsupported-nvgre-usnic|insufficient-roce-resources|missing-primary-vlan|adaptor-fcoe-capability|vfc-vnic-pvlan-conflict|virtualization-not-supported|unsupported-vxlan|unsupported-roce-nvgre|unresolved-vsan-name|insufficient-vnic-capacity|unassociated-vlan|unsupported-roce-vmq|unsupported-roce-vxlan|unsupported-vxlan-vmq|dynamic-vf-vnic|wwpn-assignment|missing-ipv4-addr|unsupported-vxlan-dynamic-vnic|pinned-target-misconfig|unsupported-vmq-resources){0,1}""", [], []),
}
prop_map = {
"childAction": "child_action",
"dn": "dn",
"iscsiConfigIssues": "iscsi_config_issues",
"networkConfigIssues": "network_config_issues",
"rn": "rn",
"serverConfigIssues": "server_config_issues",
"status": "status",
"storageConfigIssues": "storage_config_issues",
"vnicConfigIssues": "vnic_config_issues",
}
def __init__(self, parent_mo_or_dn, **kwargs):
self._dirty_mask = 0
self.child_action = None
self.iscsi_config_issues = None
self.network_config_issues = None
self.server_config_issues = None
self.status = None
self.storage_config_issues = None
self.vnic_config_issues = None
ManagedObject.__init__(self, "LsIssues", parent_mo_or_dn, **kwargs)
| [
"[email protected]"
] | |
847223e52750a1c83489ec6b1a24eef605dc399e | 2e682fd72e3feaa70e3f7bf2a3b83c50d783ec02 | /PyTorch/contrib/cv/detection/centernet2/projects/CenterNet2/centernet/modeling/layers/deform_conv.py | 1f769c8cc406ad284f290a81f443686934da2c31 | [
"Apache-2.0",
"LicenseRef-scancode-proprietary-license",
"GPL-1.0-or-later",
"BSD-2-Clause",
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | Ascend/ModelZoo-PyTorch | 4c89414b9e2582cef9926d4670108a090c839d2d | 92acc188d3a0f634de58463b6676e70df83ef808 | refs/heads/master | 2023-07-19T12:40:00.512853 | 2023-07-17T02:48:18 | 2023-07-17T02:48:18 | 483,502,469 | 23 | 6 | Apache-2.0 | 2022-10-15T09:29:12 | 2022-04-20T04:11:18 | Python | UTF-8 | Python | false | false | 4,579 | py | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
import torch
from torch import nn
from detectron2.layers import Conv2d
class _NewEmptyTensorOp(torch.autograd.Function):
@staticmethod
def forward(ctx, x, new_shape):
ctx.shape = x.shape
return x.new_empty(new_shape)
@staticmethod
def backward(ctx, grad):
shape = ctx.shape
return _NewEmptyTensorOp.apply(grad, shape), None
class DFConv2d(nn.Module):
"""Deformable convolutional layer"""
def __init__(
self,
in_channels,
out_channels,
with_modulated_dcn=True,
kernel_size=3,
stride=1,
groups=1,
dilation=1,
deformable_groups=1,
bias=False,
padding=None
):
super(DFConv2d, self).__init__()
if isinstance(kernel_size, (list, tuple)):
assert isinstance(stride, (list, tuple))
assert isinstance(dilation, (list, tuple))
assert len(kernel_size) == 2
assert len(stride) == 2
assert len(dilation) == 2
padding = (
dilation[0] * (kernel_size[0] - 1) // 2,
dilation[1] * (kernel_size[1] - 1) // 2
)
offset_base_channels = kernel_size[0] * kernel_size[1]
else:
padding = dilation * (kernel_size - 1) // 2
offset_base_channels = kernel_size * kernel_size
if with_modulated_dcn:
from detectron2.layers.deform_conv import ModulatedDeformConv
offset_channels = offset_base_channels * 3 # default: 27
conv_block = ModulatedDeformConv
else:
from detectron2.layers.deform_conv import DeformConv
offset_channels = offset_base_channels * 2 # default: 18
conv_block = DeformConv
self.offset = Conv2d(
in_channels,
deformable_groups * offset_channels,
kernel_size=kernel_size,
stride=stride,
padding=padding,
groups=1,
dilation=dilation
)
nn.init.constant_(self.offset.weight, 0)
nn.init.constant_(self.offset.bias, 0)
'''
for l in [self.offset, ]:
nn.init.kaiming_uniform_(l.weight, a=1)
torch.nn.init.constant_(l.bias, 0.)
'''
self.conv = conv_block(
in_channels,
out_channels,
kernel_size=kernel_size,
stride=stride,
padding=padding,
dilation=dilation,
groups=groups,
deformable_groups=deformable_groups,
bias=bias
)
self.with_modulated_dcn = with_modulated_dcn
self.kernel_size = kernel_size
self.stride = stride
self.padding = padding
self.dilation = dilation
self.offset_split = offset_base_channels * deformable_groups * 2
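
    # Minimal usage sketch (illustrative only; channel counts and the input shape
    # are arbitrary, and actually running it requires detectron2's deformable-conv
    # ops, which are typically CUDA-only):
    #   conv = DFConv2d(64, 128, with_modulated_dcn=True, kernel_size=3, stride=1).cuda()
    #   y = conv(torch.randn(2, 64, 32, 32).cuda())   # -> shape (2, 128, 32, 32)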
def forward(self, x, return_offset=False):
if x.numel() > 0:
if not self.with_modulated_dcn:
offset_mask = self.offset(x)
x = self.conv(x, offset_mask)
else:
offset_mask = self.offset(x)
offset = offset_mask[:, :self.offset_split, :, :]
mask = offset_mask[:, self.offset_split:, :, :].sigmoid()
x = self.conv(x, offset, mask)
if return_offset:
return x, offset_mask
return x
# get output shape
output_shape = [
(i + 2 * p - (di * (k - 1) + 1)) // d + 1
for i, p, di, k, d in zip(
x.shape[-2:],
self.padding,
self.dilation,
self.kernel_size,
self.stride
)
]
output_shape = [x.shape[0], self.conv.weight.shape[0]] + output_shape
return _NewEmptyTensorOp.apply(x, output_shape) | [
"[email protected]"
] | |
ede7c645eab3f1f455a1d33accfd7f85666937fa | afebbb07b2b4eada17a5853c1ce63b4075d280df | /marketsim/gen/_out/_imultiassetstrategy.py | af12f65be98a441b14e7d2cd86e9acf0c7e41cf3 | [] | no_license | peter1000/marketsimulator | 8c0a55fc6408b880311d3ad49defc55e9af57824 | 1b677200a9d5323f2970c83f076c2b83d39d4fe6 | refs/heads/master | 2021-01-18T01:39:04.869755 | 2015-03-29T17:47:24 | 2015-03-29T17:47:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 45 | py | class IMultiAssetStrategy(object):
pass
| [
"[email protected]"
] | |
5e3edb681874b53ffa1b0407e4f3f44e9c9564ce | a2d36e471988e0fae32e9a9d559204ebb065ab7f | /huaweicloud-sdk-gaussdb/huaweicloudsdkgaussdb/v3/model/mysql_flavors_info.py | f645f35059fe903ce4c4505600efa70fabee5fdc | [
"Apache-2.0"
] | permissive | zhouxy666/huaweicloud-sdk-python-v3 | 4d878a90b8e003875fc803a61414788e5e4c2c34 | cc6f10a53205be4cb111d3ecfef8135ea804fa15 | refs/heads/master | 2023-09-02T07:41:12.605394 | 2021-11-12T03:20:11 | 2021-11-12T03:20:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,889 | py | # coding: utf-8
import re
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class MysqlFlavorsInfo:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'vcpus': 'str',
'ram': 'str',
'type': 'str',
'id': 'str',
'spec_code': 'str',
'version_name': 'str',
'instance_mode': 'str',
'az_status': 'dict(str, str)'
}
attribute_map = {
'vcpus': 'vcpus',
'ram': 'ram',
'type': 'type',
'id': 'id',
'spec_code': 'spec_code',
'version_name': 'version_name',
'instance_mode': 'instance_mode',
'az_status': 'az_status'
}
def __init__(self, vcpus=None, ram=None, type=None, id=None, spec_code=None, version_name=None, instance_mode=None, az_status=None):
"""MysqlFlavorsInfo - a model defined in huaweicloud sdk"""
self._vcpus = None
self._ram = None
self._type = None
self._id = None
self._spec_code = None
self._version_name = None
self._instance_mode = None
self._az_status = None
self.discriminator = None
self.vcpus = vcpus
self.ram = ram
self.type = type
self.id = id
self.spec_code = spec_code
self.version_name = version_name
self.instance_mode = instance_mode
self.az_status = az_status
@property
def vcpus(self):
"""Gets the vcpus of this MysqlFlavorsInfo.
        CPU size. For example, 1 indicates 1 vCPU (1U).
:return: The vcpus of this MysqlFlavorsInfo.
:rtype: str
"""
return self._vcpus
@vcpus.setter
def vcpus(self, vcpus):
"""Sets the vcpus of this MysqlFlavorsInfo.
        CPU size. For example, 1 indicates 1 vCPU (1U).
:param vcpus: The vcpus of this MysqlFlavorsInfo.
:type: str
"""
self._vcpus = vcpus
@property
def ram(self):
"""Gets the ram of this MysqlFlavorsInfo.
        Memory size, in GB.
:return: The ram of this MysqlFlavorsInfo.
:rtype: str
"""
return self._ram
@ram.setter
def ram(self, ram):
"""Sets the ram of this MysqlFlavorsInfo.
        Memory size, in GB.
:param ram: The ram of this MysqlFlavorsInfo.
:type: str
"""
self._ram = ram
@property
def type(self):
"""Gets the type of this MysqlFlavorsInfo.
        Flavor type. The value can be arm or x86.
:return: The type of this MysqlFlavorsInfo.
:rtype: str
"""
return self._type
@type.setter
def type(self, type):
"""Sets the type of this MysqlFlavorsInfo.
        Flavor type. The value can be arm or x86.
:param type: The type of this MysqlFlavorsInfo.
:type: str
"""
self._type = type
@property
def id(self):
"""Gets the id of this MysqlFlavorsInfo.
        Flavor ID. This field is unique.
:return: The id of this MysqlFlavorsInfo.
:rtype: str
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this MysqlFlavorsInfo.
        Flavor ID; this field is unique.
:param id: The id of this MysqlFlavorsInfo.
:type: str
"""
self._id = id
@property
def spec_code(self):
"""Gets the spec_code of this MysqlFlavorsInfo.
        Resource flavor code, the same as the flavor_ref specified at creation. For example: gaussdb.mysql.xlarge.x86.4.
:return: The spec_code of this MysqlFlavorsInfo.
:rtype: str
"""
return self._spec_code
@spec_code.setter
def spec_code(self, spec_code):
"""Sets the spec_code of this MysqlFlavorsInfo.
        Resource flavor code, the same as the flavor_ref specified at creation. For example: gaussdb.mysql.xlarge.x86.4.
:param spec_code: The spec_code of this MysqlFlavorsInfo.
:type: str
"""
self._spec_code = spec_code
@property
def version_name(self):
"""Gets the version_name of this MysqlFlavorsInfo.
        Database version number.
:return: The version_name of this MysqlFlavorsInfo.
:rtype: str
"""
return self._version_name
@version_name.setter
def version_name(self, version_name):
"""Sets the version_name of this MysqlFlavorsInfo.
        Database version number.
:param version_name: The version_name of this MysqlFlavorsInfo.
:type: str
"""
self._version_name = version_name
@property
def instance_mode(self):
"""Gets the instance_mode of this MysqlFlavorsInfo.
        Instance type. Currently only Cluster is supported.
:return: The instance_mode of this MysqlFlavorsInfo.
:rtype: str
"""
return self._instance_mode
@instance_mode.setter
def instance_mode(self, instance_mode):
"""Sets the instance_mode of this MysqlFlavorsInfo.
        Instance type. Currently only Cluster is supported.
:param instance_mode: The instance_mode of this MysqlFlavorsInfo.
:type: str
"""
self._instance_mode = instance_mode
@property
def az_status(self):
"""Gets the az_status of this MysqlFlavorsInfo.
        Status of the flavor in each AZ. Possible values: - normal, available for sale - unsupported, the flavor is not yet supported - sellout, sold out.
:return: The az_status of this MysqlFlavorsInfo.
:rtype: dict(str, str)
"""
return self._az_status
@az_status.setter
def az_status(self, az_status):
"""Sets the az_status of this MysqlFlavorsInfo.
        Status of the flavor in each AZ. Possible values: - normal, available for sale - unsupported, the flavor is not yet supported - sellout, sold out.
:param az_status: The az_status of this MysqlFlavorsInfo.
:type: dict(str, str)
"""
self._az_status = az_status
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
"""For `print`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, MysqlFlavorsInfo):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
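# Hypothetical usage sketch (not part of the original SDK file): the SDK normally builds
# this model from an API response; the hand-constructed values below are illustrative only
# and show how the fields map and how to_dict() serializes them.
if __name__ == '__main__':
    example_flavor = MysqlFlavorsInfo(
        vcpus='4', ram='16', type='x86',
        id='example-flavor-id', spec_code='gaussdb.mysql.xlarge.x86.4',
        version_name='8.0', instance_mode='Cluster',
        az_status={'az1': 'normal', 'az2': 'sellout'})
    print(example_flavor.to_dict())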
| [
"[email protected]"
] | |
f98b586ac029d55f77d430b036cbbe0642135da8 | 0660cfca0799685969f75b4082455a5608e23bc4 | /TensorFlow2/Recommendation/WideAndDeep/data/outbrain/nvtabular/preproc.py | 52240c1143495c764743a4c98d2bd58e20e4257a | [] | no_license | resemble-ai/DeepLearningExamples | c88dc66930bc2c4627fd187fc0034e783e6244d3 | 2d555548b698e4fc207965b7121f525c37e0401c | refs/heads/master | 2023-04-12T17:23:45.349726 | 2021-04-26T13:00:10 | 2021-04-26T13:00:10 | 362,784,409 | 4 | 3 | null | 2021-04-29T10:51:42 | 2021-04-29T10:51:41 | null | UTF-8 | Python | false | false | 1,995 | py | # Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
os.environ['TF_MEMORY_ALLOCATION'] = "0.0"
from data.outbrain.nvtabular.utils.converter import nvt_to_tfrecords
from data.outbrain.nvtabular.utils.workflow import execute_pipeline
from data.outbrain.nvtabular.utils.arguments import parse_args
from data.outbrain.nvtabular.utils.setup import create_config
def is_empty(path):
return not os.path.exists(path) or (not os.path.isfile(path) and not os.listdir(path))
def main():
args = parse_args()
config = create_config(args)
if is_empty(args.metadata_path):
logging.warning('Creating new stats data into {}'.format(config['stats_file']))
execute_pipeline(config)
else:
        logging.warning(f'Directory is not empty {args.metadata_path}')
logging.warning('Skipping NVTabular preprocessing')
if os.path.exists(config['output_train_folder']) and os.path.exists(config['output_valid_folder']):
if is_empty(config['tfrecords_path']):
logging.warning('Executing NVTabular parquets to TFRecords conversion')
nvt_to_tfrecords(config)
else:
logging.warning(f"Directory is not empty {config['tfrecords_path']}")
logging.warning('Skipping TFrecords conversion')
else:
logging.warning(f'Train and validation dataset not found in {args.metadata_path}')
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
8319f4003ac2e4e7bc59b13fa6bded86492c5bf2 | d89cbdfbb67e46d43a05edb2abeb5e1deedfd3a2 | /functions/shake_shake_function.py | 2b38e2c551c100ed70044885d915227a673d2921 | [
"MIT"
] | permissive | minhtannguyen/pytorch_shake_shake | c9a6fb520ba2201c1ccb022767cbba963225bbae | d7f245d8d8b9e81a6020aadb438ffeae6d5593c2 | refs/heads/master | 2020-03-27T14:22:09.646020 | 2018-09-01T02:00:25 | 2018-09-01T02:00:25 | 146,658,651 | 0 | 0 | MIT | 2018-08-29T21:07:24 | 2018-08-29T21:07:24 | null | UTF-8 | Python | false | false | 1,363 | py | # coding: utf-8
import torch
from torch.autograd import Function
class ShakeFunction(Function):
@staticmethod
def forward(ctx, x1, x2, alpha, beta):
ctx.save_for_backward(x1, x2, alpha, beta)
y = x1 * alpha + x2 * (1 - alpha)
return y
@staticmethod
def backward(ctx, grad_output):
x1, x2, alpha, beta = ctx.saved_variables
grad_x1 = grad_x2 = grad_alpha = grad_beta = None
if ctx.needs_input_grad[0]:
grad_x1 = grad_output * beta
if ctx.needs_input_grad[1]:
grad_x2 = grad_output * (1 - beta)
return grad_x1, grad_x2, grad_alpha, grad_beta
shake_function = ShakeFunction.apply
def get_alpha_beta(batch_size, shake_config, is_cuda):
forward_shake, backward_shake, shake_image = shake_config
if forward_shake and not shake_image:
alpha = torch.rand(1)
elif forward_shake and shake_image:
alpha = torch.rand(batch_size).view(batch_size, 1, 1, 1)
else:
alpha = torch.FloatTensor([0.5])
if backward_shake and not shake_image:
beta = torch.rand(1)
elif backward_shake and shake_image:
beta = torch.rand(batch_size).view(batch_size, 1, 1, 1)
else:
beta = torch.FloatTensor([0.5])
if is_cuda:
alpha = alpha.cuda()
beta = beta.cuda()
return alpha, beta
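# Hypothetical usage sketch (not part of the original file): in a shake-shake residual
# block the two branch outputs are mixed with a random alpha in the forward pass, while
# gradients are mixed with an independent beta; the tensor shapes below are illustrative.
if __name__ == '__main__':
    x1 = torch.randn(4, 16, 8, 8)  # output of residual branch 1
    x2 = torch.randn(4, 16, 8, 8)  # output of residual branch 2
    # shake_config = (forward_shake, backward_shake, shake_image)
    alpha, beta = get_alpha_beta(4, (True, True, True), is_cuda=False)
    y = shake_function(x1, x2, alpha, beta)
    print(y.shape)  # torch.Size([4, 16, 8, 8])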
| [
"[email protected]"
] | |
94c31b6f766395c271029a4a360588b51ce03a06 | bc40c23403c1926e5bb81d3f6b6be567ccee7a70 | /ecommerce/user_model/models.py | 049da65fb7f8b02f6947ab77d96e101120f2e8e3 | [] | no_license | hrupesh/ecommerce | e1e0abf624ad3222bcf57548f051a3197ae899e5 | 2fc645e0e43f1920d787612e8f5b47d5990c793e | refs/heads/master | 2020-04-20T01:28:35.977058 | 2019-01-31T14:51:20 | 2019-01-31T14:51:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,178 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core.urlresolvers import reverse
from django.db import models
import hashlib
# Create your models here.
class register_model(models.Model):
firstname = models.CharField(max_length=250, help_text='Required')
lastname = models.CharField(max_length=250, help_text='Required')
username = models.CharField(max_length=250, help_text='Required')
email = models.EmailField(max_length=250, help_text='Required')
contact_no = models.IntegerField(null=True)
slug = models.SlugField(unique = True, null=True)
password = models.CharField(max_length=100)
timestamp = models.DateTimeField(auto_now=False, auto_now_add=True)
updated = models.DateTimeField(auto_now=True, auto_now_add=False)
verified = models.BooleanField(default = False)
is_active = models.BooleanField(default = False)
email_confirmed = models.BooleanField(default=False)
USERNAME_FIELD = 'username'
REQUIRED_FIELD = ['firstname','lastname']
'''def get_absolute_url(self):
return reverse('details', kwargs={'pk':self.pk})'''
def __str__(self):
return self.email
| [
"[email protected]"
] | |
db865ebb216e0b6efcc8947e4b2809b42095bcd7 | 07c3034f7b6ef88e08430b8c908613ea0091f1b6 | /Labs/Lab 7.py | 540d228ec6bf38673cdb8a3ea31a04a997b03f8a | [] | no_license | HelalChow/Data-Structures | 494b8fabcdf1cac20db78055547ce4160ad6a018 | 3b3401cbd23e01b2d7d95dfc3b95451ca179cee9 | refs/heads/master | 2021-10-19T00:24:23.589224 | 2019-02-15T22:34:05 | 2019-02-15T22:34:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,395 | py | #Question 1
def powers_of_two(num):
for i in range(1,num+1):
yield 2**i
#Question 2
def decimal_to_binary(num):
    # Recursive decimal-to-binary conversion: emit the high-order bits first.
    if num <= 1:
        return str(num)
    return decimal_to_binary(num // 2) + str(num % 2)
#Question 3
def partition(lst):
low = 1
high = len(lst)-1
pivot = lst[0]
while low <= high:
if lst[low]>pivot>lst[high]:
lst[low],lst[high]=lst[high],lst[low]
low+=1
high-=1
elif lst[low]<pivot>lst[high]:
low+=1
elif lst[low]>pivot<lst[high]:
high-=1
else:
low+=1
high-=1
lst[low-1],lst[0]=lst[0],lst[low-1]
lst = [54, 26, 93, 17, 77, 31, 44, 55, 20]
partition(lst)
print(lst)
class MyString():
def __init__(self,str_input):
self.str = str_input
def __len__(self):
return len(self.str)
    def __iter__(self):
        # Yield one character at a time instead of returning on the first pass.
        for i in range(len(self)):
            yield self.str[i]
    def __repr__(self):
        return self.str
    def __getitem__(self, ind):
        if ind >= len(self):
            raise IndexError("Index is out of range")
        return self.str[ind]
    def __add__(self, other):
        # Concatenate with another MyString (or anything str() can render).
        return MyString(self.str + str(other))
    def __radd__(self, other):
        return MyString(str(other) + self.str)
    def upper(self):
        return MyString(self.str.upper())
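# Hypothetical quick check of the completed MyString class (added for illustration):
s = MyString("hello")
print(len(s), s[1], s + MyString(" world"), s.upper())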
| [
"[email protected]"
] | |
cf2d48e9ab4da0ece9b9ee4ddd631919aa2261e1 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2790/60720/237340.py | ea1258231b7d219c2f8c047b47fb50bd69d1f997 | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 455 | py | list=input().split()
size1=int(list[0])
size2=int(list[1])
list1=input().split()
list2=input().split()
for i in range(size1):
list1[i]=int(list1[i])
for i in range(size2):
list2[i]=int(list2[i])
count=0
for i in range(size2-1):
for j in range(size1):
if list1[j]<=list2[i]:
count=count+1
print(count,end=' ')
count=0
for j in range(size1):
if list1[j]<=list2[size2-1]:
count=count+1
print(count) | [
"[email protected]"
] | |
e8d4272f4fa006bd29293b9b21adeec8080c6d47 | 3b15dc211cb6c034f4d843b1bbc540f1699182f7 | /爬虫/多线程爬取.py | 5fb5a954f28cf987da7a0401528b87219b49f4f9 | [] | no_license | Hanlen520/- | eea27d5a1272272a2f47f9070f285e44300ab0d2 | 308d3401ff368cd11f4aeac7949d57c3136f2b6e | refs/heads/master | 2023-03-15T19:18:25.168665 | 2020-07-05T14:39:41 | 2020-07-05T14:39:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,074 | py | import requests
import re
import threading
import os
num = 1
def get_html_text_one(url, headers):
request = requests.get(url, headers=headers)
if request.status_code == 200:
return request.text
return request
def get_images_one(html, headers):
    urls = re.findall('><a href="(.*?)" alt=".*?".*?"><img src=.*? alt="" data-src=.*? data-nclazyload="true"></a>',html)
    # Create the output directory first; the original wrote into it without creating it.
    dir_name = '你与星河,皆可收藏'
    if not os.path.exists(dir_name):
        os.mkdir(dir_name)
    for url in urls:
        file_name = url.split('/')[-1]
        global num
        print('Downloading image {}'.format(num))
        request = requests.get(url, headers=headers)
        with open(dir_name + '/' + file_name, 'wb') as f:
f.write(request.content)
num += 1
print("=======================================================================================================")
def get_images_two():
headers = {
'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.106 Safari/537.36'
}
dir_name = '妹子自拍'
if not os.path.exists(dir_name):
os.mkdir(dir_name)
global num
for i in range(1, 3):
urls = 'https://www.mzitu.com/jiepai/comment-page-{}/#comments'.format(i)
request = requests.get(urls, headers=headers)
html = request.text
urs = re.findall('<p><img class=".*?" src=".*?" data-original="(.*?)" width="640" height="auto"/></p>', html)
for uls in urs:
file_name = uls.split('/')[-1]
            print('Downloading p1 image {}'.format(num))
request_a = requests.get(uls, headers=headers)
with open(dir_name + '/' + file_name, 'wb') as f:
f.write(request_a.content)
num += 1
def get_images_two_three():
headers = {
'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.106 Safari/537.36'
}
dir_name = '妹子自拍'
if not os.path.exists(dir_name):
os.mkdir(dir_name)
global num
for i in range(4, 6):
urls = 'https://www.mzitu.com/jiepai/comment-page-{}/#comments'.format(i)
request = requests.get(urls, headers=headers)
html = request.text
urs = re.findall('<p><img class=".*?" src=".*?" data-original="(.*?)" width="640" height="auto"/></p>', html)
for uls in urs:
file_name = uls.split('/')[-1]
            print('Downloading p1 image {}'.format(num))
request_a = requests.get(uls, headers=headers)
with open(dir_name + '/' + file_name, 'wb') as f:
f.write(request_a.content)
num += 1
def main():
headers = {
'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.106 Safari/537.36'
}
url = 'https://www.vmgirls.com/3545.html'
html = get_html_text_one(url, headers)
get_images_one(html, headers)
def main2():
get_images_two()
def main3():
get_images_two_three()
if __name__ == '__main__':
    # Run the three crawlers in parallel threads, matching the file's multi-threaded crawling intent.
    threads = [threading.Thread(target=func) for func in (main, main2, main3)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
"[email protected]"
] | |
9d822d260cb327e22781dc7abc5ad41d19df961c | 01c2254dcd0547058e66dd0ca0ecd9e2941e3cb9 | /billingstack/openstack/common/jsonutils.py | e8ab2d559752d171eeb769e1538b0bc0a47621dc | [
"Apache-2.0"
] | permissive | lanve/billingstack | 8ac3c52eca8f1d2dc46753d043ff09b55f536fcf | dfeb22992d503e1d407a55b7cf1a7658386fefa3 | refs/heads/master | 2021-01-22T15:22:08.731143 | 2014-10-31T10:21:35 | 2014-10-31T10:21:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,458 | py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
'''
JSON related utilities.
This module provides a few things:
1) A handy function for getting an object down to something that can be
JSON serialized. See to_primitive().
2) Wrappers around loads() and dumps(). The dumps() wrapper will
automatically use to_primitive() for you if needed.
3) This sets up anyjson to use the loads() and dumps() wrappers if anyjson
is available.
'''
import datetime
import functools
import inspect
import itertools
import json
try:
import xmlrpclib
except ImportError:
# NOTE(jd): xmlrpclib is not shipped with Python 3
xmlrpclib = None
import six
from billingstack.openstack.common import gettextutils
from billingstack.openstack.common import importutils
from billingstack.openstack.common import timeutils
netaddr = importutils.try_import("netaddr")
_nasty_type_tests = [inspect.ismodule, inspect.isclass, inspect.ismethod,
inspect.isfunction, inspect.isgeneratorfunction,
inspect.isgenerator, inspect.istraceback, inspect.isframe,
inspect.iscode, inspect.isbuiltin, inspect.isroutine,
inspect.isabstract]
_simple_types = (six.string_types + six.integer_types
+ (type(None), bool, float))
def to_primitive(value, convert_instances=False, convert_datetime=True,
level=0, max_depth=3):
"""Convert a complex object into primitives.
Handy for JSON serialization. We can optionally handle instances,
but since this is a recursive function, we could have cyclical
data structures.
To handle cyclical data structures we could track the actual objects
visited in a set, but not all objects are hashable. Instead we just
track the depth of the object inspections and don't go too deep.
Therefore, convert_instances=True is lossy ... be aware.
"""
# handle obvious types first - order of basic types determined by running
# full tests on nova project, resulting in the following counts:
# 572754 <type 'NoneType'>
# 460353 <type 'int'>
# 379632 <type 'unicode'>
# 274610 <type 'str'>
# 199918 <type 'dict'>
# 114200 <type 'datetime.datetime'>
# 51817 <type 'bool'>
# 26164 <type 'list'>
# 6491 <type 'float'>
# 283 <type 'tuple'>
# 19 <type 'long'>
if isinstance(value, _simple_types):
return value
if isinstance(value, datetime.datetime):
if convert_datetime:
return timeutils.strtime(value)
else:
return value
# value of itertools.count doesn't get caught by nasty_type_tests
# and results in infinite loop when list(value) is called.
if type(value) == itertools.count:
return six.text_type(value)
# FIXME(vish): Workaround for LP bug 852095. Without this workaround,
# tests that raise an exception in a mocked method that
# has a @wrap_exception with a notifier will fail. If
# we up the dependency to 0.5.4 (when it is released) we
# can remove this workaround.
if getattr(value, '__module__', None) == 'mox':
return 'mock'
if level > max_depth:
return '?'
# The try block may not be necessary after the class check above,
# but just in case ...
try:
recursive = functools.partial(to_primitive,
convert_instances=convert_instances,
convert_datetime=convert_datetime,
level=level,
max_depth=max_depth)
if isinstance(value, dict):
return dict((k, recursive(v)) for k, v in value.iteritems())
elif isinstance(value, (list, tuple)):
return [recursive(lv) for lv in value]
# It's not clear why xmlrpclib created their own DateTime type, but
# for our purposes, make it a datetime type which is explicitly
# handled
if xmlrpclib and isinstance(value, xmlrpclib.DateTime):
value = datetime.datetime(*tuple(value.timetuple())[:6])
if convert_datetime and isinstance(value, datetime.datetime):
return timeutils.strtime(value)
elif isinstance(value, gettextutils.Message):
return value.data
elif hasattr(value, 'iteritems'):
return recursive(dict(value.iteritems()), level=level + 1)
elif hasattr(value, '__iter__'):
return recursive(list(value))
elif convert_instances and hasattr(value, '__dict__'):
# Likely an instance of something. Watch for cycles.
# Ignore class member vars.
return recursive(value.__dict__, level=level + 1)
elif netaddr and isinstance(value, netaddr.IPAddress):
return six.text_type(value)
else:
if any(test(value) for test in _nasty_type_tests):
return six.text_type(value)
return value
except TypeError:
# Class objects are tricky since they may define something like
# __iter__ defined but it isn't callable as list().
return six.text_type(value)
def dumps(value, default=to_primitive, **kwargs):
return json.dumps(value, default=default, **kwargs)
def loads(s):
return json.loads(s)
def load(s):
return json.load(s)
try:
import anyjson
except ImportError:
pass
else:
anyjson._modules.append((__name__, 'dumps', TypeError,
'loads', ValueError, 'load'))
anyjson.force_implementation(__name__)
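# Hypothetical usage sketch (not part of the original module): dumps() wires to_primitive()
# in as the default encoder, so values the stdlib json module rejects (datetimes, objects
# with __dict__, ...) are reduced to primitives first; the payload below is illustrative.
if __name__ == '__main__':
    payload = {'created_at': datetime.datetime(2014, 1, 1, 12, 0, 0), 'count': 3}
    print(dumps(payload))         # datetime rendered through timeutils.strtime()
    print(loads('{"count": 3}'))  # thin wrapper around json.loads()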
| [
"[email protected]"
] | |
ab19e7a013264bab618abc32f5c3b27d2161ed9d | 3ef70fe63acaa665e2b163f30f1abd0a592231c1 | /stackoverflow/venv/lib/python3.6/site-packages/pip-19.0.3-py3.6.egg/pip/_vendor/html5lib/_utils.py | 0703afb38b13bf56998d43aa542dcea9839d8132 | [
"MIT"
] | permissive | wistbean/learn_python3_spider | 14914b63691ac032955ba1adc29ad64976d80e15 | 40861791ec4ed3bbd14b07875af25cc740f76920 | refs/heads/master | 2023-08-16T05:42:27.208302 | 2023-03-30T17:03:58 | 2023-03-30T17:03:58 | 179,152,420 | 14,403 | 3,556 | MIT | 2022-05-20T14:08:34 | 2019-04-02T20:19:54 | Python | UTF-8 | Python | false | false | 4,015 | py | from __future__ import absolute_import, division, unicode_literals
from types import ModuleType
from pip._vendor.six import text_type
try:
import xml.etree.cElementTree as default_etree
except ImportError:
import xml.etree.ElementTree as default_etree
__all__ = ["default_etree", "MethodDispatcher", "isSurrogatePair",
"surrogatePairToCodepoint", "moduleFactoryFactory",
"supports_lone_surrogates"]
# Platforms not supporting lone surrogates (\uD800-\uDFFF) should be
# caught by the below test. In general this would be any platform
# using UTF-16 as its encoding of unicode strings, such as
# Jython. This is because UTF-16 itself is based on the use of such
# surrogates, and there is no mechanism to further escape such
# escapes.
try:
_x = eval('"\\uD800"') # pylint:disable=eval-used
if not isinstance(_x, text_type):
# We need this with u"" because of http://bugs.jython.org/issue2039
_x = eval('u"\\uD800"') # pylint:disable=eval-used
assert isinstance(_x, text_type)
except: # pylint:disable=bare-except
supports_lone_surrogates = False
else:
supports_lone_surrogates = True
class MethodDispatcher(dict):
"""Dict with 2 special properties:
On initiation, keys that are lists, sets or tuples are converted to
multiple keys so accessing any one of the items in the original
list-like object returns the matching value
md = MethodDispatcher({("foo", "bar"):"baz"})
md["foo"] == "baz"
A default value which can be set through the default attribute.
"""
def __init__(self, items=()):
# Using _dictEntries instead of directly assigning to self is about
# twice as fast. Please do careful performance testing before changing
# anything here.
_dictEntries = []
for name, value in items:
if isinstance(name, (list, tuple, frozenset, set)):
for item in name:
_dictEntries.append((item, value))
else:
_dictEntries.append((name, value))
dict.__init__(self, _dictEntries)
assert len(self) == len(_dictEntries)
self.default = None
def __getitem__(self, key):
return dict.get(self, key, self.default)
# Some utility functions to deal with weirdness around UCS2 vs UCS4
# python builds
def isSurrogatePair(data):
return (len(data) == 2 and
ord(data[0]) >= 0xD800 and ord(data[0]) <= 0xDBFF and
ord(data[1]) >= 0xDC00 and ord(data[1]) <= 0xDFFF)
def surrogatePairToCodepoint(data):
char_val = (0x10000 + (ord(data[0]) - 0xD800) * 0x400 +
(ord(data[1]) - 0xDC00))
return char_val
# Module Factory Factory (no, this isn't Java, I know)
# Here to stop this being duplicated all over the place.
def moduleFactoryFactory(factory):
moduleCache = {}
def moduleFactory(baseModule, *args, **kwargs):
if isinstance(ModuleType.__name__, type("")):
name = "_%s_factory" % baseModule.__name__
else:
name = b"_%s_factory" % baseModule.__name__
kwargs_tuple = tuple(kwargs.items())
try:
return moduleCache[name][args][kwargs_tuple]
except KeyError:
mod = ModuleType(name)
objs = factory(baseModule, *args, **kwargs)
mod.__dict__.update(objs)
if "name" not in moduleCache:
moduleCache[name] = {}
if "args" not in moduleCache[name]:
moduleCache[name][args] = {}
if "kwargs" not in moduleCache[name][args]:
moduleCache[name][args][kwargs_tuple] = {}
moduleCache[name][args][kwargs_tuple] = mod
return mod
return moduleFactory
def memoize(func):
cache = {}
def wrapped(*args, **kwargs):
key = (tuple(args), tuple(kwargs.items()))
if key not in cache:
cache[key] = func(*args, **kwargs)
return cache[key]
return wrapped
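# Hypothetical usage sketch (not part of the original module): MethodDispatcher fans
# list-like keys out to multiple entries with a settable default, and memoize caches
# results per (args, kwargs); the example function below is illustrative only.
if __name__ == '__main__':
    md = MethodDispatcher([(("foo", "bar"), "baz")])
    md.default = "missing"
    print(md["foo"], md["unknown"])  # baz missing
    @memoize
    def double_all(items):
        return [i * 2 for i in items]
    print(double_all((1, 2)) is double_all((1, 2)))  # True: second call hits the cache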
| [
"[email protected]"
] | |
ec5e14ca7ee91e5a11c352f1f40d86319a8f3aca | 6e8f2e28479566dbaa338300b2d61f784ff83f97 | /.history/code/live_20210421141743.py | fc17bb0a93cda5b645c4b46e5443cb671d1f180d | [] | no_license | eeng5/CV-final-project | 55a7d736f75602858233ebc380c4e1d67ab2b866 | 580e28819560b86f6974959efb1d31ef138198fc | refs/heads/main | 2023-04-09T21:28:21.531293 | 2021-04-21T19:57:22 | 2021-04-21T19:57:22 | 352,703,734 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,640 | py | import os
import cv2
import sys
import numpy as np
from models import SimpleModel
from preprocess import Datasets
import hyperparameters as hp
import tensorflow as tf
from skimage.transform import resize
from PIL import Image, ImageFont, ImageDraw
from scipy.spatial import distance as dist
from imutils import face_utils
from imutils.video import VideoStream
import fastai
import fastai.vision
import imutils
import argparse
import time
import dlib
""" This file is a live video emotion detection application. To run simply activate the virtual environment in the code dirrectory via:
$ source cs14_30/bin/activate
Then run teh below command in the virtual environment:
$ python3 live.py
"""
class LiveApp:
    def __init__(self, data_path):
        # Give __init__ a body so the class definition is valid Python.
        self.data_path = data_path
def doLive(self):
weights_str = "/Users/elizabethwang/Desktop/CS1430/CV-final-project/code/checkpoints/simple_model/041321-113618/your.weights.e015-acc0.6121.h5"
os.chdir(sys.path[0])
model = SimpleModel()
model(tf.keras.Input(shape=(hp.img_size, hp.img_size,3)))
model.load_weights(weights_str, by_name=False)
model.compile(
optimizer=model.optimizer,
loss=model.loss_fn,
metrics=["sparse_categorical_accuracy"],
)
face_cascade = cv2.CascadeClassifier(cv2.data.haarcascades + 'haarcascade_frontalface_default.xml')
vs = VideoStream(src=0).start()
start = time.perf_counter()
data = []
time_value = 0
out = cv2.VideoWriter(
"liveoutput.avi", cv2.VideoWriter_fourcc("M", "J", "P", "G"), 10, (450, 253)
)
while True:
frame = vs.read()
frame = imutils.resize(frame, width=450)
gray = frame
#gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
face_coord = face_cascade.detectMultiScale(gray, 1.1, 5, minSize=(48, 48))
for coords in face_coord:
X, Y, w, h = coords
H, W, _ = frame.shape
X_1, X_2 = (max(0, X - int(w)), min(X + int(1.3 * w), W))
Y_1, Y_2 = (max(0, Y - int(0.1 * h)), min(Y + int(1.3 * h), H))
img_cp = gray[Y_1:Y_1+48, X_1:X_1+48].copy()
img_mod = createPixelArray(img_cp)
img_mod = np.expand_dims(img_mod, 0)
prediction = model.predict(img_mod)
p = np.argmax(prediction)
caption = ''
if (p == 0):
caption = 'Angry'
elif (p == 1):
caption = 'Disgust'
elif (p == 2):
caption = 'Fear'
elif (p == 3):
caption = 'Happy'
elif (p == 4):
caption = 'Sad'
elif (p == 5):
caption = 'Surprise'
elif (p == 6):
caption = 'Neutral'
cv2.rectangle(
img=frame,
pt1=(X_1, Y_1),
pt2=(X_2, Y_2),
color=(128, 128, 0),
thickness=2,
)
cv2.putText(
frame,
caption,
(10, frame.shape[0] - 25),
cv2.FONT_HERSHEY_SIMPLEX,
0.7,
(225, 255, 255),
2,)
cv2.imshow("frame", frame)
out.write(frame)
if cv2.waitKey(1) & 0xFF == ord("q"):
break
vs.stop()
out.release()
cv2.destroyAllWindows() | [
"[email protected]"
] |