Dataset schema (one row per source file):

| Column | Type | Range / values |
|---|---|---|
| blob_id | string | length 40 |
| directory_id | string | length 40 |
| path | string | length 3–616 |
| content_id | string | length 40 |
| detected_licenses | list | length 0–112 |
| license_type | string | 2 classes |
| repo_name | string | length 5–115 |
| snapshot_id | string | length 40 |
| revision_id | string | length 40 |
| branch_name | string | 777 classes |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 – 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 – 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 – 2023-09-06 01:08:06 |
| github_id | int64 | 4.92k – 681M, nullable |
| star_events_count | int64 | 0 – 209k |
| fork_events_count | int64 | 0 – 110k |
| gha_license_id | string | 22 classes |
| gha_event_created_at | timestamp[us] | 2012-06-04 01:52:49 – 2023-09-14 21:59:50, nullable |
| gha_created_at | timestamp[us] | 2008-05-22 07:58:19 – 2023-08-21 12:35:19, nullable |
| gha_language | string | 149 classes |
| src_encoding | string | 26 classes |
| language | string | 1 value |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 3 – 10.2M |
| extension | string | 188 classes |
| content | string | length 3 – 10.2M |
| authors | list | length 1 |
| author_id | string | length 1–132 |
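Assuming this is a Hugging Face dataset card, a minimal sketch of streaming rows and reading the columns above with the `datasets` library; the dataset id "org/dataset-name" is a placeholder, since the card itself does not name the dataset:

from datasets import load_dataset  # pip install datasets

# Stream rows instead of downloading everything; the id below is hypothetical.
ds = load_dataset("org/dataset-name", split="train", streaming=True)
row = next(iter(ds))
print(row["repo_name"], row["path"], row["license_type"], row["length_bytes"])
print(row["content"][:200])  # the source file text lives in the `content` column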
c14936fab65953dbc7436882b80b1d347adc2081 | 62e4a186dc4b6294748ea6f1b6432219b5acf5ad | /backend/home/migrations/0001_load_initial_data.py | 46847f2ef86805729f40bbd28060eabdfb9c3bef | []
| no_license | crowdbotics-apps/tester-app-31668 | 60413ed775d07c8f958bb30e5398b1941722a8bb | d3e5ba380ee2041c07e73b7e8b45dbe3c9955a03 | refs/heads/master | 2023-09-01T07:48:38.823415 | 2021-10-26T02:03:42 | 2021-10-26T02:03:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 538 | py | from django.db import migrations
def create_site(apps, schema_editor):
    Site = apps.get_model("sites", "Site")
    custom_domain = "tester-app-31668.botics.co"
    site_params = {
        "name": "Tester App",
    }
    if custom_domain:
        site_params["domain"] = custom_domain
    Site.objects.update_or_create(defaults=site_params, id=1)


class Migration(migrations.Migration):
    dependencies = [
        ("sites", "0002_alter_domain_unique"),
    ]
    operations = [
        migrations.RunPython(create_site),
    ]
| [
"[email protected]"
]
| |
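One caveat worth noting about the migration above: `migrations.RunPython(create_site)` is given no reverse callable, so unapplying the migration raises IrreversibleError. A minimal sketch of the reversible form, using Django's documented `RunPython.noop`:

operations = [
    migrations.RunPython(create_site, migrations.RunPython.noop),  # no-op when migrating backwards
]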
21df8591a5c6f1ead6ded6ad17b51db1a42ef0ca | ac5e52a3fc52dde58d208746cddabef2e378119e | /exps-gsn-edf/gsn-edf_ut=2.0_rd=0.8_rw=0.04_rn=4_u=0.075-0.35_p=harmonic-2/sched=RUN_trial=65/params.py | a4991794aa98f582d2bdb2947fdbb119be76f15a | []
| no_license | ricardobtxr/experiment-scripts | 1e2abfcd94fb0ef5a56c5d7dffddfe814752eef1 | 7bcebff7ac2f2822423f211f1162cd017a18babb | refs/heads/master | 2023-04-09T02:37:41.466794 | 2021-04-25T03:27:16 | 2021-04-25T03:27:16 | 358,926,457 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 254 | py | {'cpus': 4,
'duration': 30,
'final_util': '2.016476',
'max_util': '2.0',
'periods': 'harmonic-2',
'release_master': False,
'res_distr': '0.8',
'res_nmb': '4',
'res_weight': '0.04',
'scheduler': 'GSN-EDF',
'trial': 65,
'utils': 'uni-medium-3'}
| [
"[email protected]"
]
| |
6dd1cc97f758079d3f409b647478f0d62e72da99 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/nouns/_joey.py | bc33ba9a42bf180b251f994f39bb062ec82865e5 | [
"MIT"
]
| permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 292 | py |
#class header
class _JOEY():
    def __init__(self,):
        self.name = "JOEY"
        self.definitions = [u'a young kangaroo']
        self.parents = []
        self.childen = []
        self.properties = []
        self.jsondata = {}
        self.specie = 'nouns'

    def run(self, obj1 = [], obj2 = []):
        return self.jsondata
| [
"[email protected]"
]
| |
0eef22e43e4999b7e13da85bee5a3719f09f6108 | 13f4a06cd439f579e34bf38406a9d5647fe7a0f3 | /nn_ns/Bijection/BijectiveNumeration.py | d21afc99a4a5f19a282db7981ccf2d6d2a195f95 | []
| no_license | edt-yxz-zzd/python3_src | 43d6c2a8ef2a618f750b59e207a2806132076526 | 41f3a506feffb5f33d4559e5b69717d9bb6303c9 | refs/heads/master | 2023-05-12T01:46:28.198286 | 2023-05-01T13:46:32 | 2023-05-01T13:46:32 | 143,530,977 | 2 | 2 | null | null | null | null | UTF-8 | Python | false | false | 2,325 | py |
'''
https://en.wikipedia.org/wiki/Bijective_numeration

Radix <- PInt
BiDigit = [1..Radix]
# compare Digit = [0..Radix-1]

# little-endian
bidigitsLE2uint :: [BiDigit] -> UInt
bidigitsLE2uint ds = f ds 1 where
    f [] weight = 0
    f (h:ts) weight = h*weight + f ts (weight*Radix)
bidigitsLE2uint = f where
    f [] = 0
    f (h:ts) = h + (f ts) * Radix

bidivmod :: UInt -> PInt -> (UInt, PInt)
bidivmod n d = (q, r) where
    # q = ceil(n/d) - 1 = floor((n+d-1)/d) - 1 = floor((n-1)/d)
    q = (n-1)//d
    r = n - q*d

uint2bidigitsLE :: UInt -> [BiDigit]
uint2bidigitsLE = f where
    f 0 = []
    f n = r : f q where
        (q, r) = bidivmod n Radix
'''
__all__ = '''
    bidigits2uint__little_endian
    uint2bidigits__little_endian
    '''.split()

from .ArbitraryRadixNumber import \
    number2iter_arbitrary_radix_reprLE, arbitrary_radix_reprBE2number


def bidivmod(n, d):
    #ssert n >= 0
    #ssert d >= 1
    q, r = divmod(n-1, d)
    r += 1
    return q, r
    # unreachable alternative kept from the original:
    q = (n-1)//d
    r = n - q*d


def uint2iter_bidigitsLE(radix, u):
    assert u >= 0
    assert radix >= 1 # need not 2
    return number2iter_arbitrary_radix_reprLE(u, radix, 0, bidivmod)
    ''' bug: should comment below code to disable 'yield'!!
    while u > 0:
        u, r = bidivmod(u, radix)
        yield r
    '''


def uint2bidigitsLE(radix, u):
    return tuple(uint2iter_bidigitsLE(radix, u))


def bidigitsLE2uint(radix, bidigits):
    # little-endian
    assert all(1<=d<=radix for d in bidigits)
    return arbitrary_radix_reprBE2number(reversed(bidigits), radix, 0)
    # unreachable alternative kept from the original:
    u = 0
    for d in reversed(bidigits):
        u *= radix
        u += d
    return u


bidigits2uint__little_endian = bidigitsLE2uint
uint2bidigits__little_endian = uint2bidigitsLE


def test():
    for radix in range(1, 5):
        for u in range(100):
            bs = uint2bidigitsLE(radix, u)
            u_ = bidigitsLE2uint(radix, bs)
            #rint(u, bs, u_)
            assert u == u_
    from itertools import product
    for radix in range(1, 5):
        for L in range(5):
            for bs in product(range(1, radix+1), repeat=L):
                u = bidigitsLE2uint(radix, bs)
                bs_ = uint2bidigitsLE(radix, u)
                assert bs == bs_


if __name__ == '__main__':
    print('test BijectiveNumeration.py')
    test()
| [
"[email protected]"
]
| |
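A self-contained sketch of the round trip this module implements, with `bidivmod` inlined so it runs without the `ArbitraryRadixNumber` import. In bijective base k the digits run from 1 to k and zero is the empty digit string:

def bidivmod(n, d):
    # q = floor((n-1)/d) and r = n - q*d, so the digit r is always in 1..d
    q, r = divmod(n - 1, d)
    return q, r + 1

def uint2bidigitsLE(radix, u):
    digits = []
    while u > 0:
        u, r = bidivmod(u, radix)
        digits.append(r)
    return digits

# 10 in bijective base 2 is [2, 2, 1] little-endian: 2 + 2*2 + 1*4 == 10
assert uint2bidigitsLE(2, 10) == [2, 2, 1]
# radix 1 is plain unary, which ordinary positional notation cannot express
assert uint2bidigitsLE(1, 4) == [1, 1, 1, 1]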
b10b08af4d84840fac55aead8c737624c5df79f2 | 5285d7071a74d8b56ad45a2a5b1f7d49ee672916 | /postexp/module.py | d6ac8cba41b03bee27c3afab06044373853111ca | []
| no_license | krishpranav/postexp | 9d4e1bf3be72ee27418449b4bc824ebdf9be1b44 | 348d7813a44585c9d34dc1d030380af0bd90a8f3 | refs/heads/master | 2023-04-20T04:33:32.173926 | 2021-05-06T05:23:02 | 2021-05-06T05:23:02 | 364,470,232 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 810 | py | #!/usr/bin/env python3
# imports
import os
import pkg_resources
from importlib import import_module
INDEX_FILE = 'modindex.txt'
client_commands = {}
server_commands = {}
def client_handler(cmd):
    def decorate(func):
        client_commands[cmd] = func
    return decorate


def server_handler(cmd):
    def decorate(func):
        server_commands[cmd] = func
    return decorate


def load_modules():
    # resource_string() returns bytes on Python 3, so decode before splitting
    for fname in pkg_resources.resource_string(__name__, INDEX_FILE).decode().split():
        if fname.endswith('.py'):
            mod = os.path.splitext(fname)[0]
            if mod == '__init__':
                continue
            elif mod in server_commands.keys():
                raise Exception('duplicate module detected: {}'.format(mod))
            import_module('modules.' + mod)
| [
"[email protected]"
]
| |
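A short usage sketch for the registry above; the import path and the 'hello' command are hypothetical. Note that `decorate()` returns nothing, so the module-level name of a decorated function becomes None and the callable survives only inside the registry dict:

from postexp.module import server_handler, server_commands  # path assumed from the repo layout

@server_handler('hello')           # hypothetical plugin command
def hello(*args):
    return 'hello from a plugin'

print(hello)                       # None: decorate() did not return func
print(server_commands['hello']())  # -> 'hello from a plugin'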
85b2e627fa812d32731797a4a20c53bbac3ec85c | d966694f2eb0fe09582716cf6ce60dba6f5370b8 | /B站/twoWEB表单/Flask_WTF_demo2.py | 6d57de3e3a23b889dded1637680484b45e2d9c68 | []
| no_license | lijianmin01/Flask_study | 217f1e9a7cd3511407144b6daa5cf13e962a6307 | 98639f471a88d5349a38564512f35c660057b9de | refs/heads/main | 2023-01-18T16:29:27.143408 | 2020-11-26T10:22:28 | 2020-11-26T10:22:28 | 315,005,372 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 748 | py | from flask import Flask,render_template,request
app = Flask(__name__)


@app.route("/", methods=['POST', 'GET'])
def index():
    message = None
    if request.method == 'POST':
        username = request.form.get("username")
        password = request.form.get("password")
        password2 = request.form.get("password2")
        # 3. Check that all parameters are filled in & that the passwords match
        if not all([username, password, password2]):
            print("Parameters incomplete")
            message = "Incomplete parameters"
        elif password2 != password:
            message = "The two passwords do not match"
        else:
            message = "success"
    return render_template('index.html', message=message)


if __name__ == '__main__':
    app.run(debug=True)
| [
"[email protected]"
]
| |
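A quick way to exercise the validation branches above is Flask's built-in test client; this sketch assumes the file is importable as `Flask_WTF_demo2` and that a `templates/index.html` exists for `render_template` to find:

from Flask_WTF_demo2 import app  # module name assumed from the file name

client = app.test_client()
resp = client.post('/', data={
    'username': 'alice',
    'password': 'secret',
    'password2': 'secret',
})
print(resp.status_code)  # 200 once index.html renders with message='success'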
2bbc03087bb2db7a50467af5b4a2381fcdc265ce | 093b9569be9d1c4e5daf92efbebc38f680917b2d | /.history/base/views_20210829091127.py | 806d5e71eb0dda642f9ec037408d383e22ca9a91 | []
| no_license | Justin-Panagos/todoList | 95b1e97ff71af1b0be58e7f8937d726a687cea4d | 10539219b59fcea00f8b19a406db3d4c3f4d289e | refs/heads/master | 2023-08-04T13:27:13.309769 | 2021-08-29T14:06:43 | 2021-08-29T14:06:43 | 400,827,602 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,191 | py | from django.shortcuts import render
from django.views.generic.list import ListView
from django.views.generic.detail import DetailView
from django.views.generic.edit import CreateView, UpdateView, DeleteView, FormView
from django.urls import reverse_lazy
from django.contrib.auth.views import LoginView
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth import login
from .models import Task
# login view
class CustoomLoginView(LoginView):
    template_name = 'base/login.html'
    fields = '__all__'
    redirect_authenticated_user = True

    def get_success_url(self):
        return reverse_lazy('tasks')


class RegisterPage(FormView):
    template_name = 'base/register.html'
    form_class = UserCreationForm
    redirect_authenticated_user = True
    success_url = reverse_lazy('tasks')

    def form_validate(self, form):
        user = form.save()
        if user is not None:
            login(self.request, user)
        return super(RegisterPage, self).form_validate(form)


class TaskList(LoginRequiredMixin, ListView):
    model = Task
    context_object_name = 'tasks'

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context['tasks'] = context['tasks'].filter(user=self.request.user)
        context['count'] = context['tasks'].filter(complete=False).count()
        return context


class TaskDetail(LoginRequiredMixin, DetailView):
    model = Task
    context_object_name = 'task'
    template_name = 'base/task.html'


class TaskCreate(LoginRequiredMixin, CreateView):
    model = Task
    fields = ['title', 'description', 'complete']
    success_url = reverse_lazy('tasks')

    def form_valid(self, form):
        form.instance.user = self.request.user
        return super(TaskCreate, self).form_valid(form)


class TaskUpdate(LoginRequiredMixin, UpdateView):
    model = Task
    fields = ['title', 'description', 'complete']
    success_url = reverse_lazy('tasks')


class TaskDelete(LoginRequiredMixin, DeleteView):
    model = Task
    context_object_name = 'task'
    success_url = reverse_lazy('tasks')
| [
"[email protected]"
]
| |
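A hypothetical urls.py wiring for the class-based views above; only the 'tasks' route name is taken from the file (via reverse_lazy('tasks')), every other path and name is illustrative:

from django.urls import path
from .views import (CustoomLoginView, RegisterPage, TaskList, TaskDetail,
                    TaskCreate, TaskUpdate, TaskDelete)

urlpatterns = [
    path('login/', CustoomLoginView.as_view(), name='login'),
    path('register/', RegisterPage.as_view(), name='register'),
    path('', TaskList.as_view(), name='tasks'),
    path('task/<int:pk>/', TaskDetail.as_view(), name='task'),
    path('task-create/', TaskCreate.as_view(), name='task-create'),
    path('task-update/<int:pk>/', TaskUpdate.as_view(), name='task-update'),
    path('task-delete/<int:pk>/', TaskDelete.as_view(), name='task-delete'),
]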
01c5946ab4cfc183f51a78e89ad9061896b00355 | bc531455ed161db04aedfa79f6daae32efefa321 | /benchmarks/datasets/openml_sylvine/info.py | 7f5014ae810696f8e166ade6a72d96e4a26c81ab | []
| no_license | mindsdb/benchmarks | b46f46f59047a2d3f6a0624addb3c281471d6092 | a122a85bb0124da8a469f8ef8baafdf9a70bfb5a | refs/heads/main | 2023-08-30T19:33:17.340246 | 2021-11-01T23:09:54 | 2021-11-01T23:09:54 | 302,911,061 | 5 | 23 | null | 2021-10-30T19:25:48 | 2020-10-10T13:44:16 | Python | UTF-8 | Python | false | false | 467 | py | from benchmarks.helpers.accuracy import balanced_accuracy_score, roc_auc
from benchmarks.datasets.dataset import DatasetInterface
class Dataset(DatasetInterface):
    tags = ['classification']
    learn_kwargs = {}
    num_folds = 5
    accuracy_functions = [balanced_accuracy_score, roc_auc]
    file = 'data.csv'
    active = True
    target = 'class'
    source = 'https://www.openml.org/d/41146'
    license = 'Public Domain Mark 1.0'
    is_open_license = True
| [
"[email protected]"
]
| |
7943b4734e46916c0cd397e85da35134ba15a568 | d09c6ff7114f69a9326883c5b9fcc70fa994e8a2 | /_pycharm_skeletons/renderdoc/CaptureFile.py | 3d2006b1d1d1ce204a85a1fcb00cf6903d7e20b5 | [
"MIT"
]
| permissive | Lex-DRL/renderdoc-py-stubs | 3dd32d23c0c8219bb66387e6078244cff453cd83 | 75d280e4f500ded506f3315a49fc432b37ab4fa6 | refs/heads/master | 2020-08-22T16:55:39.336657 | 2019-11-03T01:21:26 | 2019-11-03T01:21:26 | 216,441,308 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,363 | py | # encoding: utf-8
# module renderdoc
# from P:\1-Scripts\_Python\Py-Autocomplete\renderdoc.pyd
# by generator 1.146
# no doc

# imports
import enum as __enum
from .CaptureAccess import CaptureAccess


class CaptureFile(CaptureAccess):
    """
    A handle to a capture file. Used for simple cheap processing and meta-data fetching
    without opening the capture for analysis.
    """

    def Convert(self, filename, filetype, file, progress): # real signature unknown; restored from __doc__
        """
        Convert(filename, filetype, file, progress)

        Converts the currently loaded file to a given format and saves it to disk.
        This allows converting a native RDC to another representation, or vice-versa converting another
        representation back to native RDC.

        :param str filename: The filename to save to.
        :param str filetype: The format to convert to.
        :param SDFile file: An optional :class:`SDFile` with the structured data to source from. This is
            useful in case the format specifies that it doesn't need buffers, and you already have a
            :class:`ReplayController` open with the structured data. This saves the need to load the file
            again. If ``None`` then structured data will be fetched if not already present and used.
        :param ProgressCallback progress: A callback that will be repeatedly called with an updated progress
            value for the conversion. Can be ``None`` if no progress is desired.
        :return: The status of the conversion operation, whether it succeeded or failed (and how it failed).
        :rtype: ReplayStatus
        """
        pass

    def CopyFileTo(self, filename): # real signature unknown; restored from __doc__
        """
        CopyFileTo(filename)

        When a capture file is opened, an exclusive lock is held on the file on disk. This
        makes it impossible to copy the file to another location at the user's request. Calling this
        function will copy the file on disk to a new location but otherwise won't affect the capture handle.
        The new file will be locked, the old file will be unlocked - to allow deleting if necessary.

        It is invalid to call this function if :meth:`OpenFile` has not previously been called to open the
        file.

        :param str filename: The filename to copy to.
        :return: ``True`` if the operation succeeded.
        :rtype: ``bool``
        """
        pass

    def ErrorString(self): # real signature unknown; restored from __doc__
        """
        ErrorString()

        Returns the human-readable error string for the last error received.

        The error string is not reset by calling this function so it's safe to call multiple times. However
        any other function call may reset the error string to empty.

        :return: The error string, if one exists, or an empty string.
        :rtype: ``str``
        """
        pass

    def GetCaptureFileFormats(self): # real signature unknown; restored from __doc__
        """
        GetCaptureFileFormats()

        Returns the list of capture file formats.

        :return: The list of capture file formats available.
        :rtype: ``list`` of :class:`CaptureFileFormat`
        """
        pass

    def GetStructuredData(self): # real signature unknown; restored from __doc__
        """
        GetStructuredData()

        Returns the structured data for this capture.

        The lifetime of this data is scoped to the lifetime of the capture handle, so it cannot be used
        after the handle is destroyed.

        :return: The structured data representing the file.
        :rtype: SDFile
        """
        pass

    def GetThumbnail(self, type, maxsize): # real signature unknown; restored from __doc__
        """
        GetThumbnail(type, maxsize)

        Retrieves the embedded thumbnail from the capture.

        .. note:: The only supported values for :paramref:`GetThumbnail.type` are :attr:`FileType.JPG`,
            :attr:`FileType.PNG`, :attr:`FileType.TGA`, and :attr:`FileType.BMP`.

        :param FileType type: The image format to convert the thumbnail to.
        :param int maxsize: The largest width or height allowed. If the thumbnail is larger, it's resized.
        :return: The raw contents of the thumbnail, converted to the desired type at the desired max
            resolution.
        :rtype: Thumbnail
        """
        pass

    def LocalReplaySupport(self): # real signature unknown; restored from __doc__
        """
        LocalReplaySupport()

        Queries for how well a particular capture is supported on the local machine.

        If the file was opened with a format other than native ``rdc`` this will always return no
        replay support.

        :return: How much support for replay exists locally.
        :rtype: ReplaySupport
        """
        pass

    def OpenBuffer(self, buffer, filetype, progress): # real signature unknown; restored from __doc__
        """
        OpenBuffer(buffer, filetype, progress)

        Initialises the file handle from a raw memory buffer.

        This may be useful if you don't want to parse the whole file or already have the file in memory.
        For the :paramref:`OpenBuffer.filetype` parameter, see :meth:`OpenFile`.

        :param bytes buffer: The buffer containing the data to process.
        :param str filetype: The format of the given file.
        :param ProgressCallback progress: A callback that will be repeatedly called with an updated progress
            value if an import step occurs. Can be ``None`` if no progress is desired.
        :return: The status of the open operation, whether it succeeded or failed (and how it failed).
        :rtype: ReplayStatus
        """
        pass

    def OpenCapture(self, opts, progress): # real signature unknown; restored from __doc__
        """
        OpenCapture(opts, progress)

        Opens a capture for replay locally and returns a handle to the capture. Only supported
        for handles opened with a native ``rdc`` capture, otherwise this will fail.

        This function will block until the capture is fully loaded and ready.

        Once the replay is created, this :class:`CaptureFile` can be shut down, there is no dependency on it
        by the :class:`ReplayController`.

        :param ReplayOptions opts: The options controlling how the capture should be replayed.
        :param ProgressCallback progress: A callback that will be repeatedly called with an updated progress
            value for the opening. Can be ``None`` if no progress is desired.
        :return: A tuple containing the status of opening the capture, whether success or failure, and the
            resulting :class:`ReplayController` handle if successful.
        :rtype: ``tuple`` of :class:`ReplayStatus` and :class:`ReplayController`.
        """
        pass

    def OpenFile(self, filename, filetype, progress): # real signature unknown; restored from __doc__
        """
        OpenFile(filename, filetype, progress)

        Initialises the capture handle from a file.

        This method supports converting from non-native representations via structured data, by specifying
        the input format in the :paramref:`OpenFile.filetype` parameter. The list of supported formats can be retrieved
        by calling :meth:`GetCaptureFileFormats`.

        ``rdc`` is guaranteed to always be a supported filetype, and will be assumed if the filetype is
        empty or unrecognised.

        :param str filename: The filename of the file to open.
        :param str filetype: The format of the given file.
        :param ProgressCallback progress: A callback that will be repeatedly called with an updated progress
            value if an import step occurs. Can be ``None`` if no progress is desired.
        :return: The status of the open operation, whether it succeeded or failed (and how it failed).
        :rtype: ReplayStatus
        """
        pass

    def RecordedMachineIdent(self): # real signature unknown; restored from __doc__
        """
        RecordedMachineIdent()

        Retrieves the identifying string describing what type of machine created this capture.

        :return: A string identifying the machine ident used to make the capture.
        :rtype: ``str``
        """
        pass

    def SetMetadata(self, driverName, machineIdent, thumbType, thumbWidth, thumbHeight, thumbData): # real signature unknown; restored from __doc__
        """
        SetMetadata(driverName, machineIdent, thumbType, thumbWidth, thumbHeight, thumbData)

        Sets the metadata for this capture handle.

        This function may only be called if the handle is 'empty' - i.e. no file has been opened with
        :meth:`OpenFile` or :meth:`OpenBuffer`.

        .. note:: The only supported values for :paramref:`SetMetadata.thumbType` are :attr:`FileType.JPG`,
            :attr:`FileType.PNG`, :attr:`FileType.TGA`, and :attr:`FileType.BMP`.

        :param str driverName: The name of the driver. Must be a recognised driver name (even if replay
            support for that driver is not compiled in locally).
        :param int machineIdent: The encoded machine identity value. Optional value and can be left to 0, as
            the bits to set are internally defined, so only generally useful if copying a machine ident from
            an existing capture.
        :param FileType thumbType: The file type of the thumbnail. Ignored if
            :paramref:`SetMetadata.thumbData` is empty.
        :param int thumbWidth: The width of the thumbnail. Ignored if :paramref:`SetMetadata.thumbData` is
            empty.
        :param int thumbHeight: The height of the thumbnail. Ignored if :paramref:`SetMetadata.thumbData` is
            empty.
        :param bytes thumbData: The raw data of the thumbnail. If empty, no thumbnail is set.
        """
        pass

    def SetStructuredData(self, file): # real signature unknown; restored from __doc__
        """
        SetStructuredData(file)

        Sets the structured data for this capture.

        This allows calling code to populate a capture out of generated structured data. In combination with
        :meth:`SetMetadata` this allows a purely in-memory creation of a file to be saved out with
        :meth:`Convert`.

        The data is copied internally so it can be destroyed after calling this function.

        :param SDFile file: The structured data representing the file.
        """
        pass

    def Shutdown(self): # real signature unknown; restored from __doc__
        """
        Shutdown()

        Closes the file handle.
        """
        pass

    def __eq__(self, *args, **kwargs): # real signature unknown
        """ Return self==value. """
        pass

    def __ge__(self, *args, **kwargs): # real signature unknown
        """ Return self>=value. """
        pass

    def __gt__(self, *args, **kwargs): # real signature unknown
        """ Return self>value. """
        pass

    def __hash__(self, *args, **kwargs): # real signature unknown
        """ Return hash(self). """
        pass

    def __init__(self, *args, **kwargs): # real signature unknown
        pass

    def __le__(self, *args, **kwargs): # real signature unknown
        """ Return self<=value. """
        pass

    def __lt__(self, *args, **kwargs): # real signature unknown
        """ Return self<value. """
        pass

    @staticmethod # known case of __new__
    def __new__(*args, **kwargs): # real signature unknown
        """ Create and return a new object. See help(type) for accurate signature. """
        pass

    def __ne__(self, *args, **kwargs): # real signature unknown
        """ Return self!=value. """
        pass

    this = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    thisown = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    __dict__ = None # (!) real value is ''
| [
"[email protected]"
]
| |
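A hedged sketch of driving this stub from a script. The method calls follow the signatures documented in the docstrings above; `OpenCaptureFile()`, `ReplayOptions`, `ReplayStatus.Succeeded`, and `ReplaySupport.Supported` are assumed to exist as in RenderDoc's published Python API and are not defined in this stub file:

import renderdoc as rd

cap = rd.OpenCaptureFile()                       # factory assumed from RenderDoc's API
status = cap.OpenFile('example.rdc', '', None)   # empty filetype means assume native rdc
if status == rd.ReplayStatus.Succeeded and \
        cap.LocalReplaySupport() == rd.ReplaySupport.Supported:
    status, controller = cap.OpenCapture(rd.ReplayOptions(), None)
    if status == rd.ReplayStatus.Succeeded:
        controller.Shutdown()                    # release the replay before the file
cap.Shutdown()                                   # closes the file handle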
670c01987b4fdddd3d153bd5c43d346725bfcaab | 319cd2055b4f69ad2a30c6d1785955bad0706015 | /config/acquisition_samc20.py | 39ce6e21f413369755b3c46f5697d986ae2de66b | [
"LicenseRef-scancode-unknown-license-reference",
"ISC",
"LicenseRef-scancode-public-domain"
]
| permissive | fb321/touch | c30f5a0ecef11f74d39392b23627d988c981a54f | d6a11f4ba34c8b08e0a5b624570480a082c7d2b9 | refs/heads/master | 2020-06-27T13:30:13.986106 | 2019-07-18T06:54:30 | 2019-07-18T06:54:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,997 | py | ################################################################################
#### Global Variables ####
################################################################################
global touchChannelSelf
global touchChannelMutual
ptcPinNode = ATDF.getNode("/avr-tools-device-file/devices/device/peripherals/module@[name=\"PTC\"]/instance/parameters")
ptcPinValues = []
ptcPinValues = ptcPinNode.getChildren()
touchChannelSelf = ptcPinValues[6].getAttribute("value")
touchChannelMutual = ptcPinValues[7].getAttribute("value")
def autoTuneFunc(symbol, event):
    global touchAcqLibraryFile
    global touchAcqAutoLibraryFile
    if (event["value"] == 0):
        touchAcqAutoLibraryFile.setEnabled(False)
        touchAcqLibraryFile.setEnabled(True)
    else:
        touchAcqAutoLibraryFile.setEnabled(True)
        touchAcqLibraryFile.setEnabled(False)
global touchAcqLibraryFile
global touchAcqAutoLibraryFile
############################################################################
#### Code Generation ####
############################################################################
# Library File
touchAcqLibraryFile = qtouchComponent.createLibrarySymbol("TOUCH_ACQ_LIB", None)
touchAcqLibraryFile.setSourcePath("/src/libraries/0x0020_qtm_samc20_acq.X.a")
touchAcqLibraryFile.setOutputName("0x0020_qtm_samc20_acq.X.a")
touchAcqLibraryFile.setDestPath("/touch/lib/")
touchAcqLibraryFile.setEnabled(True)
touchAcqLibraryFile.setDependencies(autoTuneFunc,["TUNE_MODE_SELECTED"])
# Library File
touchAcqAutoLibraryFile = qtouchComponent.createLibrarySymbol("TOUCH_ACQ_AUTO_LIB", None)
touchAcqAutoLibraryFile.setSourcePath("/src/libraries/0x0020_qtm_samc20_acq_auto.X.a")
touchAcqAutoLibraryFile.setOutputName("0x0020_qtm_samc20_acq_auto.X.a")
touchAcqAutoLibraryFile.setDestPath("/touch/lib/")
touchAcqAutoLibraryFile.setEnabled(False)
touchAcqAutoLibraryFile.setDependencies(autoTuneFunc,["TUNE_MODE_SELECTED"])
# Library File
touchBindLibraryFile = qtouchComponent.createLibrarySymbol("TOUCH_BIND_LIB", None)
touchBindLibraryFile.setSourcePath("/src/libraries/0x0005_qtm_binding_layer_cm0p.X.a")
touchBindLibraryFile.setOutputName("0x0005_qtm_binding_layer_cm0p.X.a")
touchBindLibraryFile.setDestPath("/touch/lib/")
touchBindLibraryFile.setEnabled(True)
# Header File
touchHeaderFile = qtouchComponent.createFileSymbol("TOUCH_ACQ_HEADER", None)
touchHeaderFile.setSourcePath("/src/qtm_acq_samc20_0x0020_api.h")
touchHeaderFile.setOutputName("qtm_acq_samc20_0x0020_api.h")
touchHeaderFile.setDestPath("/touch/")
touchHeaderFile.setProjectPath("config/" + configName + "/touch/")
touchHeaderFile.setType("HEADER")
touchHeaderFile.setMarkup(False)
# Header File
touchHeaderFile1 = qtouchComponent.createFileSymbol("TOUCH_ACQ_SAMC21_HEADER", None)
touchHeaderFile1.setSourcePath("/src/qtm_acq_samc21_0x0020_api.h")
touchHeaderFile1.setOutputName("qtm_acq_samc21_0x0020_api.h")
touchHeaderFile1.setDestPath("/touch/")
touchHeaderFile1.setProjectPath("config/" + configName + "/touch/")
touchHeaderFile1.setType("HEADER")
touchHeaderFile1.setMarkup(False)
# Header File
touchHeaderFile = qtouchComponent.createFileSymbol("TOUCH_BIND_HEADER", None)
touchHeaderFile.setSourcePath("/src/qtm_binding_layer_0x0005_api.h")
touchHeaderFile.setOutputName("qtm_binding_layer_0x0005_api.h")
touchHeaderFile.setDestPath("/touch/")
touchHeaderFile.setProjectPath("config/" + configName + "/touch/")
touchHeaderFile.setType("HEADER")
touchHeaderFile.setMarkup(False)
# Header File
touchHeaderFile = qtouchComponent.createFileSymbol("TOUCH_COMMON_HEADER", None)
touchHeaderFile.setSourcePath("/src/qtm_common_components_api.h")
touchHeaderFile.setOutputName("qtm_common_components_api.h")
touchHeaderFile.setDestPath("/touch/")
touchHeaderFile.setProjectPath("config/" + configName + "/touch/")
touchHeaderFile.setType("HEADER")
touchHeaderFile.setMarkup(False)
################################################################################
#### Component ####
################################################################################
#Set acquisition module id for the device
getModuleID = qtouchComponent.createStringSymbol("MODULE_ID", touchMenu)
getModuleID.setDefaultValue("0x0020")
getModuleID.setVisible(False)
#Set clock xml for the device
clockXml = qtouchComponent.createStringSymbol("CLOCK_XML", touchMenu)
clockXml.setDefaultValue("c21_clock_config")
clockXml.setVisible(False)
#Set PTC INTERRUPT HANDLER
Database.setSymbolValue("core", InterruptVector, True, 2)
Database.setSymbolValue("core", InterruptHandler, "PTC_Handler", 2)
#Set PTC PERIPHERAL CLOCK and Choose GCLK AS GCLK1
Database.clearSymbolValue("core", "PTC" + "_CLOCK_ENABLE")
Database.setSymbolValue("core", "PTC" + "_CLOCK_ENABLE", True, 2)
Database.clearSymbolValue("core", "GCLK_ID_37_GENSEL")
Database.setSymbolValue("core", "GCLK_ID_37_GENSEL", 1, 2)
#Set GCLK FOR PTC - GCLK1 AT 4MHZ
# Database.clearSymbolValue("core", "GCLK_INST_NUM1")
# Database.setSymbolValue("core", "GCLK_INST_NUM1", True, 2)
# Database.clearSymbolValue("core", "GCLK_1_DIV")
# Database.setSymbolValue("core", "GCLK_1_DIV", 12, 2)
acquisitionMenu = qtouchComponent.createMenuSymbol("ACQUISITION_MENU", touchMenu)
acquisitionMenu.setLabel("Acquisition Configuration")
# Sensing Technology
touchSenseTechnology = qtouchComponent.createKeyValueSetSymbol("SENSE_TECHNOLOGY", acquisitionMenu)
touchSenseTechnology.setLabel("Sensor Technology")
touchSenseTechnology.addKey("SelfCap", "NODE_SELFCAP", "Self Capacitance Sensing")
touchSenseTechnology.addKey("MutualCap", "NODE_MUTUAL", "Mutual Capacitance Sensing")
touchSenseTechnology.setDefaultValue(0)
touchSenseTechnology.setOutputMode("Value")
touchSenseTechnology.setDisplayMode("Description")
touchSenseTechnology.setDescription("Selects the sensor technology - Selfcap: Requires one pin per channel; Simple sensor design; Recommended for small number of sensors (less than 12). Mutualcap: Requires one X pin and one Y pin per channel; Can realize X x Y number of sensors in a matrix form; Recommended for large number of sensors (more than 12)")
totalChannelCountSelf = qtouchComponent.createIntegerSymbol("MAX_CHANNEL_COUNT_SELF",acquisitionMenu)
totalChannelCountSelf.setVisible(True)
totalChannelCountSelf.setDefaultValue(int(touchChannelSelf))
totalChannelCountMutl = qtouchComponent.createIntegerSymbol("MAX_CHANNEL_COUNT_MUTL",acquisitionMenu)
totalChannelCountMutl.setVisible(True)
totalChannelCountMutl.setDefaultValue(int(touchChannelMutual))
# Select Tuning mode
touchAutoTuneMode = qtouchComponent.createKeyValueSetSymbol("TUNE_MODE_SELECTED", acquisitionMenu)
touchAutoTuneMode.setLabel("Select the Required Tuning Mode")
touchAutoTuneMode.addKey("Manual Tuning","CAL_AUTO_TUNE_NONE","Manual tuning is done based on the values defined by user")
touchAutoTuneMode.addKey("Tune Resistor value","CAL_AUTO_TUNE_RSEL","Series Resistor is tuned")
touchAutoTuneMode.addKey("Tune CSD","CAL_AUTO_TUNE_CSD","Charge Share Delay - CSD is tuned")
touchAutoTuneMode.setDefaultValue(0)
touchAutoTuneMode.setOutputMode("Value")
touchAutoTuneMode.setDisplayMode("Key")
touchAutoTuneMode.setDescription("Sets the sensor calibration mode - CAL_AUTO_TUNE_NONE: Manual user setting of Prescaler, Charge share delay & Series resistor. AUTO_TUNE_CSD: QTouch library will use the configured prescaler and series resistor value and adjusts the CSD to ensure full charging.")
#Scan Rate (ms)
touchSym_TOUCH_MEASUREMENT_PERIOD_MS_Val = qtouchComponent.createIntegerSymbol("DEF_TOUCH_MEASUREMENT_PERIOD_MS", acquisitionMenu)
touchSym_TOUCH_MEASUREMENT_PERIOD_MS_Val.setLabel("Scan Rate (ms)")
touchSym_TOUCH_MEASUREMENT_PERIOD_MS_Val.setDefaultValue(20)
touchSym_TOUCH_MEASUREMENT_PERIOD_MS_Val.setMin(1)
touchSym_TOUCH_MEASUREMENT_PERIOD_MS_Val.setMax(255)
touchSym_TOUCH_MEASUREMENT_PERIOD_MS_Val.setDescription("Defines the timer scan rate in milliseconds to initiate periodic touch measurement on all enabled touch sensors.")
#PTC Interrupt Priority
touchSym_PTC_INTERRUPT_PRIORITY_Val = qtouchComponent.createIntegerSymbol("DEF_PTC_INTERRUPT_PRIORITY", acquisitionMenu)
touchSym_PTC_INTERRUPT_PRIORITY_Val.setLabel("PTC Interrupt Priority")
touchSym_PTC_INTERRUPT_PRIORITY_Val.setDefaultValue(3)
touchSym_PTC_INTERRUPT_PRIORITY_Val.setMin(0)
touchSym_PTC_INTERRUPT_PRIORITY_Val.setMax(3)
touchSym_PTC_INTERRUPT_PRIORITY_Val.setDescription("Defines the interrupt priority for the PTC. Set low priority to PTC interrupt for applications having interrupt time constraints.")
#Acquisition Frequency
touchSym_SEL_FREQ_INIT_Val = qtouchComponent.createKeyValueSetSymbol("DEF_SEL_FREQ_INIT", acquisitionMenu)
touchSym_SEL_FREQ_INIT_Val.setLabel("Acquisition Frequency")
touchSym_SEL_FREQ_INIT_Val.addKey("FREQ_0", "FREQ_SEL_0", "No additional clock cycles (Fastest measurement time) ")
touchSym_SEL_FREQ_INIT_Val.addKey("FREQ_1", "FREQ_SEL_1", "1 additional clock cycles ")
touchSym_SEL_FREQ_INIT_Val.addKey("FREQ_2", "FREQ_SEL_2", "2 additional clock cycles ")
touchSym_SEL_FREQ_INIT_Val.addKey("FREQ_3", "FREQ_SEL_3", "3 additional clock cycles ")
touchSym_SEL_FREQ_INIT_Val.addKey("FREQ_4", "FREQ_SEL_4", "4 additional clock cycles ")
touchSym_SEL_FREQ_INIT_Val.addKey("FREQ_5", "FREQ_SEL_5", "5 additional clock cycles ")
touchSym_SEL_FREQ_INIT_Val.addKey("FREQ_6", "FREQ_SEL_6", "6 additional clock cycles ")
touchSym_SEL_FREQ_INIT_Val.addKey("FREQ_7", "FREQ_SEL_7", "7 additional clock cycles ")
touchSym_SEL_FREQ_INIT_Val.addKey("FREQ_8", "FREQ_SEL_8", "8 additional clock cycles ")
touchSym_SEL_FREQ_INIT_Val.addKey("FREQ_9", "FREQ_SEL_9", "9 additional clock cycles ")
touchSym_SEL_FREQ_INIT_Val.addKey("FREQ_10", "FREQ_SEL_10", "10 additional clock cycles ")
touchSym_SEL_FREQ_INIT_Val.addKey("FREQ_11", "FREQ_SEL_11", "11 additional clock cycles ")
touchSym_SEL_FREQ_INIT_Val.addKey("FREQ_12", "FREQ_SEL_12", "12 additional clock cycles ")
touchSym_SEL_FREQ_INIT_Val.addKey("FREQ_13", "FREQ_SEL_13", "13 additional clock cycles ")
touchSym_SEL_FREQ_INIT_Val.addKey("FREQ_14", "FREQ_SEL_14", "14 additional clock cycles ")
touchSym_SEL_FREQ_INIT_Val.addKey("FREQ_15", "FREQ_SEL_15", "15 additional clock cycles (Slowest measurement time")
touchSym_SEL_FREQ_INIT_Val.addKey("FREQ_16", "FREQ_SEL_SPREAD", "16 different frequencies used")
touchSym_SEL_FREQ_INIT_Val.setDefaultValue(0)
touchSym_SEL_FREQ_INIT_Val.setOutputMode("Value")
touchSym_SEL_FREQ_INIT_Val.setDisplayMode("Value")
touchSym_SEL_FREQ_INIT_Val.setDescription("It may be required to change the acquisition frequency if system noise frequency is closer to acquisition frequency.In order to vary the acquisition frequency, additional clock cycles are added during measurement for FREQ_SEL_0 through FREQ_SEL_15. FREQ_SEL_0 provides the fastest measurement time (no additional clock cycles are added) and FREQ_SEL_15 provides the slowest measurement time (15 additional clock cycles are added). When FREQ_SEL_SPREAD option is used, all the 16 frequencies are used consecutively in a circular fashion.")
| [
"http://support.microchip.com"
]
| http://support.microchip.com |
f9a367342ececac14729f37a9b60ab04e704c21a | 225a9d69ad0d1c4741afc42e17cb15e188a88959 | /page_counter/wsgi.py | 3473ab2f995ea3fc69f0d6135e0e9fb26e792f02 | []
| no_license | priyankaonly1/Page_Counter_project | 445bbef67739af299273433b3094775fd94a1aef | f2c8b6f24ca0b04fba38dec98329f53ffe7053d6 | refs/heads/main | 2023-06-02T03:50:40.374197 | 2021-06-19T10:45:57 | 2021-06-19T10:45:57 | 378,388,074 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 401 | py | """
WSGI config for page_counter project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'page_counter.settings')
application = get_wsgi_application()
| [
"[email protected]"
]
| |
1759c3db5b289a6c16e38b0aee69020ea0b3073b | 8a452b71e3942d762fc2e86e49e72eac951b7eba | /leetcode/editor/en/[1827]Minimum Operations to Make the Array Increasing.py | b4069f64857e87c713a853fd9375da69f74b4076 | []
| no_license | tainenko/Leetcode2019 | 7bea3a6545f97c678a176b93d6622f1f87e0f0df | 8595b04cf5a024c2cd8a97f750d890a818568401 | refs/heads/master | 2023-08-02T18:10:59.542292 | 2023-08-02T17:25:49 | 2023-08-02T17:25:49 | 178,761,023 | 5 | 0 | null | 2019-08-27T10:59:12 | 2019-04-01T01:04:21 | JavaScript | UTF-8 | Python | false | false | 1,517 | py | # You are given an integer array nums (0-indexed). In one operation, you can
# choose an element of the array and increment it by 1.
#
#
# For example, if nums = [1,2,3], you can choose to increment nums[1] to make
# nums = [1,3,3].
#
#
# Return the minimum number of operations needed to make nums strictly
# increasing.
#
# An array nums is strictly increasing if nums[i] < nums[i+1] for all 0 <= i <
# nums.length - 1. An array of length 1 is trivially strictly increasing.
#
#
# Example 1:
#
#
# Input: nums = [1,1,1]
# Output: 3
# Explanation: You can do the following operations:
# 1) Increment nums[2], so nums becomes [1,1,2].
# 2) Increment nums[1], so nums becomes [1,2,2].
# 3) Increment nums[2], so nums becomes [1,2,3].
#
#
# Example 2:
#
#
# Input: nums = [1,5,2,4,1]
# Output: 14
#
#
# Example 3:
#
#
# Input: nums = [8]
# Output: 0
#
#
#
# Constraints:
#
#
# 1 <= nums.length <= 5000
# 1 <= nums[i] <= 10⁴
#
#
# Related Topics Array Greedy 👍 747 👎 34
# leetcode submit region begin(Prohibit modification and deletion)
class Solution:
    def minOperations(self, nums: List[int]) -> int:
        if len(nums) <= 1:
            return 0
        total = 0
        prev = nums[0]
        for num in nums[1:]:
            if num <= prev:
                total += prev - num + 1
                prev += 1
            else:
                prev = num
        return total
# leetcode submit region end(Prohibit modification and deletion)
| [
"[email protected]"
]
| |
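The greedy above only ever raises elements, so each out-of-order `num` costs exactly `prev - num + 1` increments. A quick check against the three examples from the problem statement (add `from typing import List` when running it standalone):

sol = Solution()
assert sol.minOperations([1, 1, 1]) == 3         # Example 1
assert sol.minOperations([1, 5, 2, 4, 1]) == 14  # Example 2
assert sol.minOperations([8]) == 0               # Example 3
print('all examples pass')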
c21f8e2373910412ade1c825e69cddf3d7c54944 | c78aee514845f3614da8542139ffc792f00d73e5 | /lender_books/admin.py | b5a548331deb6730fed5f6266b245fd872de9e1a | []
| no_license | MaxMcF/django_lender | e38971fc9443d9b91c91eb6b961fec50ab541a0f | f58b3a0dd4211994ec911f41139016106fdebf61 | refs/heads/django_lender_init | 2020-03-28T21:59:17.793610 | 2018-09-21T00:44:20 | 2018-09-21T00:44:20 | 149,197,075 | 0 | 1 | null | 2018-09-21T00:44:21 | 2018-09-17T22:41:30 | Python | UTF-8 | Python | false | false | 111 | py | from django.contrib import admin
from .views import Book
# Register your models here.
admin.site.register(Book) | [
"[email protected]"
]
| |
05f78e87a40055254100ec782728294631795a38 | 649bd422025e421d86025743eac324c9b882a2e8 | /exam/1_three-dimensional_atomic_system/dump/phasetrans/temp146_1000.py | ccb3d7fa6937d4f199901c0d892341e0e1e92ed2 | []
| no_license | scheuclu/atom_class | 36ddee1f6a5995872e858add151c5942c109847c | 0c9a8c63d9b38898c1869fe8983126cef17662cd | refs/heads/master | 2021-01-21T10:52:28.448221 | 2017-03-07T23:04:41 | 2017-03-07T23:04:41 | 83,489,471 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 68,759 | py | ITEM: TIMESTEP
1000
ITEM: NUMBER OF ATOMS
2048
ITEM: BOX BOUNDS pp pp pp
-2.4396873997906852e+00 4.9639687399784862e+01
-2.4396873997906852e+00 4.9639687399784862e+01
-2.4396873997906852e+00 4.9639687399784862e+01
ITEM: ATOMS id type xs ys zs
754 1 0.131543 0.0569661 0.108709
954 1 0.996399 0.00836973 0.165502
1232 1 0.133867 0.0616647 0.211945
1331 1 0.168203 0.0356236 0.379444
2004 1 0.0412896 0.0704065 0.178708
830 1 0.85994 0.011778 0.385057
382 1 0.0469466 0.0260252 0.238517
751 1 0.119241 0.156419 0.113275
1620 1 0.258597 0.163734 0.088866
877 1 0.184952 0.191332 0.030084
2020 1 0.13463 0.229805 0.110594
729 1 0.219066 0.255994 0.191628
2010 1 0.253632 0.0992401 0.0433214
1625 1 0.803641 0.438945 0.0122924
38 1 0.97602 0.447146 0.231658
1380 1 0.184737 0.143906 0.0957399
181 1 0.953967 0.472743 0.157651
219 1 0.327038 0.0175488 0.16337
1420 1 0.40361 0.120508 0.132603
1448 1 0.439898 0.285514 0.157212
1870 1 0.358102 0.179522 0.140225
560 1 0.341244 0.155383 0.0719373
1126 1 0.260012 0.209956 0.161839
683 1 0.760087 0.481258 0.337562
1966 1 0.45857 0.21689 0.141666
985 1 0.602013 0.109566 0.14799
360 1 0.475788 0.0323015 0.0983319
120 1 0.942198 0.379323 0.416302
1848 1 0.390834 0.0668603 0.179889
11 1 0.51468 0.146451 0.0595643
1238 1 0.899243 0.411858 0.364585
1302 1 0.941241 0.0145735 0.468084
285 1 0.375528 0.202522 0.0247338
1840 1 0.4916 0.114754 0.163068
2016 1 0.599615 0.0367097 0.0749934
904 1 0.592627 0.226229 0.165732
1908 1 0.724624 0.0610981 0.0663879
1062 1 0.12557 0.426451 0.039405
845 1 0.717639 0.215605 0.109204
926 1 0.626209 0.482937 0.267891
1611 1 0.590781 0.162355 0.0868316
1371 1 0.817726 0.162338 0.177198
457 1 0.880882 0.123162 0.0253898
630 1 0.211637 0.439217 0.372212
1601 1 0.340022 0.275621 0.0152335
1068 1 0.546456 0.0502707 0.150334
1169 1 0.214597 0.432179 0.303347
633 1 0.889671 0.0818147 0.179966
1712 1 0.905961 0.0617307 0.0992408
528 1 0.95897 0.124563 0.075712
485 1 0.959019 0.0849179 0.15683
872 1 0.969624 0.0567795 0.247086
928 1 0.831693 0.0190165 0.144728
1748 1 0.904481 0.311925 0.00729044
1447 1 0.00939535 0.162258 0.0463054
760 1 0.83563 0.48776 0.106497
1086 1 0.966786 0.256146 0.00887936
1312 1 0.0544548 0.201376 0.126718
783 1 0.182941 0.298791 0.0966012
281 1 0.0691767 0.17265 0.208927
278 1 0.704434 0.462516 0.447781
1041 1 0.992788 0.243235 0.0793609
1377 1 0.255281 0.243339 0.0218709
1799 1 0.306774 0.487394 0.0239329
2009 1 0.230087 0.3533 0.0909346
1173 1 0.164775 0.369293 0.0380902
182 1 0.267014 0.287971 0.126403
1871 1 0.0660296 0.303683 0.082851
822 1 0.298685 0.232122 0.0848703
1389 1 0.284477 0.30561 0.0606747
1018 1 0.39379 0.325925 0.0260436
1222 1 0.32245 0.254375 0.158523
1581 1 0.307768 0.333433 0.177611
1697 1 0.334406 0.350214 0.108265
1 1 0.420429 0.183111 0.08622
703 1 0.308405 0.166454 0.0106836
400 1 0.471652 0.186312 0.214669
2040 1 0.528101 0.291257 0.0898104
733 1 0.594544 0.244111 0.0758183
337 1 0.389182 0.350081 0.168269
1083 1 0.455145 0.105216 0.0579619
334 1 0.395975 0.271541 0.0755292
368 1 0.523136 0.22389 0.0970289
605 1 0.257242 0.0744484 0.202268
1212 1 0.548932 0.218107 0.0274422
1975 1 0.854837 0.460721 0.394348
700 1 0.578692 0.311287 0.0245876
765 1 0.650276 0.1967 0.0350722
147 1 0.646419 0.316065 0.0931964
367 1 0.663755 0.253099 0.140771
1209 1 0.0270424 0.384202 0.447283
1111 1 0.58471 0.298783 0.141513
484 1 0.449775 0.326647 0.101044
662 1 0.825833 0.116966 0.0798783
588 1 0.73479 0.119079 0.159056
1402 1 0.622445 0.393467 0.0578763
1357 1 0.206465 0.0334293 0.215792
1109 1 0.777 0.22581 0.0636934
1860 1 0.13874 0.00817388 0.165751
1798 1 0.822016 0.265512 0.135623
1525 1 0.972156 0.183742 0.115269
209 1 0.830682 0.195024 0.112503
1050 1 0.844069 0.238457 0.0415509
1133 1 0.892443 0.148984 0.117869
185 1 0.919678 0.464907 0.437777
510 1 0.919334 0.367334 0.108345
1968 1 0.903481 0.230938 0.148253
1429 1 0.844162 0.225194 0.198149
774 1 0.340554 0.458109 0.335276
1715 1 0.0260237 0.459452 0.14776
93 1 0.996475 0.31558 0.0979436
1886 1 0.168236 0.38357 0.161959
1104 1 0.797693 0.29169 0.0265967
2014 1 0.979108 0.423027 0.0844146
610 1 0.888565 0.291773 0.0792767
734 1 0.966941 0.356834 0.169995
1971 1 0.0588224 0.386151 0.050368
304 1 0.103719 0.294853 0.142758
290 1 0.464488 0.24456 0.070125
1832 1 0.25118 0.0447708 0.463885
475 1 0.950997 0.13444 0.000161799
1332 1 0.164562 0.495646 0.0688792
350 1 0.45148 0.0378243 0.327661
1950 1 0.366843 0.0376102 0.487022
545 1 0.280313 0.420043 0.0653979
1107 1 0.12847 0.445427 0.153445
1138 1 0.0677509 0.495131 0.204158
1406 1 0.420088 0.0351302 0.253618
122 1 0.301199 0.361807 0.0187897
2025 1 0.305222 0.427245 0.138701
1452 1 0.294123 0.446682 0.212247
1976 1 0.355952 0.422017 0.0227735
1716 1 0.465443 0.305143 0.0235133
619 1 0.535812 0.469563 0.160991
935 1 0.459215 0.388493 0.160484
1354 1 0.54704 0.474983 0.405521
1789 1 0.396041 0.384502 0.102002
1164 1 0.65088 0.0319438 0.254457
625 1 0.742694 0.14437 0.480843
2026 1 0.730646 0.424808 0.148879
547 1 0.527822 0.393333 0.148229
1217 1 0.597189 0.474944 0.348816
1969 1 0.750263 0.399373 0.377744
503 1 0.594392 0.439228 0.112857
103 1 0.517255 0.3601 0.0302928
166 1 0.269094 0.0599222 0.124611
1557 1 0.777713 0.00341923 0.221728
660 1 0.391241 0.499821 0.36852
709 1 0.807222 0.34052 0.0814306
807 1 0.1218 0.124504 0.408635
675 1 0.371107 0.119431 0.0192769
868 1 0.674414 0.465381 0.119642
401 1 0.608927 0.477797 0.0444443
941 1 0.741568 0.387769 0.0678554
1596 1 0.75846 0.461642 0.0729977
1974 1 0.885009 0.42987 0.134905
2006 1 0.401536 0.0534926 0.0669341
217 1 0.922804 0.376322 0.0341458
738 1 0.681579 0.486694 0.373705
1928 1 0.166119 0.492223 0.263337
1731 1 0.979154 0.354427 0.273221
1440 1 0.832 0.417481 0.0739327
1813 1 0.844424 0.368284 0.0181357
1841 1 0.116742 0.0707015 0.335786
112 1 0.088152 0.0887044 0.266014
1588 1 0.167124 0.0479797 0.273879
327 1 0.363708 0.0547513 0.388847
1742 1 0.17407 0.141204 0.279744
1085 1 0.174508 0.109581 0.365829
1527 1 0.134568 0.174646 0.358744
97 1 0.486379 0.038124 0.021308
1594 1 0.172462 0.304791 0.239214
1473 1 0.106125 0.184254 0.28444
1687 1 0.1848 0.212448 0.300758
1346 1 0.07537 0.0522722 0.40832
1084 1 0.173133 0.1543 0.193018
792 1 0.369451 0.124165 0.27636
1116 1 0.324449 0.17527 0.301995
307 1 0.256846 0.140424 0.252073
168 1 0.439153 0.108924 0.218739
118 1 0.351761 0.139399 0.201705
977 1 0.352584 0.163142 0.381386
50 1 0.294069 0.139534 0.151592
1318 1 0.796691 0.0529296 0.0638918
8 1 0.466081 0.045571 0.177947
607 1 0.454021 0.149715 0.496494
541 1 0.20973 0.438143 0.0229253
1011 1 0.608429 0.148798 0.336225
54 1 0.325302 0.0429154 0.237947
2000 1 0.543947 0.170732 0.148609
1482 1 0.507602 0.0918762 0.24704
1510 1 0.494718 0.163207 0.27948
1843 1 0.0779186 0.482726 0.408348
584 1 0.683529 0.364094 0.0256764
1603 1 0.660442 0.0505068 0.108708
1513 1 0.141254 0.0503237 0.467675
1537 1 0.570524 0.0353237 0.236519
1541 1 0.621053 0.100588 0.25327
1746 1 0.714845 0.0593076 0.242342
1326 1 0.767305 0.0292027 0.291755
1740 1 0.822102 0.0658899 0.217431
65 1 0.570926 0.359324 0.0909979
1008 1 0.81173 0.0892876 0.144263
1344 1 0.770325 0.109265 0.253468
671 1 0.807003 0.193222 0.266822
798 1 0.760994 0.0962337 0.330529
1752 1 0.0725104 0.0448208 0.0254719
1851 1 0.33954 0.0750502 0.124232
1030 1 0.721719 0.00158784 0.483032
511 1 0.684693 0.459725 0.0478112
1900 1 0.0204524 0.114514 0.25128
938 1 0.855914 0.138988 0.232742
882 1 0.914188 0.160668 0.192243
757 1 0.891867 0.078941 0.267661
1978 1 0.879789 0.480341 0.32368
1463 1 0.953699 0.141751 0.25224
1954 1 0.894892 0.146928 0.304784
1462 1 0.0391694 0.17129 0.295448
1464 1 0.00351337 0.240119 0.15746
124 1 0.0361097 0.380058 0.12829
1624 1 0.0783461 0.405006 0.187937
1686 1 0.0754693 0.259237 0.193777
1277 1 0.0268767 0.306698 0.162235
1013 1 0.954922 0.192271 0.302966
983 1 0.175932 0.23999 0.423448
1424 1 0.128835 0.270144 0.293807
330 1 0.0583607 0.248269 0.322373
516 1 0.0522174 0.306573 0.275575
1307 1 0.204582 0.319656 0.16426
201 1 0.219925 0.205366 0.371017
486 1 0.146892 0.239868 0.218902
1619 1 0.0917267 0.33311 0.214453
1230 1 0.2449 0.216597 0.255257
1069 1 0.249091 0.231251 0.449861
595 1 0.241135 0.293203 0.270848
1704 1 0.12948 0.348367 0.279597
1136 1 0.283477 0.240413 0.360616
1214 1 0.329436 0.200316 0.23241
1855 1 0.423971 0.173846 0.264661
1549 1 0.323921 0.253974 0.294748
518 1 0.322021 0.328049 0.291591
649 1 0.389428 0.215098 0.195163
2042 1 0.387099 0.244182 0.263057
36 1 0.514466 0.268954 0.1705
90 1 0.463578 0.33413 0.210291
1065 1 0.516912 0.32285 0.266554
445 1 0.64101 0.242307 0.220813
767 1 0.467143 0.254455 0.246864
328 1 0.552557 0.202374 0.345391
460 1 0.483055 0.233992 0.333059
1864 1 0.528517 0.382325 0.314848
1850 1 0.659603 0.177639 0.107813
1066 1 0.564489 0.136511 0.212255
1659 1 0.656466 0.274433 0.291312
502 1 0.520559 0.354307 0.38279
438 1 0.761571 0.198351 0.188951
1099 1 0.67552 0.172936 0.186899
1957 1 0.539194 0.221274 0.221725
2015 1 0.686967 0.308087 0.179884
711 1 0.626764 0.375524 0.16839
99 1 0.668413 0.184001 0.297176
1622 1 0.728576 0.312175 0.0753459
374 1 0.751429 0.282891 0.144674
418 1 0.744817 0.175936 0.321751
1691 1 0.832379 0.257275 0.29894
747 1 0.703641 0.371514 0.226705
613 1 0.704586 0.247635 0.354101
1202 1 0.786337 0.274882 0.213706
59 1 0.894609 0.316506 0.160411
795 1 0.929344 0.415192 0.19024
501 1 0.988543 0.14773 0.186539
1494 1 0.974101 0.212848 0.230816
1643 1 0.832313 0.330919 0.261765
1091 1 0.806748 0.409803 0.201528
1137 1 0.897297 0.226806 0.262265
1439 1 0.973033 0.394825 0.347077
854 1 0.924118 0.28805 0.236679
1662 1 0.833239 0.134393 0.458669
1695 1 0.0943126 0.493418 0.269269
1920 1 0.125513 0.423332 0.263588
2028 1 0.0284243 0.449813 0.322068
1067 1 0.0377547 0.359133 0.325625
524 1 0.138966 0.419195 0.347523
909 1 0.293188 0.405139 0.371372
141 1 0.269962 0.37143 0.294831
1087 1 0.173658 0.428131 0.0969269
1250 1 0.13399 0.357683 0.109238
1501 1 0.26138 0.342639 0.365048
1001 1 0.328917 0.29747 0.36789
253 1 0.221719 0.38019 0.222789
594 1 0.182732 0.279299 0.357358
1386 1 0.248035 0.376622 0.161375
1410 1 0.288224 0.26596 0.222077
833 1 0.392783 0.315197 0.239434
548 1 0.474952 0.501505 0.358324
101 1 0.331612 0.382539 0.241952
1795 1 0.370409 0.439722 0.187555
1911 1 0.226362 0.452937 0.150345
1828 1 0.367743 0.451785 0.271463
816 1 0.392639 0.377067 0.311102
1837 1 0.435369 0.478513 0.113565
1327 1 0.956884 0.0838727 0.414968
1213 1 0.476877 0.433255 0.300377
1122 1 0.507495 0.476665 0.235242
761 1 0.445195 0.440021 0.233565
83 1 0.508954 0.388549 0.229236
176 1 0.554035 0.440718 0.279082
974 1 0.471437 0.482069 0.0340708
1336 1 0.586154 0.358132 0.224261
362 1 0.639683 0.407912 0.247842
1959 1 0.679701 0.453781 0.309637
213 1 0.461349 0.404746 0.0581046
1568 1 0.606684 0.389253 0.401979
200 1 0.582086 0.444331 0.209454
1215 1 0.83061 0.423922 0.332989
1304 1 0.736308 0.349902 0.168035
74 1 0.71069 0.0526883 0.407949
1896 1 0.672773 0.353265 0.297592
145 1 0.757834 0.455226 0.236723
1618 1 0.796696 0.436411 0.474196
343 1 0.744936 0.23388 0.270594
105 1 0.81867 0.333375 0.183008
1809 1 0.740483 0.394895 0.284867
523 1 0.759801 0.319271 0.281738
1131 1 0.868936 0.448388 0.207576
472 1 0.226095 0.0858644 0.29898
1039 1 0.134137 0.113193 0.0481075
338 1 0.903236 0.352255 0.299432
1325 1 0.0466733 0.444145 0.25158
945 1 0.952393 0.479333 0.321725
1595 1 0.970057 0.0494909 0.0521892
812 1 0.125405 0.204352 0.461581
869 1 0.0662678 0.122352 0.354907
1130 1 0.00467328 0.176144 0.371114
705 1 0.618657 0.422496 0.486653
878 1 0.875599 0.433827 0.00732322
1688 1 0.0250686 0.127864 0.124375
1426 1 0.17828 0.161207 0.423909
18 1 0.256648 0.113379 0.484193
1931 1 0.251429 0.163725 0.324882
669 1 0.909715 0.00395544 0.0359286
1538 1 0.640082 0.127525 0.0501029
609 1 0.199451 0.0938088 0.433305
735 1 0.203436 0.47825 0.469292
1649 1 0.308682 0.103982 0.321153
839 1 0.148269 0.495714 0.00289412
1374 1 0.794381 0.230847 0.492816
448 1 0.818491 0.472636 0.277654
361 1 0.370126 0.0379218 0.311594
20 1 0.274678 0.476336 0.382286
1889 1 0.330329 0.124945 0.462664
1535 1 0.231526 0.492138 0.215787
1153 1 0.542819 0.0696146 0.0418338
123 1 0.929113 0.195717 0.0462921
1598 1 0.267565 0.153931 0.409287
1677 1 0.492617 0.155488 0.422033
522 1 0.458316 0.0769209 0.481182
893 1 0.511743 0.135092 0.343868
1288 1 0.527285 0.0419038 0.310299
1148 1 0.394092 0.119957 0.435897
1308 1 0.246994 0.0912312 0.372997
238 1 0.488 0.0509809 0.411679
447 1 0.616051 0.0499452 0.324747
1717 1 0.734876 0.0313478 0.142731
925 1 0.570734 0.0879969 0.37136
922 1 0.984971 0.334926 0.0292136
566 1 0.689518 0.254598 0.471968
620 1 0.794001 0.0426474 0.446665
126 1 0.64885 0.156044 0.460475
76 1 0.64819 0.0662679 0.181344
291 1 0.69434 0.0540174 0.323311
1627 1 0.629059 0.230716 0.348279
791 1 0.886768 0.276294 0.334738
1205 1 0.566001 0.271333 0.46884
218 1 0.446174 0.162006 0.015276
932 1 0.786643 0.0230101 0.367154
58 1 0.859761 0.11401 0.388002
780 1 0.765197 0.106515 0.418678
1283 1 0.531173 0.50117 0.313758
653 1 0.871285 0.181713 0.42707
1049 1 0.970483 0.113132 0.314776
589 1 0.926695 0.055009 0.354073
664 1 0.947117 0.159513 0.419612
98 1 0.892804 0.0691048 0.448218
1753 1 0.0266776 0.128756 0.429662
784 1 0.0436996 0.0277802 0.350981
1048 1 0.659196 0.0403125 0.457604
647 1 0.0149527 0.0589979 0.45318
1092 1 0.521898 0.488213 0.47586
1836 1 0.191812 0.349952 0.328501
910 1 0.978588 0.255092 0.37563
719 1 0.997578 0.326253 0.373147
1733 1 0.0529723 0.204583 0.450149
499 1 0.071532 0.228574 0.387259
469 1 0.0701017 0.335345 0.415593
318 1 0.978151 0.280505 0.307553
1224 1 0.55727 0.414287 0.0240366
243 1 0.161301 0.314867 0.432403
446 1 0.222503 0.305621 0.487688
321 1 0.551302 0.00278418 0.385145
659 1 0.260924 0.290388 0.418902
62 1 0.270626 0.00380005 0.396474
652 1 0.343548 0.283743 0.448882
861 1 0.398613 0.291379 0.385114
1685 1 0.444253 0.170097 0.352043
1367 1 0.474189 0.3189 0.420665
1305 1 0.217904 0.318934 0.0183018
254 1 0.368337 0.228529 0.352593
558 1 0.395038 0.199464 0.425955
87 1 0.323689 0.218637 0.421379
1690 1 0.86697 0.394066 0.434344
559 1 0.559488 0.109905 0.294302
1394 1 0.970076 0.136648 0.48311
1932 1 0.543898 0.288145 0.376661
392 1 0.205363 0.0461687 0.073025
1991 1 0.410649 0.00707297 0.434889
269 1 0.567382 0.163554 0.420719
521 1 0.631416 0.224007 0.444335
1629 1 0.482721 0.243009 0.438301
616 1 0.700576 0.12544 0.361936
1036 1 0.687691 0.187265 0.399052
163 1 0.0927169 0.165698 0.0218274
894 1 0.634928 0.0912219 0.404338
326 1 0.720993 0.0769563 0.47842
1660 1 0.624767 0.475034 0.422211
1253 1 0.773534 0.196663 0.414993
634 1 0.774509 0.461719 0.408628
667 1 0.563127 0.272531 0.301374
2027 1 0.632909 0.324311 0.350029
204 1 0.722952 0.39283 0.444848
891 1 0.770052 0.305517 0.422074
912 1 0.83271 0.242106 0.416276
1262 1 0.811299 0.170229 0.357741
718 1 0.779272 0.2406 0.357341
364 1 0.836723 0.0944977 0.300684
699 1 0.77573 0.474516 0.153714
1125 1 0.171048 0.397309 0.486253
1953 1 0.586024 0.0645586 0.44693
1942 1 0.911073 0.344429 0.480406
529 1 0.980339 0.222786 0.457259
305 1 0.729779 0.32454 0.348196
1488 1 0.893542 0.214512 0.485777
236 1 0.28604 0.449874 0.292138
395 1 0.896941 0.194365 0.356592
41 1 0.910685 0.253158 0.41498
1301 1 0.81533 0.324442 0.367182
1121 1 0.959299 0.301924 0.438142
1521 1 0.0299349 0.282116 0.452787
258 1 0.68878 0.457586 0.193158
314 1 0.0670174 0.400786 0.372882
815 1 0.108727 0.27322 0.465698
695 1 0.123049 0.342977 0.361168
1255 1 0.124102 0.391284 0.418094
1826 1 0.167293 0.493123 0.339631
2045 1 0.993131 0.449173 0.427009
787 1 0.146105 0.4634 0.425893
1804 1 0.716321 0.132381 0.019776
1901 1 0.129558 0.297275 0.0343852
477 1 0.333283 0.402809 0.473864
398 1 0.528315 0.436467 0.0876338
1150 1 0.297287 0.353251 0.432904
550 1 0.365043 0.446754 0.104295
712 1 0.352071 0.360332 0.374233
611 1 0.22278 0.355387 0.430402
1034 1 0.915507 0.333201 0.374117
601 1 0.534476 0.120054 0.482033
1528 1 0.386984 0.306362 0.313687
1516 1 0.399499 0.356995 0.441035
1774 1 0.37504 0.485209 0.452604
1566 1 0.450505 0.45987 0.421164
1696 1 0.381303 0.432413 0.394515
473 1 0.866897 0.309296 0.420284
479 1 0.447551 0.104068 0.293616
275 1 0.446985 0.37231 0.376924
1563 1 0.184253 0.0917133 0.155709
32 1 0.0949901 0.399273 0.489018
946 1 0.548719 0.417994 0.369263
885 1 0.566388 0.351014 0.463256
434 1 0.0539718 0.0176064 0.0971188
776 1 0.487477 0.396458 0.434295
329 1 0.467552 0.312875 0.34066
104 1 0.846746 0.398929 0.26784
24 1 0.922378 0.424048 0.275152
1882 1 0.788182 0.160618 0.00861218
192 1 0.626433 0.317237 0.495846
222 1 0.618659 0.319925 0.426788
2034 1 0.682494 0.386466 0.365663
1940 1 0.59857 0.389228 0.306865
189 1 0.414732 0.264976 0.485424
1002 1 0.696331 0.303659 0.413489
28 1 0.0411284 0.468064 0.0800479
642 1 0.28946 0.0251398 0.300516
1446 1 0.328448 0.058718 0.0346325
1989 1 0.0355376 0.471371 0.00795925
49 1 0.853699 0.28533 0.483139
1317 1 0.440147 0.423418 0.489693
85 1 0.490799 0.340249 0.494657
702 1 0.451098 0.496045 0.286563
1023 1 0.211393 0.00487594 0.139162
1767 1 0.291061 0.272022 0.496619
214 1 0.834934 0.359115 0.498951
25 1 0.0659503 0.133919 0.491207
1823 1 0.578602 0.153949 0.0153397
802 1 0.12226 0.465089 0.497528
1763 1 0.0110694 0.462306 0.492096
2023 1 0.735878 0.00774488 0.00379596
1929 1 0.0974077 0.0581636 0.630021
1375 1 0.197515 0.323704 0.553782
779 1 0.0633903 0.124212 0.645237
261 1 0.152874 0.182787 0.558678
1508 1 0.0189959 0.115938 0.705909
937 1 0.913255 0.485649 0.68742
1384 1 0.648667 0.272196 0.613747
1551 1 0.0351263 0.07461 0.572931
1727 1 0.0783281 0.496149 0.892398
963 1 0.298815 0.0629225 0.67139
10 1 0.218336 0.0587728 0.625967
788 1 0.111941 0.0946707 0.526764
308 1 0.23859 0.103312 0.557911
936 1 0.898845 0.134271 0.510574
817 1 0.264691 0.0550718 0.96784
657 1 0.282811 0.13636 0.610764
111 1 0.574779 0.0620728 0.653865
940 1 0.374534 0.164883 0.508399
1334 1 0.513348 0.157642 0.558362
153 1 0.5004 0.0866598 0.643358
1005 1 0.348723 0.0718253 0.589209
898 1 0.92744 0.384005 0.948164
1839 1 0.841469 0.430844 0.944902
1330 1 0.54632 0.0111691 0.889831
889 1 0.579812 0.200525 0.511057
596 1 0.241119 0.0515924 0.711069
46 1 0.555798 0.12324 0.607694
1124 1 0.496872 0.165492 0.664059
1960 1 0.590629 0.107656 0.87281
1683 1 0.595942 0.14526 0.673284
245 1 0.217995 0.49763 0.803904
720 1 0.602624 0.0388376 0.510142
237 1 0.791058 0.0396439 0.644226
1445 1 0.727195 0.190318 0.54347
1905 1 0.761376 0.172997 0.711293
831 1 0.69776 0.0775178 0.655248
1779 1 0.679189 0.155451 0.679283
656 1 0.956338 0.484718 0.625062
1946 1 0.636241 0.0930749 0.609522
1817 1 0.762233 0.00681787 0.766833
1815 1 0.794913 0.0983616 0.51459
252 1 0.796184 0.057892 0.715005
215 1 0.5701 0.464824 0.526866
931 1 0.776537 0.117431 0.655446
279 1 0.763991 0.450687 0.944794
462 1 0.787403 0.148398 0.574996
1869 1 0.858159 0.171812 0.618644
1499 1 0.754865 0.0769328 0.574207
175 1 0.838423 0.167215 0.684651
131 1 0.953807 0.193678 0.53701
142 1 0.915545 0.105452 0.598429
1243 1 0.415848 0.474734 0.677881
1266 1 0.855552 0.0423368 0.545947
2 1 0.977565 0.128608 0.652503
1225 1 0.00593165 0.139412 0.564601
508 1 0.947318 0.0685682 0.529182
1341 1 0.830529 0.0938478 0.601245
710 1 0.138232 0.0114659 0.998214
302 1 0.240742 0.185776 0.531396
543 1 0.100549 0.188616 0.648408
2037 1 0.68882 -0.00119325 0.773945
988 1 0.0797297 0.156224 0.57356
1996 1 0.109449 0.27173 0.546005
1057 1 0.0814624 0.207375 0.51719
685 1 0.065954 0.223192 0.58772
1868 1 0.158107 0.090628 0.598315
1721 1 0.327926 0.204897 0.637041
1849 1 0.278304 0.272689 0.660406
1103 1 0.18247 0.171891 0.621592
262 1 0.139673 0.236036 0.608677
1694 1 0.447494 0.217611 0.65285
353 1 0.876112 0.0207961 0.693261
1080 1 0.245517 0.291134 0.597391
900 1 0.42643 0.0239833 0.535392
1236 1 0.355826 0.254326 0.70363
196 1 0.400884 0.275181 0.623467
538 1 0.451159 0.23902 0.569999
1190 1 0.242376 0.347203 0.672018
1902 1 0.155475 0.432131 0.550391
15 1 0.347677 0.321321 0.653239
991 1 0.539094 0.201786 0.761125
1498 1 0.520468 0.225802 0.609106
962 1 0.559291 0.116826 0.765456
1282 1 0.558924 0.249864 0.67314
132 1 0.489646 0.298064 0.591897
1249 1 0.454602 0.162109 0.596747
225 1 0.525225 0.281176 0.527019
1574 1 0.517067 0.428053 0.576248
1006 1 0.411282 0.375613 0.755047
316 1 0.651487 0.340832 0.563273
1486 1 0.58748 0.204427 0.606512
1026 1 0.72466 0.267783 0.531337
396 1 0.663113 0.194183 0.61496
2019 1 0.681284 0.225878 0.676275
1450 1 0.597073 0.267876 0.544404
1592 1 0.579002 0.463195 0.596536
48 1 0.753237 0.243068 0.693082
1022 1 0.724682 0.149685 0.606059
1046 1 0.786105 0.186953 0.639742
378 1 0.784371 0.305722 0.514199
1925 1 0.76138 0.311726 0.639898
1884 1 0.817066 0.309053 0.70774
1378 1 0.802779 0.235501 0.563941
1833 1 0.937173 0.265629 0.514375
1722 1 0.881821 0.24946 0.581656
1134 1 0.823776 0.232829 0.72437
1398 1 0.816875 0.249994 0.644707
280 1 0.72784 0.251798 0.610827
897 1 0.0145867 0.188073 0.627993
1425 1 0.95702 0.2935 0.596759
1000 1 0.932523 0.183716 0.634661
952 1 0.669769 0.119098 0.537944
1874 1 0.962412 0.238311 0.674792
487 1 0.946924 0.344376 0.676218
1880 1 0.884208 0.234789 0.662857
544 1 0.0278449 0.388148 0.522269
1852 1 0.103644 0.371806 0.554434
381 1 0.0425261 0.390008 0.601515
1495 1 0.955427 0.388921 0.861518
574 1 0.863874 0.483389 0.749419
1562 1 0.223105 0.394141 0.539365
1320 1 0.0481151 0.289733 0.578159
1422 1 0.0914495 0.443975 0.694294
741 1 0.270717 0.366368 0.606886
533 1 0.4045 0.00638281 0.718702
920 1 0.115603 0.316937 0.603183
623 1 0.292192 0.421006 0.557967
1185 1 0.321376 0.292638 0.590038
1938 1 0.28169 0.489616 0.648937
648 1 0.110718 0.436428 0.615431
1577 1 0.301443 0.40421 0.660062
1456 1 0.141547 0.323718 0.509688
1623 1 0.137654 0.22783 0.986904
847 1 0.353555 0.389717 0.53886
1093 1 0.290929 0.0244396 0.785546
1007 1 0.126629 0.456351 0.801226
1862 1 0.000920039 0.0733466 0.973295
257 1 0.555658 0.353183 0.545167
1206 1 0.719666 0.355868 0.519309
208 1 0.335751 0.485136 0.600465
1492 1 0.253618 0.186899 0.945015
1267 1 0.546161 0.370392 0.625706
3 1 0.106753 0.404543 0.748231
1505 1 0.421499 0.398614 0.563956
79 1 0.440297 0.464157 0.58549
809 1 0.467647 0.407236 0.623206
604 1 0.375679 0.232735 0.555615
1602 1 0.17034 0.244592 0.519965
1783 1 0.534555 0.439238 0.672273
471 1 0.703651 0.383691 0.623042
403 1 0.92323 0.00932198 0.95406
1480 1 0.834607 0.400741 0.593349
480 1 0.71782 0.319223 0.586378
1416 1 0.0619124 0.0476776 0.511998
61 1 0.821782 0.310427 0.57828
292 1 0.74742 0.44495 0.598011
115 1 0.859225 0.369103 0.666869
1052 1 0.724538 0.0132575 0.684962
964 1 0.765502 0.371966 0.567826
1361 1 0.960757 0.435257 0.56131
1741 1 0.950882 0.373498 0.604871
1028 1 0.162843 0.0155428 0.653614
1388 1 0.649852 0.209191 0.529513
890 1 0.892114 0.343161 0.572931
1793 1 0.0419163 0.456501 0.566982
1958 1 0.296901 0.476055 0.919882
159 1 0.896387 0.418718 0.622709
554 1 0.106632 0.452042 0.96441
481 1 0.131259 0.0248479 0.733174
1881 1 0.298493 0.227138 0.576914
86 1 0.00273856 0.0487141 0.657304
1972 1 0.119342 0.117254 0.774285
43 1 0.0306308 0.0346203 0.850269
1710 1 0.080935 0.0703547 0.703488
786 1 0.0222308 0.177465 0.814212
899 1 0.747798 0.495103 0.874351
230 1 0.0240842 0.102461 0.782651
749 1 0.384532 0.433796 0.893078
766 1 0.310858 0.0977072 0.755318
422 1 0.17215 0.088306 0.736842
1773 1 0.247634 0.14834 0.743289
1790 1 0.280645 0.139349 0.816509
532 1 0.345747 0.115861 0.649478
13 1 0.498694 0.425967 0.986002
1114 1 0.197526 0.0277496 0.777064
386 1 0.370217 0.0457224 0.670926
1990 1 0.314674 0.186615 0.710823
781 1 0.590015 0.0310409 0.795061
1671 1 0.445527 0.043808 0.673155
1176 1 0.369402 0.117148 0.717812
227 1 0.184669 0.0599745 0.523262
1414 1 0.542238 0.0513549 0.584935
871 1 0.389241 0.0140282 0.610875
1646 1 0.683361 0.473737 0.742351
205 1 0.881101 0.193236 0.98649
1506 1 0.471919 0.033048 0.598809
1009 1 0.525197 0.0657967 0.834867
848 1 0.29912 0.0528759 0.518005
1274 1 0.634917 0.0974357 0.740759
1218 1 0.554209 0.216739 0.833105
344 1 0.508477 0.0301584 0.742949
1919 1 0.642466 0.041592 0.692911
1943 1 0.588095 0.477217 0.787083
268 1 0.654862 0.159421 0.883775
876 1 0.613865 0.161665 0.811225
628 1 0.722285 0.119331 0.870708
455 1 0.676711 0.119471 0.81281
555 1 0.702398 0.160663 0.761764
573 1 0.681032 0.0466673 0.875558
1042 1 0.615704 0.186142 0.738488
1520 1 0.726414 0.0742336 0.724164
1543 1 0.840233 0.0469492 0.774245
916 1 0.752117 0.123942 0.788943
1933 1 0.827187 0.104459 0.817108
1700 1 0.905808 0.019454 0.759873
687 1 0.820467 0.159136 0.764756
799 1 0.730518 0.246975 0.876329
1787 1 0.511801 0.47779 0.790827
1894 1 0.939661 0.143182 0.712163
1792 1 0.920029 0.486505 0.885446
534 1 0.983429 0.0980553 0.844684
1261 1 0.95442 0.143945 0.783844
1572 1 0.874905 0.108346 0.749773
853 1 0.971676 0.0824322 0.740209
1708 1 0.0327374 0.111234 0.905917
377 1 0.995388 0.443604 0.886488
31 1 0.921088 0.0829527 0.808918
2031 1 0.883608 0.108759 0.67497
235 1 0.13152 0.11345 0.661227
748 1 0.165434 0.227754 0.681942
27 1 0.0118362 0.298685 0.713679
576 1 0.112439 0.306227 0.672851
1980 1 0.00651285 0.331874 0.638585
1415 1 0.0539609 0.354316 0.880501
465 1 0.0171443 0.24626 0.768427
593 1 0.10286 0.227238 0.794034
372 1 0.300256 0.238876 0.755291
1756 1 0.245526 0.195394 0.685533
1514 1 0.214485 0.246657 0.738072
676 1 0.384494 0.169819 0.660981
2048 1 0.090524 0.179204 0.727329
903 1 0.189793 0.289776 0.797526
682 1 0.198469 0.291247 0.662108
755 1 0.268838 0.272777 0.814966
811 1 0.420153 0.0955343 0.631079
1140 1 0.25738 0.313022 0.743541
1994 1 0.486558 0.233615 0.710903
896 1 0.333723 0.374721 0.727073
1403 1 0.362782 0.188329 0.7847
270 1 0.404154 0.312293 0.71879
951 1 0.453073 0.110962 0.716843
133 1 0.242398 0.342102 0.809064
1252 1 0.358626 0.30995 0.772328
1142 1 0.421681 0.30826 0.811664
483 1 0.450661 0.331461 0.658897
859 1 0.523912 0.318999 0.683748
394 1 0.626728 0.370126 0.667437
677 1 0.475894 0.254041 0.847552
509 1 0.411916 0.0572915 0.779021
1921 1 0.605988 0.281893 0.784056
976 1 0.539276 0.299365 0.834428
1963 1 0.623429 0.301714 0.694868
1467 1 0.438574 0.134869 0.778963
169 1 0.537267 0.269841 0.747853
452 1 0.689594 0.307375 0.657779
171 1 0.589017 0.316911 0.61801
1045 1 0.727716 0.285566 0.75446
1051 1 0.653831 0.341866 0.762521
1540 1 0.655833 0.231484 0.804152
1846 1 0.637232 0.403456 0.811594
1181 1 0.736158 0.340313 0.706429
277 1 0.610068 0.328525 0.836875
300 1 0.625673 0.395009 0.901551
1421 1 0.826249 0.347724 0.78036
1786 1 0.869309 0.277122 0.765198
658 1 0.89741 0.189926 0.750296
1204 1 0.758528 0.206531 0.797162
837 1 0.794246 0.384399 0.70702
587 1 0.793336 0.28606 0.80529
125 1 0.683272 0.368765 0.861525
1337 1 0.813829 0.238856 0.857709
289 1 0.744174 0.435456 0.761664
404 1 0.873228 0.302711 0.638732
1113 1 0.936714 0.246922 0.759897
1306 1 0.878587 0.187192 0.817116
1573 1 0.0108628 0.184186 0.708029
540 1 0.958528 0.357106 0.757746
1681 1 0.882351 0.268433 0.848554
1461 1 0.954353 0.179313 0.855763
1419 1 0.630223 0.4202 0.557551
408 1 0.125847 0.322863 0.793718
1803 1 0.0552169 0.332311 0.777906
1365 1 0.0653117 0.317492 0.505519
905 1 0.106969 0.373782 0.650445
40 1 0.0306019 0.400372 0.804406
829 1 0.0449853 0.365543 0.688764
814 1 0.101254 0.386519 0.817819
1617 1 0.256889 0.387634 0.744314
1985 1 0.158106 0.434767 0.679082
442 1 0.54114 0.497283 0.858838
2039 1 0.175727 0.356269 0.703233
578 1 0.18641 0.40195 0.623966
1256 1 0.189453 0.421007 0.769379
1730 1 0.175858 0.371832 0.834473
583 1 0.235132 0.436293 0.676781
513 1 0.338504 0.451462 0.701739
1895 1 0.720484 0.438815 0.534446
690 1 0.393624 0.38566 0.827103
451 1 0.289688 0.474968 0.769298
942 1 0.352666 0.439717 0.777289
1399 1 0.264318 0.411326 0.814834
873 1 0.469665 0.360863 0.979962
615 1 0.467509 0.392686 0.71544
1027 1 0.563715 0.380569 0.714549
855 1 0.392861 0.380871 0.662616
444 1 0.514713 0.401153 0.774165
585 1 0.437234 0.448003 0.771325
1982 1 0.220648 0.495194 0.885104
1821 1 0.481234 0.327793 0.75382
2035 1 0.483472 0.46918 0.718693
491 1 0.684428 0.397491 0.710523
970 1 0.5763 0.356863 0.784247
990 1 0.708475 0.416495 0.820203
1732 1 0.644791 0.442154 0.63654
1322 1 0.612226 0.44407 0.699649
1106 1 0.510412 0.0443401 0.520973
1179 1 0.840728 0.444494 0.691635
1883 1 0.547478 0.361083 0.960331
1101 1 0.866415 0.34974 0.950291
914 1 0.723005 0.299484 0.995932
7 1 0.739132 0.356205 0.781394
1647 1 0.736125 0.42975 0.672923
322 1 0.790598 0.429278 0.835256
1550 1 0.822894 0.422813 0.761727
66 1 0.878618 0.417742 0.885838
1033 1 0.776208 0.482783 0.701626
1234 1 0.950284 0.456137 0.745155
844 1 0.888476 0.346041 0.726966
993 1 0.906959 0.382077 0.805429
77 1 0.555818 0.498368 0.719534
1193 1 0.898506 0.412087 0.742253
1511 1 0.764557 0.0649306 0.834783
828 1 0.966794 0.46314 0.822986
431 1 0.998988 0.430487 0.666499
953 1 0.863317 0.347169 0.859265
1466 1 0.168307 0.0336741 0.926837
1285 1 0.0787284 0.119463 0.834123
1917 1 0.900482 0.0674929 0.878674
1785 1 0.134246 0.118512 0.89947
696 1 0.0730056 0.208608 0.872077
1342 1 0.145247 0.170445 0.839386
562 1 0.959795 0.139047 0.926043
210 1 0.0882263 0.0533632 0.790371
193 1 0.906046 0.407515 0.523988
1362 1 0.147072 0.117 0.980652
1945 1 0.565334 0.0784446 0.944748
1542 1 0.284425 0.339034 0.542873
2001 1 0.206562 0.147022 0.806648
626 1 0.158433 0.0983862 0.836466
1031 1 0.980843 0.334534 0.53566
301 1 0.209205 0.162938 0.88236
1750 1 0.156591 0.480424 0.86102
1899 1 0.858429 0.484711 0.61767
707 1 0.295215 0.0394964 0.884818
152 1 0.897132 0.105513 0.948845
1644 1 0.232873 0.0622069 0.831086
1324 1 0.0401163 0.00770297 0.612762
1586 1 0.427966 0.0788233 0.991051
1073 1 0.883172 0.450253 0.816374
750 1 0.40169 0.207494 0.719751
1417 1 0.326027 0.107424 0.934591
1088 1 0.699053 0.0094094 0.939297
1861 1 0.445692 0.0422746 0.846136
527 1 0.37587 0.322581 0.515837
986 1 0.510003 0.159097 0.823845
612 1 0.873306 0.0253647 0.61773
825 1 0.369183 0.0515802 0.893867
1237 1 0.427552 0.122129 0.872099
1094 1 0.497758 0.0484705 0.935039
1177 1 0.947031 0.0152089 0.623032
1579 1 0.451262 0.211139 0.935064
1684 1 0.395252 0.139611 0.936658
864 1 0.714841 0.413637 0.991667
1575 1 0.625102 0.025694 0.915024
1014 1 0.49493 0.129629 0.910423
971 1 0.490732 0.493473 0.628461
764 1 0.643646 0.169013 0.965233
474 1 0.108991 0.0266259 0.570974
1808 1 0.00751693 0.184729 0.970253
1571 1 0.414331 5.48788e-06 0.96946
715 1 0.243092 0.471121 0.534821
137 1 0.786915 0.485024 0.787793
1457 1 0.823477 0.110727 0.954066
507 1 0.833132 0.0381139 0.862906
151 1 0.400976 0.0932362 0.535222
1192 1 0.313142 0.123089 0.550909
629 1 0.803728 0.101206 0.887224
406 1 0.78622 0.0192955 0.924853
1076 1 0.731143 0.0817117 0.948775
1293 1 0.365438 0.0474125 0.971199
1405 1 0.62363 0.0856324 0.996736
542 1 0.818158 0.484566 0.88826
143 1 0.87936 0.133389 0.873453
661 1 0.369979 0.153172 0.592851
490 1 0.227316 0.022342 0.560177
1751 1 0.331287 0.0100609 0.72262
1352 1 0.117912 0.0381504 0.877123
1997 1 0.975335 0.0607532 0.903047
154 1 0.143231 0.275795 0.859995
356 1 0.0904256 0.166217 0.931438
294 1 0.0575753 0.275811 0.839725
1396 1 0.0450513 0.126325 0.990858
1936 1 0.103157 0.243134 0.922372
1054 1 0.605096 0.462456 0.860472
1547 1 0.997521 0.233164 0.912492
947 1 0.0485712 0.29568 0.9184
1220 1 0.281765 0.214283 0.882222
296 1 0.304886 0.294317 0.889239
1524 1 0.978939 0.0332719 0.798042
1226 1 0.223573 0.0998985 0.916775
1139 1 0.176866 0.224692 0.912153
1129 1 0.345571 0.253375 0.832596
358 1 0.233339 0.261976 0.937683
1227 1 0.151813 0.293108 0.947503
1816 1 0.20192 0.217991 0.822851
1223 1 0.285798 0.116105 0.881872
332 1 0.47475 0.221561 0.779743
668 1 0.3043 0.242947 0.956897
347 1 0.128144 0.49792 0.658643
1519 1 0.165304 0.367026 0.957698
57 1 0.340008 0.181236 0.910837
52 1 0.418602 0.296485 0.958606
1157 1 0.855299 0.184873 0.542965
135 1 0.298136 0.0119186 0.624536
231 1 0.408112 0.217559 0.86371
1315 1 0.399788 0.29258 0.877194
1661 1 0.805247 0.0364226 0.996762
968 1 0.516787 0.264184 0.902289
862 1 0.51179 0.261752 0.984443
206 1 0.58343 0.266294 0.961454
84 1 0.52358 0.184199 0.960169
1035 1 0.736184 0.310563 0.907959
1118 1 0.751618 0.176647 0.943854
824 1 0.67896 0.0482416 0.558415
1339 1 0.589561 0.25101 0.884787
1965 1 0.564965 0.327596 0.890902
1483 1 0.482913 0.348432 0.897403
1155 1 0.667015 0.479476 0.812784
826 1 0.661807 0.238026 0.928622
1382 1 0.641301 0.316212 0.910793
108 1 0.967105 0.00818838 0.72136
359 1 0.68823 0.452838 0.887093
1200 1 0.808938 0.299101 0.90679
82 1 0.899849 0.300092 0.907547
144 1 0.708685 0.303638 0.831338
264 1 0.863239 0.265386 0.962427
746 1 0.76658 0.360458 0.849237
1470 1 0.0502738 0.334633 0.99069
191 1 0.968961 0.301915 0.887539
1633 1 0.986006 0.259657 0.831331
1907 1 0.934737 0.319031 0.823148
1351 1 0.847364 0.451335 0.530911
1580 1 0.788346 0.25354 0.957429
1923 1 0.8376 0.202208 0.918822
857 1 0.575033 0.0186538 0.978566
1210 1 0.183634 0.171689 0.961374
1867 1 0.710156 0.218018 1.00014
1745 1 0.0412466 0.48612 0.82504
1967 1 0.0165279 0.245885 0.528116
1295 1 0.0648252 0.43081 0.868785
514 1 0.242023 0.120927 0.674095
51 1 0.184884 0.468967 0.946711
249 1 0.0877374 0.38761 0.938316
1548 1 0.469688 0.0929594 0.551479
298 1 0.00379807 0.377396 0.946889
1145 1 0.241902 0.395706 0.886565
139 1 0.717004 0.0041931 0.597878
697 1 0.321641 0.457599 0.854579
927 1 0.236098 0.378242 0.984139
1110 1 0.21979 0.307357 0.871719
1167 1 0.129073 0.335639 0.895865
1749 1 0.317282 0.385618 0.942767
1638 1 0.144228 0.406638 0.894136
260 1 0.392576 0.477316 0.829726
1910 1 0.62173 0.355184 0.97022
975 1 0.377358 0.236606 0.947012
1044 1 0.291151 0.312086 0.962362
14 1 0.319547 0.365396 0.842379
146 1 0.451783 0.443418 0.843231
1292 1 0.411675 0.407886 0.970788
482 1 0.279614 0.429947 0.985828
255 1 0.486523 0.427897 0.911364
60 1 0.485833 0.345691 0.826332
1784 1 0.518246 0.416697 0.842469
241 1 0.399968 0.362451 0.91631
1211 1 0.807372 0.377192 0.907412
1404 1 0.979195 0.435524 0.98667
832 1 0.329408 0.465429 0.509953
567 1 0.558603 0.426577 0.923878
9 1 0.728714 0.385153 0.915998
1229 1 0.504798 0.495422 0.927123
1640 1 0.634778 0.424681 0.983953
440 1 0.776651 0.345748 0.964482
495 1 0.19235 0.48975 0.626477
256 1 0.0183435 0.492751 0.940111
995 1 0.184103 0.136834 0.500193
1995 1 0.138714 0.486058 0.738631
768 1 0.202244 0.0544465 0.993546
716 1 0.600981 0.132059 0.554871
1766 1 0.0251439 0.464191 0.760839
706 1 0.705581 0.499736 0.67446
785 1 0.437797 0.236643 0.995416
229 1 0.50202 0.211393 0.510721
1108 1 0.23186 0.145291 1.00047
1747 1 0.507862 0.114034 0.988381
1888 1 0.521506 0.412402 0.501063
1714 1 0.529076 0.493956 0.994561
1286 1 0.304501 0.193877 0.505549
752 1 0.0458165 0.25251 0.990719
412 1 0.00190982 0.658234 0.13501
769 1 0.0227486 0.689842 0.0631472
520 1 0.746485 0.638144 0.00651083
75 1 0.661261 0.951178 0.241061
56 1 0.0928856 0.568936 0.19961
1081 1 0.0317137 0.576727 0.231108
740 1 0.990255 0.522586 0.106632
234 1 0.06858 0.645953 0.186022
233 1 0.0458805 0.864942 0.0147534
1544 1 0.423525 0.654386 0.446273
399 1 0.07817 0.631863 0.0905797
128 1 0.238062 0.567657 0.246935
165 1 0.204052 0.534544 0.158861
1726 1 0.0733947 0.742319 0.0829986
1705 1 0.129363 0.687357 0.105376
732 1 0.117003 0.55926 0.036439
22 1 0.550231 0.968484 0.465817
1934 1 0.643591 0.710588 0.0850077
1797 1 0.380371 0.570804 0.206111
310 1 0.676546 0.54051 0.33288
113 1 0.333247 0.581635 0.142247
1097 1 0.326478 0.589511 0.0501112
569 1 0.44402 0.607957 0.149805
1186 1 0.393028 0.618541 0.0897796
856 1 0.394334 0.540191 0.143385
100 1 0.262068 0.93524 0.465163
1141 1 0.321872 0.541217 0.413898
461 1 0.33028 0.510893 0.0894368
708 1 0.563112 0.667999 0.0670337
80 1 0.417139 0.700343 0.0360337
1383 1 0.452365 0.564753 0.0668216
721 1 0.470204 0.640478 0.0856403
684 1 0.489895 0.677364 0.161805
758 1 0.408775 0.982275 0.124013
1507 1 0.129697 0.978896 0.0888015
1056 1 0.920412 0.992752 0.135219
796 1 0.644079 0.569926 0.124995
819 1 0.195467 0.639748 0.104909
1650 1 0.717096 0.61495 0.0728458
174 1 0.639635 0.636775 0.0834239
582 1 0.709443 0.521928 0.159813
763 1 0.528824 0.622961 0.127959
407 1 0.672534 0.917906 0.309365
1493 1 0.317039 1.0008 0.457085
1802 1 0.841619 0.897273 0.486499
1597 1 0.203693 0.716847 0.475114
1887 1 0.826401 0.597502 0.101798
89 1 0.800225 0.669745 0.0667196
1707 1 0.864548 0.515084 0.176263
1518 1 0.919288 0.535879 0.125944
500 1 0.0977319 0.935147 0.499451
1956 1 0.471992 0.656932 0.00646452
1630 1 0.0102184 0.601788 0.0925497
436 1 0.997677 0.513706 0.196613
276 1 0.627367 0.500726 0.164565
1112 1 0.887241 0.691513 0.476834
424 1 0.902994 0.849909 0.49215
177 1 0.0415455 0.536635 0.0373308
1872 1 0.220887 0.513942 0.0176612
1490 1 0.518731 0.861583 0.426655
1303 1 0.0832959 0.822049 0.0990908
944 1 0.0697714 0.838428 0.251232
1591 1 0.0700938 0.804155 0.178343
1024 1 0.00480604 0.819632 0.151687
1948 1 0.108177 0.70754 0.0275575
614 1 0.565911 0.513256 0.230355
478 1 0.162117 0.841444 0.102624
1247 1 0.143418 0.639196 0.0468109
801 1 0.14176 0.766877 0.121261
1782 1 0.194274 0.701078 0.0614101
183 1 0.127894 0.611413 0.151641
530 1 0.235414 0.776689 0.243897
155 1 0.240227 0.612449 0.154964
140 1 0.0600407 0.944068 0.351912
1614 1 0.219355 0.766411 0.120456
37 1 0.343575 0.775681 0.0143122
599 1 0.000654514 0.524066 0.446384
1728 1 0.34957 0.742722 0.176194
1522 1 0.334814 0.696136 0.0362535
30 1 0.160042 0.575783 0.0899654
476 1 0.275223 0.791772 0.0550347
1183 1 0.409868 0.722003 0.104337
370 1 0.121353 0.589781 0.351994
519 1 0.934101 0.677095 0.0308721
1154 1 0.438031 0.791602 0.0661464
923 1 0.553379 0.664009 0.199938
376 1 0.510746 0.803995 0.0472169
1970 1 0.204511 0.562252 0.484698
1800 1 0.976676 0.930733 0.461681
1235 1 0.364764 0.67149 0.14607
980 1 0.491068 0.729749 0.030811
187 1 0.942547 0.868452 0.428829
1350 1 0.575753 0.83884 0.0408956
568 1 0.480441 0.937113 0.430552
35 1 0.709638 0.772824 0.0973935
1711 1 0.633258 0.786223 0.0804944
157 1 0.586549 0.734696 0.245732
1259 1 0.588215 0.693163 0.135562
1585 1 0.739764 0.709731 0.0598258
1269 1 0.767955 0.792148 0.0617156
1930 1 0.755443 0.62165 0.140146
1865 1 0.731437 0.765742 0.169763
978 1 0.643278 1.00232 0.0235709
1912 1 0.509239 0.539844 0.423862
23 1 0.84818 0.812793 0.103268
138 1 0.622656 0.627682 0.169999
957 1 0.804314 0.730279 0.17557
680 1 0.19592 0.580613 0.0127056
966 1 0.978053 0.755341 0.0612864
681 1 0.493939 0.696255 0.494774
259 1 0.899712 0.836966 0.0542692
673 1 0.0236726 0.745652 0.127369
1737 1 0.879782 0.673641 0.0814929
94 1 0.816012 0.73411 0.104209
248 1 0.945749 0.614253 0.0666086
1754 1 0.942004 0.553389 0.301625
2011 1 0.356585 0.984159 0.0766564
119 1 0.845194 0.749375 0.449856
232 1 0.133075 0.781675 0.0432766
1979 1 0.0150126 0.815304 0.0604721
789 1 0.0991777 0.902925 0.0774928
203 1 0.133258 0.929564 0.146445
834 1 0.0777571 0.8782 0.175409
81 1 0.826883 0.525391 0.0414913
12 1 0.197861 0.975388 0.0605145
1323 1 0.0716747 0.736102 0.48266
1368 1 0.246012 0.881075 0.0600353
886 1 0.205083 0.803475 0.182325
286 1 0.20896 0.873316 0.151354
273 1 0.231935 0.85914 0.222959
1275 1 0.175142 0.90548 0.063827
454 1 0.156273 0.865908 0.212454
336 1 0.194179 0.93707 0.194957
184 1 0.49426 0.693248 0.414243
1207 1 0.446737 0.845249 0.468831
1356 1 0.196727 0.817811 0.0426228
1962 1 0.21998 0.931809 0.113689
430 1 0.347427 0.952561 0.140166
1032 1 0.275394 0.917391 0.172033
1724 1 0.269663 0.838364 0.125206
1778 1 0.577575 0.840467 0.491686
979 1 0.321961 0.901966 0.0964674
1459 1 0.519306 0.905884 0.491921
297 1 0.666204 0.535843 0.0515957
1278 1 0.403343 0.86598 0.169092
714 1 0.432873 0.97778 0.0501719
1436 1 0.332192 0.799989 0.109708
773 1 0.588862 0.948958 0.106594
1338 1 0.411265 0.908817 0.0689923
39 1 0.497291 0.909106 0.0754594
1191 1 0.570741 0.744315 0.0603428
1437 1 0.244425 0.511723 0.0922676
1680 1 0.763446 0.971933 0.41286
1539 1 0.914671 0.766251 0.493621
1298 1 0.056024 0.57007 0.425563
467 1 0.542461 0.868151 0.150662
1279 1 0.0948298 0.52816 0.122357
2043 1 0.616154 0.863531 0.107877
1127 1 0.678724 0.841495 0.063566
998 1 0.578154 0.951369 0.0199806
380 1 0.561722 0.800282 0.121085
665 1 0.463306 0.857301 0.124751
777 1 0.776372 0.97767 0.085299
1503 1 0.82913 0.513999 0.444506
1569 1 0.887891 0.947875 0.492468
1469 1 0.773592 0.897688 0.447862
2008 1 0.387794 0.564326 0.488724
1070 1 0.793671 0.861877 0.0615492
849 1 0.841078 0.863514 0.00457621
1720 1 0.154935 0.875514 0.491523
537 1 0.735397 0.841046 0.140455
435 1 0.879492 0.995276 0.235397
1827 1 0.0353869 0.889889 0.115268
860 1 0.0496284 0.94554 0.0327708
1873 1 0.960718 0.855542 0.0940656
565 1 0.244566 0.80655 0.494558
1719 1 0.966861 0.865688 0.00179092
943 1 0.300729 0.515077 0.248207
1794 1 0.183898 0.912237 0.419189
1992 1 0.986058 0.926904 0.0706162
1296 1 0.905491 0.906064 0.0223585
426 1 0.157226 0.942454 0.295505
1904 1 0.824191 0.963473 0.450865
1701 1 0.415508 0.587988 0.411784
1916 1 0.987719 0.703911 0.195822
186 1 0.99376 0.607483 0.286632
1666 1 0.058318 0.626622 0.345797
689 1 0.985093 0.631901 0.208407
913 1 0.081447 0.50734 0.340928
1830 1 0.0471003 0.67615 0.256917
1801 1 0.282898 0.534609 0.172606
1689 1 0.621405 0.724556 0.494948
997 1 0.302593 0.854475 0.0173343
737 1 0.210487 0.995067 0.296648
1082 1 0.179959 0.581828 0.210631
1648 1 0.316467 0.598252 0.274203
1314 1 0.195632 0.589777 0.294155
287 1 0.0878309 0.985036 0.296902
1003 1 0.404517 0.606591 0.303494
1095 1 0.324764 0.598885 0.354639
1604 1 0.373307 0.843664 0.0477647
441 1 0.369251 0.529121 0.285028
1172 1 0.440439 0.744636 0.24009
1166 1 0.535184 0.624701 0.263818
1233 1 0.500276 0.615803 0.197563
1454 1 0.290818 0.897779 0.39174
1071 1 0.500067 0.515862 0.0946087
1363 1 0.517067 0.555933 0.264664
1409 1 0.0611377 0.606501 0.0213499
1675 1 0.481615 0.535173 0.166142
415 1 0.797606 0.514891 0.206972
1393 1 0.569525 0.581144 0.189132
1829 1 0.598227 0.532724 0.302994
339 1 0.686651 0.658259 0.137367
637 1 0.718068 0.512316 0.256353
317 1 0.703178 0.584818 0.224463
1474 1 0.625379 0.656893 0.278793
421 1 0.747259 0.573375 0.306538
1634 1 0.148043 0.502164 0.190458
117 1 0.164669 0.976265 0.485178
1835 1 0.384269 0.609837 0.00439297
836 1 0.730027 0.700824 0.189202
1612 1 0.783937 0.608237 0.232815
1807 1 0.945042 0.554936 0.19837
127 1 0.363808 0.844975 0.500828
1698 1 0.043692 0.568955 0.142954
1491 1 0.855719 0.544576 0.329262
427 1 0.713234 0.873653 0.380074
107 1 0.867914 0.941212 0.296127
1438 1 0.834046 0.553333 0.260825
1567 1 0.927157 0.648264 0.143013
1479 1 0.906152 0.623032 0.428028
1875 1 0.965635 0.77934 0.199029
363 1 0.140064 0.722118 0.230905
1844 1 0.099357 0.806562 0.309855
911 1 0.0824409 0.726755 0.172687
1163 1 0.945642 0.815832 0.263654
571 1 0.985673 0.721311 0.271621
770 1 0.170384 0.678492 0.16627
1187 1 0.165739 0.645839 0.23088
1736 1 0.249603 0.641415 0.216492
745 1 0.122469 0.616925 0.280722
1576 1 0.294417 0.714495 0.235561
1353 1 0.282858 0.665297 0.144261
887 1 0.171601 0.765891 0.280243
1878 1 0.291392 0.803857 0.20286
393 1 0.12543 0.792019 0.233249
818 1 0.162468 0.645557 0.349465
496 1 0.222394 0.694679 0.394656
172 1 0.324292 0.627389 0.208645
1340 1 0.374894 0.655703 0.25483
2002 1 0.301207 0.775754 0.277333
1863 1 0.265106 0.63154 0.0777082
4 1 0.342071 0.846678 0.291989
2018 1 0.44516 0.815172 0.200221
608 1 0.361029 0.730907 0.242172
1893 1 0.386499 0.776217 0.296986
309 1 0.235125 0.641236 0.326489
1941 1 0.381243 0.698256 0.323354
1777 1 0.345178 0.926924 0.330963
1780 1 0.445164 0.745994 0.166081
1160 1 0.418266 0.676485 0.2024
1203 1 0.559618 0.698197 0.330557
1263 1 0.491556 0.760537 0.112222
1924 1 0.514997 0.812398 0.239498
21 1 0.407843 0.803116 0.128006
915 1 0.526996 0.735738 0.182653
221 1 0.514083 0.700937 0.259612
161 1 0.583117 0.804888 0.269231
606 1 0.365614 0.804985 0.201443
1949 1 0.713156 0.770392 0.271589
631 1 0.569651 0.796515 0.190353
68 1 0.648317 0.74557 0.143992
1072 1 0.646931 0.542767 0.228628
148 1 0.624839 0.677301 0.353047
1411 1 0.632444 0.781283 0.214995
1348 1 0.630326 0.752674 0.309344
1610 1 0.643541 0.871485 0.233267
1853 1 0.702261 0.693044 0.279093
1381 1 0.803708 0.703079 0.329803
1194 1 0.854647 0.607195 0.178341
1058 1 0.781043 0.741283 0.268265
618 1 0.645059 0.700893 0.214735
1533 1 0.857002 0.680489 0.155871
293 1 0.851285 0.755893 0.370747
1245 1 0.932422 0.785413 0.123977
820 1 0.952541 0.711149 0.115617
1913 1 0.911852 0.605645 0.25518
179 1 0.938258 0.674012 0.30866
106 1 0.861999 0.668023 0.298554
621 1 0.896253 0.759229 0.183171
416 1 0.924111 0.677806 0.220567
1977 1 0.890387 0.839054 0.223146
600 1 0.790062 0.782576 0.339412
1012 1 0.845317 0.689881 0.228097
512 1 0.838997 0.780423 0.263033
552 1 0.904896 0.731811 0.260816
64 1 0.00444246 0.935228 0.227729
1328 1 0.960807 0.868375 0.212963
1441 1 0.277414 0.885087 0.297855
934 1 0.88258 0.916379 0.22268
1587 1 0.115141 0.878588 0.303568
772 1 0.0181665 0.980428 0.414303
212 1 0.00988977 0.990994 0.293091
1667 1 0.199053 0.904009 0.34359
1534 1 0.222729 0.922419 0.269235
1246 1 0.486317 0.998606 0.258979
1162 1 0.187828 0.832067 0.317233
1757 1 0.0144478 0.516737 0.280136
581 1 0.260804 0.821051 0.334331
639 1 0.280274 0.956677 0.335444
271 1 0.701109 1.00001 0.205134
956 1 0.43851 0.941577 0.239612
1739 1 0.346367 0.908114 0.240767
272 1 0.644622 0.99285 0.16394
1658 1 0.325987 0.861276 0.168961
1672 1 0.428577 0.967188 0.310719
992 1 0.273741 0.988888 0.216357
640 1 0.530998 0.584987 0.490138
1903 1 0.335222 0.978557 0.272331
1998 1 0.594922 0.94484 0.262035
1369 1 0.846754 0.622007 0.0288461
1554 1 0.467296 0.963053 0.175032
901 1 0.5085 0.920396 0.273902
1090 1 0.380546 0.981109 0.209042
114 1 0.498636 0.890269 0.203756
265 1 0.0683459 0.972088 0.183904
47 1 0.622541 0.904578 0.172135
224 1 0.563294 0.869508 0.239652
1642 1 0.574285 0.953692 0.341752
1590 1 0.506614 0.813458 0.170556
906 1 0.64535 0.825531 0.162024
1017 1 0.694868 0.819142 0.211463
2041 1 0.696874 0.893805 0.186854
1560 1 0.851439 0.896213 0.0785613
1481 1 0.892794 0.915583 0.405446
1831 1 0.761041 0.871262 0.245345
1762 1 0.915588 0.931901 0.0907579
1922 1 0.79613 0.866568 0.167173
1105 1 0.764347 0.79722 0.229263
800 1 0.73496 0.830882 0.303138
439 1 0.709088 0.797528 0.406537
806 1 0.8701 0.868115 0.356419
1639 1 0.715026 0.723435 0.34757
413 1 0.656098 0.823428 0.300286
1545 1 0.874988 0.934672 0.154291
167 1 0.903778 0.85783 0.151799
223 1 0.810784 0.919467 0.216829
199 1 0.943343 0.964137 0.247686
303 1 0.827461 0.807061 0.181333
1158 1 0.835133 0.847665 0.287275
1791 1 0.0097278 0.528875 0.355183
197 1 0.134451 0.944715 0.0173865
723 1 0.983486 0.619769 0.355166
704 1 0.96414 0.576741 0.429715
1392 1 0.283049 0.97271 0.123599
539 1 0.0313142 0.677355 0.405699
617 1 0.126852 0.523395 0.457958
505 1 0.301954 0.527617 0.338675
1964 1 0.909479 0.884364 0.290416
1670 1 0.176042 0.642692 0.477843
1358 1 0.547692 0.97353 0.197004
1636 1 0.139983 0.597019 0.432315
158 1 0.241365 0.513801 0.298433
160 1 0.356009 0.92774 0.0106714
130 1 0.216813 0.517155 0.406605
1240 1 0.27596 0.664699 0.441724
1856 1 0.535854 0.996766 0.0579784
2044 1 0.140429 0.983162 0.232057
577 1 0.244049 0.571471 0.341467
313 1 0.840667 0.633309 0.4714
1364 1 0.246684 0.737647 0.316064
1310 1 0.350684 0.629661 0.480243
220 1 0.277144 0.593107 0.414328
1268 1 0.783271 0.964666 0.167128
794 1 0.317025 0.667694 0.34624
1570 1 0.449216 0.63939 0.367432
1184 1 0.446722 0.563399 0.342969
493 1 0.519809 0.60651 0.333097
701 1 0.57696 0.633473 0.386331
251 1 0.561758 0.555983 0.364709
1891 1 0.503232 0.612464 0.430295
965 1 0.641473 0.881632 -0.00176314
536 1 0.460131 0.573166 0.234212
450 1 0.0399264 0.748608 0.0163659
650 1 0.676662 0.614804 0.389136
670 1 0.685364 0.611519 0.287054
414 1 0.61648 0.551583 0.424141
691 1 0.620932 0.587964 0.339924
2032 1 0.629468 0.613298 0.465936
1300 1 0.710014 0.625146 0.466271
756 1 0.729794 0.531303 0.410031
319 1 0.786631 0.575817 0.469899
34 1 0.814909 0.622755 0.328778
340 1 0.674203 0.553249 0.469792
348 1 0.736613 0.63947 0.343279
1149 1 0.99336 0.734251 0.374671
379 1 0.801438 0.663665 0.418042
1497 1 0.934417 0.998064 0.392788
635 1 0.91694 0.520656 0.377262
549 1 0.812566 0.930013 0.378774
622 1 0.894257 0.615621 0.349419
1668 1 0.95666 0.64366 0.475624
1674 1 0.896935 0.544904 0.474721
121 1 0.121002 0.82922 0.371338
835 1 0.0265679 0.69106 0.332848
602 1 0.0569832 0.76546 0.250483
981 1 0.107673 0.762599 0.386414
1453 1 0.0378599 0.900463 0.443552
247 1 0.128597 0.697201 0.3012
645 1 0.0987282 0.687884 0.369115
1115 1 0.17736 0.736495 0.350953
1102 1 0.0602552 0.809886 0.440017
821 1 0.432508 0.744356 0.497144
603 1 0.206148 0.826502 0.387816
429 1 0.757698 0.908911 0.10886
1663 1 0.152772 0.692775 0.422862
1373 1 0.594668 0.89203 0.392548
1198 1 0.270105 0.521838 0.47116
202 1 0.204931 0.615195 0.397929
1526 1 0.804324 0.821504 0.469358
517 1 0.796209 0.945753 0.017327
375 1 0.201463 0.764287 0.421484
1152 1 0.379126 0.659475 0.386799
1818 1 0.324105 0.77412 0.468129
116 1 0.409977 0.875164 0.409176
320 1 0.266361 0.738151 0.45524
866 1 0.318299 0.743477 0.37976
921 1 0.592771 0.55012 0.0175367
464 1 0.425854 0.8268 0.34303
1159 1 0.37107 0.727942 0.427991
78 1 0.297908 0.955631 0.0439513
1500 1 0.751654 0.501642 0.0161528
26 1 0.515164 0.741155 0.366567
354 1 0.464181 0.641802 0.260793
1729 1 0.463836 0.693938 0.322533
1343 1 0.561085 0.671991 0.465978
267 1 0.462219 0.775493 0.303768
1987 1 0.433178 0.731231 0.38692
1559 1 0.65025 0.753576 0.379249
726 1 0.628497 0.674806 0.425311
1290 1 0.576385 0.722433 0.40634
1532 1 0.571778 0.794268 0.364735
492 1 0.677068 0.742052 0.446175
793 1 0.710499 0.861391 0.452868
1502 1 0.0982422 0.983696 0.434595
1771 1 0.782003 0.735879 0.415663
717 1 0.989279 0.719401 0.464014
842 1 0.862423 0.681356 0.380052
1147 1 0.769959 0.596389 0.399815
1020 1 0.973643 0.80786 0.458631
443 1 0.714955 0.677253 0.409013
1366 1 0.713698 0.952483 0.0580635
504 1 0.892437 0.790895 0.326147
180 1 0.951727 0.678802 0.398248
823 1 0.00550865 0.805251 0.386019
924 1 0.469554 0.99896 0.477291
1074 1 0.906993 0.751194 0.0299711
1156 1 0.920133 0.766895 0.40254
1272 1 0.94549 0.86087 0.355636
2012 1 0.0687537 0.920837 0.253508
929 1 0.977263 0.778603 0.320481
1170 1 0.762572 0.682547 0.483716
1606 1 0.857566 0.97638 0.0828115
1078 1 0.605063 0.866988 0.309552
384 1 0.127392 0.923857 0.384326
624 1 0.0153386 0.877937 0.298991
1665 1 0.0792148 0.742189 0.322318
1435 1 0.0568077 0.87063 0.37974
1552 1 0.91198 0.507888 0.0407968
1434 1 0.633827 0.676379 0.0070391
1961 1 0.796223 0.527921 0.387095
1898 1 0.841106 0.585494 0.415341
1444 1 0.63758 0.918188 0.0633344
1555 1 0.271412 0.820187 0.429404
1609 1 0.980029 0.933074 0.364465
1986 1 0.345328 0.9208 0.427202
2022 1 0.35541 0.816988 0.373137
156 1 0.402189 0.946691 0.392933
1556 1 0.407003 0.79934 0.426169
731 1 0.407537 0.881262 0.287407
1944 1 0.108271 0.873057 0.432153
207 1 0.417287 0.917574 0.458293
6 1 0.522004 0.759108 0.443813
1496 1 0.748643 0.513133 0.476619
1529 1 0.792767 0.842072 0.398583
1333 1 0.484494 0.891784 0.351934
840 1 0.473459 0.84792 0.278884
449 1 0.493168 0.976425 0.349359
1854 1 0.477832 0.807987 0.401273
194 1 0.757455 0.539078 0.0865354
1284 1 0.732517 0.955872 0.344844
679 1 0.611549 0.791331 0.446033
1824 1 0.511703 0.819189 0.327597
1040 1 0.882735 0.818463 0.427995
1043 1 0.649404 0.894731 0.47647
1016 1 0.337737 0.979142 0.385219
688 1 0.580563 0.594011 0.0803815
458 1 0.671749 0.734648 0.00310067
1449 1 0.0907464 0.654217 0.444836
730 1 0.712938 0.994034 0.279945
2003 1 0.399779 0.51556 0.0515371
1999 1 0.892957 0.503352 0.240678
1615 1 0.455913 0.607197 0.481178
989 1 0.631696 0.952326 0.419179
211 1 0.934519 0.966462 0.319599
2030 1 0.996135 0.632684 0.0153828
1401 1 0.956579 0.919739 0.154518
591 1 0.515467 0.970306 0.129672
1408 1 0.690161 0.936309 0.131904
1089 1 0.124382 0.787815 0.470155
1679 1 0.426002 0.508191 0.202327
1955 1 0.51301 0.89174 0.00240984
917 1 0.609085 0.52209 0.498197
1764 1 0.0247483 0.627114 0.518997
994 1 0.103631 0.588135 0.65155
373 1 0.0350732 0.508697 0.618306
170 1 0.198463 0.598399 0.549422
1475 1 0.695588 0.511796 0.529292
1703 1 0.072986 0.644721 0.56976
535 1 0.0478903 0.581168 0.584796
216 1 0.407484 0.553942 0.611864
674 1 0.249007 0.533872 0.562686
1858 1 0.176132 0.517017 0.554391
1605 1 0.270991 0.617231 0.583177
1199 1 0.195978 0.687938 0.620811
883 1 0.934831 0.529019 0.783726
246 1 0.854644 0.502585 0.955346
1021 1 0.602659 0.671607 0.934778
930 1 0.323696 0.620333 0.625957
352 1 0.928178 0.708575 0.537109
1175 1 0.375866 0.719235 0.598495
580 1 0.419523 0.613078 0.539878
874 1 0.576255 0.994544 0.714205
1769 1 0.577269 0.607277 0.559724
651 1 0.467086 0.610254 0.604317
1132 1 0.429358 0.907395 0.992456
1760 1 0.428656 0.626193 0.6654
1289 1 0.620834 0.634142 0.616616
1151 1 0.524199 0.634656 0.660217
1635 1 0.84673 0.985703 0.819474
1937 1 0.080897 0.862019 0.512971
1345 1 0.146387 0.840381 1.0006
2033 1 0.558728 0.678011 0.587871
1565 1 0.718534 0.63176 0.556388
402 1 0.670282 0.971217 0.652527
1335 1 0.659981 0.576309 0.670519
311 1 0.456197 0.541286 0.980654
1189 1 0.547388 0.70583 0.652326
1682 1 0.615913 0.697505 0.742004
33 1 0.482023 0.57436 0.908789
949 1 0.734346 0.567695 0.704934
5 1 0.652442 0.575681 0.545656
1370 1 0.93936 0.794155 0.559414
1669 1 0.754637 0.941256 0.933696
1877 1 0.30644 0.660222 0.976215
1430 1 0.0739231 0.980494 0.811346
1811 1 0.326152 0.539228 0.538547
654 1 0.796825 0.632887 0.5241
713 1 0.835933 0.524431 0.509618
470 1 0.379776 0.499642 0.973858
1637 1 0.892423 0.684535 0.616736
1060 1 0.926933 0.576823 0.649091
459 1 0.965031 0.565115 0.580986
1515 1 0.896923 0.524707 0.560163
1676 1 0.53496 0.838142 0.9543
1174 1 0.207443 0.899979 0.981413
1599 1 0.585261 0.896305 0.954679
365 1 0.912884 0.633953 0.686629
1806 1 0.466577 0.713164 0.945417
999 1 0.975678 0.852676 0.602648
838 1 0.308753 0.550781 0.606785
1471 1 0.171931 0.740189 0.540632
1165 1 0.158248 0.660255 0.571524
1231 1 0.379519 0.506281 0.749524
557 1 0.17311 0.768683 0.607047
178 1 0.0861932 0.776238 0.62248
1182 1 0.927018 0.787175 0.640348
1654 1 0.546502 0.537743 0.548411
283 1 0.212951 0.842848 0.570663
1653 1 0.633783 0.506774 0.667102
919 1 0.262723 0.782043 0.631393
244 1 0.0875417 0.71921 0.56931
1257 1 0.281723 0.736764 0.530418
742 1 0.270673 0.751937 0.70426
918 1 0.32083 0.762923 0.59164
1918 1 0.332474 0.731258 0.665267
556 1 0.317366 0.823695 0.659987
282 1 0.376738 0.785249 0.627757
366 1 0.367406 0.761715 0.532092
694 1 0.166338 0.651797 0.973536
841 1 0.408365 0.710975 0.671162
190 1 0.350108 0.658879 0.687454
102 1 0.462156 0.839616 0.609807
1201 1 0.537897 0.853966 0.580771
1372 1 0.482032 0.689515 0.635795
586 1 0.559898 0.776484 0.625525
45 1 0.615703 0.730798 0.579505
875 1 0.451575 0.688812 0.570059
987 1 0.00583602 0.990527 0.901351
724 1 0.428304 0.772183 0.984224
88 1 0.0580234 0.551862 0.80358
771 1 0.553801 0.729373 0.530419
2017 1 0.693302 0.769404 0.57357
71 1 0.626788 0.841449 0.565764
1509 1 0.690475 0.680674 0.666415
1287 1 0.187699 0.930043 0.816645
693 1 0.726104 0.839907 0.584155
1395 1 0.683689 0.696872 0.591696
1656 1 0.776262 0.82251 0.681653
1914 1 0.834658 0.759076 0.678439
1359 1 0.340662 0.610805 0.547486
1329 1 0.839978 0.747725 0.528135
2038 1 0.80653 0.831831 0.616019
1260 1 0.453994 0.511039 0.877433
1909 1 0.885069 0.754349 0.588192
92 1 0.764443 0.790466 0.529156
44 1 0.251078 0.82388 0.981836
1735 1 0.0235507 0.758714 0.598056
1271 1 0.988063 0.678692 0.567976
1489 1 0.0444538 0.690465 0.621435
907 1 0.839913 0.681342 0.550797
759 1 0.902921 0.791512 0.708478
1128 1 0.959197 0.940812 0.976366
790 1 0.923282 0.71903 0.695538
1838 1 0.133943 0.796779 0.558464
1651 1 0.812202 0.985929 0.516428
744 1 0.145166 0.964379 0.581164
722 1 0.0161939 0.89396 0.529681
1655 1 0.919271 0.897929 0.547813
1673 1 0.0495938 0.850873 0.595341
1098 1 0.21947 0.907476 0.684618
91 1 0.262181 0.958489 0.907333
1600 1 0.125437 0.871321 0.569135
355 1 0.166141 0.811811 0.704912
2029 1 0.278171 0.945223 0.582367
1146 1 0.66318 0.824989 0.504747
725 1 0.184432 0.899721 0.620161
1885 1 0.470713 0.899454 0.552388
1178 1 0.336177 0.888898 0.604908
1472 1 0.286715 0.824214 0.564214
1890 1 0.251753 0.835661 0.710398
1119 1 0.232036 0.981559 0.642745
1280 1 0.599182 0.748158 0.687661
778 1 0.610027 0.91257 0.681247
1561 1 0.518794 0.771859 0.981872
323 1 0.417015 0.828816 0.544561
1465 1 0.937309 0.978114 0.83884
984 1 0.426287 0.90135 0.61552
646 1 0.493488 0.968762 0.543447
961 1 0.51549 0.996555 0.666981
698 1 0.679491 0.896603 0.54904
973 1 0.540155 0.925216 0.625929
526 1 0.646468 0.795711 0.639924
1355 1 0.541367 0.858565 0.6579
933 1 0.507773 0.95228 0.740176
1391 1 0.614767 0.916931 0.597637
1583 1 0.732621 0.927487 0.59898
466 1 0.308694 0.940796 0.655821
1765 1 0.576131 0.97556 0.561774
1196 1 0.785886 0.973496 0.591899
335 1 0.842266 0.970073 0.641616
948 1 0.752577 0.859628 0.517815
397 1 0.735585 0.936137 0.507802
110 1 0.807989 0.896989 0.554859
1504 1 0.696363 0.547076 0.615364
1427 1 0.841384 0.823412 0.54316
228 1 0.490559 0.638493 0.528369
497 1 0.462137 0.968778 0.917684
1530 1 0.0324197 0.529361 0.531728
53 1 0.41963 0.973581 0.860301
739 1 0.224624 0.902384 0.539887
743 1 0.286008 0.589192 0.501123
1692 1 0.932382 0.985443 0.545403
1822 1 0.0181074 0.789848 0.520986
164 1 0.928926 0.90562 0.621159
867 1 0.806246 0.901147 0.63186
892 1 0.941219 0.852097 0.682847
982 1 0.734322 0.536093 0.950643
1309 1 0.853332 0.96492 0.568427
411 1 0.0840881 0.577167 0.725258
1906 1 0.152382 0.64548 0.665203
498 1 0.172932 0.629847 0.808163
1734 1 0.724108 0.525409 0.778575
1433 1 0.0665814 0.715613 0.727565
1276 1 0.112118 0.655789 0.724594
1423 1 0.0169619 0.585177 0.685594
1866 1 0.39624 0.957501 0.777084
1589 1 0.211686 0.616669 0.625833
1702 1 0.25588 0.561218 0.656337
908 1 0.236734 0.505012 0.71165
1531 1 0.947288 0.524591 0.946681
1385 1 0.256357 0.680614 0.681911
1652 1 0.209601 0.603053 0.698631
641 1 0.204516 0.56651 0.850641
1219 1 0.172828 0.562928 0.762401
1607 1 0.129188 0.708333 0.628554
263 1 0.164393 0.557458 0.657888
1100 1 0.266301 0.5658 0.766696
1983 1 0.317098 0.538853 0.720062
420 1 0.282975 0.701536 0.615773
1347 1 0.39763 0.605635 0.780938
1713 1 0.466259 0.732536 0.874851
1484 1 0.348956 0.559688 0.83577
19 1 0.516304 0.542819 0.675222
870 1 0.498446 0.542644 0.748005
1171 1 0.288708 0.61694 0.6955
727 1 0.446925 0.519074 0.802525
1805 1 0.459684 0.630011 0.79883
1814 1 0.381324 0.676167 0.851975
525 1 0.501276 0.611949 0.729096
162 1 0.433101 0.552489 0.688771
1947 1 0.47217 0.541979 0.50854
72 1 0.610985 0.621745 0.756114
1981 1 0.671782 0.578889 0.804928
67 1 0.596643 0.65826 0.689486
902 1 0.691464 0.627641 0.715213
1188 1 0.762126 0.684722 0.666006
1770 1 0.785566 0.619613 0.701966
387 1 0.589864 0.580591 0.699383
341 1 0.579399 0.549615 0.609773
1626 1 0.668065 0.551365 0.74189
315 1 0.299876 0.667887 0.525641
432 1 0.58876 0.691929 0.809642
1776 1 0.789133 0.540604 0.739826
663 1 0.750655 0.610211 0.793023
1010 1 0.837264 0.575701 0.572248
2024 1 0.832481 0.592969 0.782193
1825 1 0.743683 0.604763 0.642458
888 1 0.85998 0.601655 0.652381
805 1 0.990791 0.969316 0.665485
1460 1 0.865119 0.643828 0.74891
1037 1 0.839227 0.529006 0.67056
1120 1 0.958398 0.562837 0.724267
1775 1 0.00237401 0.723351 0.669404
468 1 0.875104 0.556068 0.737037
1273 1 0.0363285 0.635543 0.736919
1053 1 0.920998 0.611055 0.800309
506 1 0.104924 0.934153 0.877035
939 1 0.128741 0.760828 0.681283
1047 1 0.0624681 0.649207 0.67426
242 1 0.0525264 0.775329 0.694667
1241 1 0.976098 0.655415 0.634181
967 1 0.127674 0.745593 0.769562
1096 1 0.993286 0.724988 0.744414
1616 1 0.0680061 0.767997 0.863786
1761 1 0.0215599 0.864208 0.673652
173 1 0.0471006 0.779295 0.782396
636 1 0.186789 0.819852 0.78507
2005 1 0.227886 0.709357 0.849977
1927 1 0.178179 0.676148 0.742159
349 1 0.205172 0.828567 0.647563
1915 1 0.0946817 0.808437 0.737026
1744 1 0.213121 0.884071 0.757551
419 1 0.201538 0.759291 0.681652
1718 1 0.259527 0.882306 0.626061
1564 1 0.29026 0.73859 0.79869
1628 1 0.347902 0.878764 0.731378
1857 1 0.309509 0.681073 0.743078
1432 1 0.318367 0.819398 0.742884
1578 1 0.499612 0.728514 0.803605
240 1 0.395382 0.717283 0.743554
592 1 0.32717 0.613082 0.786812
678 1 0.400629 0.778431 0.694481
369 1 0.492159 0.772252 0.655348
863 1 0.51808 0.62626 0.843425
572 1 0.67563 0.654262 0.784683
405 1 0.61579 0.885216 0.756233
1553 1 0.53847 0.702309 0.732081
1699 1 0.649652 0.807273 0.858976
1239 1 0.561157 0.769925 0.837684
1025 1 0.461615 0.784399 0.738032
324 1 0.634405 0.768665 0.780962
1584 1 0.547999 0.649246 0.778147
1407 1 0.675376 0.799868 0.716617
644 1 0.679304 0.866053 0.636917
312 1 0.686255 0.719761 0.753946
1759 1 0.730937 0.764792 0.804552
1725 1 0.792469 0.802466 0.81673
1738 1 0.831753 0.809368 0.749823
1451 1 0.861891 0.684988 0.688917
955 1 0.74988 0.739254 0.705993
1468 1 0.877743 0.74947 0.780926
1015 1 0.815406 0.694735 0.749282
274 1 0.883384 0.840492 0.599293
958 1 0.853481 0.682593 0.814955
1412 1 0.775296 0.707754 0.817618
70 1 0.922274 0.687936 0.77019
1988 1 0.856654 0.830481 0.813651
692 1 0.982212 0.793701 0.697054
69 1 0.963847 0.79663 0.815772
95 1 0.954234 0.729595 0.602969
284 1 0.927926 0.863508 0.758339
2046 1 0.0444314 0.686834 0.792585
959 1 0.0545506 0.941042 0.585548
1512 1 0.133967 0.82959 0.638814
960 1 0.631133 0.657562 0.540682
575 1 0.995048 0.911616 0.721398
1319 1 0.0431776 0.991983 0.748604
1613 1 0.83227 0.909691 0.893683
2007 1 0.190171 0.989841 0.85335
969 1 0.0627012 0.923018 0.68009
804 1 0.14743 0.880386 0.725991
1536 1 0.0913242 0.936794 0.743911
198 1 0.110209 0.905381 0.630368
1061 1 0.293309 0.929446 0.767301
1723 1 0.244082 0.975065 0.736834
1842 1 0.495311 0.812209 0.532636
1063 1 0.162067 0.950081 0.707662
266 1 0.339178 0.976594 0.82089
590 1 0.132982 0.524127 0.932356
391 1 0.24528 0.799383 0.839913
1428 1 0.413311 0.847716 0.677628
1265 1 0.381676 0.778716 0.775557
643 1 0.412383 0.898151 0.849855
810 1 0.474511 0.925383 0.797456
1123 1 0.388228 0.935596 0.672447
1431 1 0.451981 0.859641 0.751586
1973 1 0.429167 0.921601 0.727186
410 1 0.914704 0.583 0.983926
2013 1 0.975997 0.865956 0.847841
333 1 0.590149 0.836473 0.803609
1442 1 0.552237 0.920392 0.80826
63 1 0.552553 0.89325 0.730019
797 1 0.483024 0.908207 0.674604
1608 1 0.23721 0.971128 0.981629
728 1 0.614362 0.962292 0.761966
827 1 0.0838309 0.997303 0.921226
1876 1 0.63741 0.514099 0.588491
1935 1 0.635616 0.876485 0.840141
1228 1 0.600732 0.821418 0.722376
1879 1 0.688876 0.84847 0.805902
564 1 0.625616 0.981464 0.854827
852 1 0.741564 0.896185 0.66603
1772 1 0.685345 0.867229 0.733784
655 1 0.0748158 0.867174 0.873304
1455 1 0.534455 0.944914 0.930755
775 1 0.111452 0.531881 0.524624
437 1 0.854834 0.849179 0.669065
1251 1 0.762719 0.934424 0.786671
579 1 0.71725 0.980024 0.867149
1859 1 0.756022 0.859078 0.764063
417 1 0.788905 0.962129 0.86632
425 1 0.784578 0.959694 0.689683
345 1 0.826948 0.975306 0.744394
55 1 0.830513 0.908663 0.80434
598 1 0.900261 0.630116 0.528923
1313 1 0.0502867 0.912491 0.825118
1645 1 0.887377 0.935231 0.68363
1244 1 0.896932 0.950222 0.764765
1487 1 0.979086 0.951672 0.775137
1458 1 0.987213 0.935019 0.58561
2047 1 0.84872 0.879322 0.736911
1297 1 0.0962926 0.652327 0.909114
1664 1 0.0945422 0.623298 0.79761
1984 1 0.0178901 0.548012 0.893702
2036 1 0.0407855 0.615754 0.942
1706 1 0.964851 0.642139 0.74338
1834 1 0.0532344 0.687808 0.863857
1847 1 0.399067 0.673711 0.926159
389 1 0.135511 0.695783 0.505737
1216 1 0.596661 0.977733 0.642086
306 1 0.298771 0.697577 0.867213
494 1 0.110539 0.510899 0.589568
390 1 0.138804 0.537735 0.818333
686 1 0.386473 0.938221 0.550952
129 1 0.281515 0.523039 0.838942
627 1 0.164555 0.702317 0.915231
1316 1 0.404048 0.571069 0.939941
1582 1 0.240116 0.585474 0.914769
1242 1 0.338293 0.552056 0.987003
1143 1 0.549422 0.705691 0.973005
1311 1 0.482603 0.789849 0.917863
880 1 0.633405 0.509266 0.959128
1059 1 0.370157 0.515114 0.89949
1952 1 0.36655 0.742147 0.839697
736 1 0.804032 0.504167 0.583335
950 1 0.312852 0.560753 0.907877
1195 1 0.409329 0.587658 0.874159
351 1 0.682598 0.611416 0.995868
1743 1 0.998612 0.837344 0.751831
632 1 0.555882 0.545595 0.917188
1523 1 0.556268 0.53914 0.800227
570 1 0.0653996 0.516672 0.689892
357 1 0.358483 0.586168 0.683397
563 1 0.992802 0.51242 0.688701
865 1 0.254708 0.720882 0.92527
1413 1 0.625189 0.737041 0.946837
1755 1 0.664876 0.581626 0.921282
1291 1 0.591671 0.905833 0.520203
1144 1 0.746683 0.673662 0.750877
851 1 0.61423 0.543388 0.863701
1517 1 0.126206 0.607314 0.52196
895 1 0.771354 0.675336 0.891997
331 1 0.691655 0.70672 0.515849
1693 1 0.723959 0.561014 0.849374
1038 1 0.793708 0.618452 0.851865
1321 1 0.70616 0.62274 0.872703
1820 1 0.732634 0.619607 0.944159
1796 1 0.870056 0.532439 0.81348
1951 1 0.369111 0.718707 0.974831
672 1 0.94342 0.552139 0.852201
1892 1 0.919774 0.670964 0.941518
561 1 0.0294122 0.994575 0.973296
1939 1 0.021342 0.6238 0.861554
808 1 0.969289 0.600896 0.921817
1019 1 0.949008 0.644702 0.859621
16 1 0.611119 0.806694 0.970125
73 1 0.8893 0.557738 0.922333
1621 1 0.772372 0.535763 0.649178
1197 1 0.0557768 0.674231 0.993171
1485 1 0.0674974 0.739637 0.928228
1281 1 0.106311 0.814481 0.815379
850 1 0.112655 0.764097 0.978181
531 1 0.227427 0.738809 0.760575
1270 1 0.0841896 0.836291 0.96253
29 1 0.096098 0.987691 0.666174
879 1 0.00205796 0.738617 0.826085
1349 1 0.170698 0.597906 0.914614
134 1 0.327256 0.749115 0.932374
288 1 0.207077 0.646456 0.886988
1117 1 0.23892 0.6369 0.768583
1897 1 0.401334 0.755397 0.906054
1657 1 0.489243 0.976681 0.989433
1546 1 0.121881 0.688642 0.82617
1379 1 0.206185 0.756649 0.976419
463 1 0.372581 0.841377 0.882977
1632 1 0.930094 0.7971 0.964485
753 1 0.427197 0.714735 0.813497
1819 1 0.357844 0.876859 0.955313
597 1 0.317544 0.63579 0.911387
96 1 0.464453 0.841096 0.855911
453 1 0.26197 0.621975 0.85019
1993 1 0.564157 0.69029 0.877328
409 1 0.452002 0.645971 0.894453
1678 1 0.506226 0.821507 0.801771
546 1 0.491517 0.56104 0.837247
1768 1 0.980039 0.87392 0.917412
1264 1 0.773336 0.827196 0.937679
42 1 0.574674 0.769463 0.913074
346 1 0.765707 0.76782 0.874663
383 1 0.704926 0.693813 0.855821
972 1 0.726726 0.826733 0.867192
551 1 0.62724 0.617544 0.868244
149 1 0.557758 0.774215 0.755038
342 1 0.0431683 0.917729 0.945252
553 1 0.496318 0.959821 0.854225
881 1 0.719298 0.877522 0.927087
1558 1 0.679164 0.665509 0.929712
325 1 0.813124 0.636916 0.945336
1631 1 0.86065 0.598874 0.849571
1180 1 0.843819 0.826026 0.888292
489 1 0.846335 0.675445 0.884117
1161 1 0.999157 0.591864 0.791806
239 1 0.729107 0.71392 0.940271
195 1 0.998755 0.663577 0.919855
1077 1 0.838731 0.747402 0.851787
1294 1 0.916107 0.707885 0.859082
762 1 0.900132 0.789877 0.858612
1387 1 0.96566 0.72647 0.965348
803 1 1.00004 0.74119 0.897165
1478 1 0.071045 0.522627 0.971499
884 1 0.161236 0.960041 0.9235
1248 1 0.178871 0.760682 0.830595
1926 1 0.146199 0.868408 0.834576
1221 1 0.46175 0.958637 0.620592
456 1 0.0810994 0.573181 0.868733
1758 1 0.0345301 0.82036 0.833322
1418 1 0.123805 0.81008 0.902367
1360 1 0.313561 0.892216 0.531653
1029 1 0.695871 0.939228 0.989579
1079 1 0.810882 0.560258 0.916441
250 1 0.271412 0.839792 0.902469
1254 1 0.256856 0.859094 0.805366
1376 1 0.893687 0.876875 0.948666
846 1 0.126043 0.898526 0.946642
295 1 0.648156 0.975826 0.576051
858 1 0.394749 0.684292 0.515493
150 1 0.268309 0.952639 0.828335
299 1 0.193406 0.904463 0.883824
515 1 0.283758 0.901453 0.95945
996 1 0.373168 0.810606 0.950588
638 1 0.520956 0.596149 0.977044
109 1 0.326852 0.912784 0.885783
388 1 0.322562 0.803243 0.839206
1476 1 0.306912 0.973041 0.973977
782 1 0.329701 0.883217 0.821533
1443 1 0.763225 0.562114 0.541764
1593 1 0.975714 0.57978 0.505739
1709 1 0.802628 0.55015 0.833222
1208 1 0.471658 0.996734 0.800907
813 1 0.775717 0.56941 0.998666
1055 1 0.631641 0.731731 0.861542
1168 1 0.430959 0.868349 0.918547
371 1 0.867591 0.948572 0.962989
1064 1 0.00717902 0.983833 0.50999
428 1 0.390656 0.935591 0.929547
843 1 0.575744 0.853702 0.879286
188 1 0.762464 0.884564 0.990707
17 1 0.670348 0.927141 0.793038
1299 1 0.666585 0.93337 0.908965
488 1 0.223744 0.673204 0.544132
423 1 0.0134026 0.799269 0.946474
1810 1 0.576044 0.922616 0.879122
226 1 0.777799 0.886474 0.844907
1845 1 0.647447 0.861351 0.921321
1135 1 0.902945 0.862707 0.883963
433 1 0.708511 0.900167 0.854644
136 1 0.886058 0.747807 0.933277
385 1 0.199997 0.534993 0.95345
2021 1 0.819209 0.740984 0.930581
1004 1 0.914688 0.907351 0.817212
1788 1 0.918924 0.935459 0.89082
1781 1 0.856374 0.978885 0.901327
666 1 0.348443 0.981861 0.893998
1258 1 0.328688 0.98571 0.566769
1477 1 0.267771 0.567387 0.991209
1400 1 0.00189437 0.560667 0.982324
1397 1 0.465903 0.838485 0.975408
1641 1 0.786795 0.705943 0.986661
1075 1 0.572942 0.63582 0.998343
1812 1 0.84394 0.749775 0.995464
1390 1 0.650195 0.968755 0.504718
| [
"[email protected]"
]
| |
459542f9961620ec16485e394cacb1d4532de3fb | 6930e9d3372e83cf43a47ae8ad165f83a218aee2 | /capture/noworkflow/now/models/__init__.py | 92802914a0447e80de232d80944205dcce8db60e | [
"MIT"
]
| permissive | hugobowne/noworkflow | 02ab47a8b3377ee56f1e7c4552a8dbcb3d15e5f0 | 333cbe274348428f1a9514fe81406f8416036845 | refs/heads/master | 2021-01-17T20:27:16.524245 | 2015-11-18T23:53:28 | 2015-11-18T23:53:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 469 | py | # Copyright (c) 2015 Universidade Federal Fluminense (UFF)
# Copyright (c) 2015 Polytechnic Institute of New York University.
# This file is part of noWorkflow.
# Please, consult the license terms in the LICENSE file.
from __future__ import (absolute_import, print_function,
division, unicode_literals)
from .model import Model
from .history import History
from .trial import Trial
from .diff import Diff
from .trial_prolog import TrialProlog
| [
"[email protected]"
]
| |
e12329cc4c890015554cf9c066e17dccb8fc1f21 | 8a269d6e597ce214dc6540230c997c801b5addd0 | /reviews/hw6.from.xbrablik/numeval.py | 705686bfcfde29092b3af1d49a6d077e4bcd74d4 | []
| no_license | oreqizer/pv248 | f9f126d40b4e331989b3b39a8367c826bc77fcd6 | 9550e91c5e3473f39488605298dee52e4240342b | refs/heads/master | 2023-03-01T21:40:02.354436 | 2021-02-08T17:31:15 | 2021-02-08T17:31:15 | 302,929,555 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,347 | py | ## Good order of methods -- starting from 'evaluate', then adjacent methods,
## then classes. It makes it very easy to find the way around the code.
##
## Great usage of f-strings in your error messages. They are also very
## detailed, which would be great in real-world usage if this was a small
## library.
##
## You seem to follow the PEP8 guidelines consistently, which is also very
## good. However, in a few places you slightly exceed the 80 column line
## limit.
## Not too big of a problem though, at least you stick to 99 characters at
## maximum.
##
## Very good code all in all, excellent code structure -- especially your
## usage of exceptions, it's easy to read and it's brief.
##
## Only smaller details here and there.
## I don't see the benefit of using comments instead of type hints.
## Also, why is it only used in two methods and not all of them?
# This file should implement evaluate().
import numpy as np
import lisp
from classes import Compound, Identifier, Number, String
# === PARSE ===
def evaluate(s):
try:
return eval_root(lisp.parse(s))
except Exception as err:
return Error(str(err))
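## The try/except funnel at the top level is neat: any parse or eval
## failure is converted into an Error value. A quick behaviour sketch
## (my own example input, not from your tests):
## > evaluate("(+ (vector 1 2) (vector 3 4))")  # -> (vector 4.0 6.0)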
def eval_root(root):
    ## Why not use type hints for this? I.e.:
## > def eval_root(root) -> Union[Number, Vector, Matrix]:
## (plus add 'from typing import *' of course)
# root: Compound
# returns Number | Vector | Matrix
if type(root) is Number:
return root
if type(root) is not Compound:
raise Exception(
f"invalid root format. want {Compound}, got {type(root)}")
if len(root) < 2:
raise Exception(
f"invalid number of arguments. want 2+, got {len(root)}")
iden = root[0]
if type(iden) is not Identifier:
raise Exception(
f"invalid command format. want {Identifier}, got {type(iden)}")
t = str(iden)
if t == "vector":
return eval_vector(root)
if t == "matrix":
return eval_matrix(root)
if t == "+":
return eval_add(root)
if t == "dot":
return eval_dot(root)
if t == "cross":
return eval_cross(root)
if t == "*":
return eval_mul(root)
if t == "det":
return eval_det(root)
if t == "solve":
return eval_solve(root)
raise Exception(f"unknown type: {t}")
def eval_vector(root):
# • ‹(vector <real>+)› # <real>+ means 1 or more objects of type ‹real›
args = [a if type(a) is Number else eval_root(a) for a in root[1:]]
for a in args:
if type(a) is not Number:
raise Exception(
f"invalid Vector argument, want float, got {type(a)}")
return Vector([float(a) for a in args])
def eval_matrix(root):
# • ‹(matrix <vector>+)› # each vector is one row, starting from the top
args = [a if type(a) is Vector else eval_root(a) for a in root[1:]]
dim = len(args[0])
for a in args:
if type(a) is not Vector:
raise Exception(
f"invalid Matrix argument, want Vector, got {type(a)}")
if len(a) != dim:
raise Exception(
## This is the longest line, 99 characters.
## It's very understandable though, considering it's an error message
## string.
f"invalid Matrix argument, inconsistent vector lengths, got {len(a)}, want {dim}")
return Matrix(args)
def eval_add(root):
# • ‹(+ <vector> <vector>)› # → ‹vector› -- vector addition
# • ‹(+ <matrix> <matrix>)› # → ‹matrix› -- matrix addition
if len(root) != 3:
raise Exception(
f"invalid number of arguments. want 3, got {len(root)}")
args = [a if type(a) in [Vector, Matrix] else eval_root(a)
for a in root[1:]]
a1, a2 = args[0], args[1]
if type(a1) is type(a2):
return a1 + a2
raise Exception(
f"invalid argument types, want Vector/Matrix, got {type(a1)} and {type(a2)}")
def eval_dot(root):
# • ‹(dot <vector> <vector>)› # → ‹real› -- dot product
if len(root) != 3:
raise Exception(
f"invalid number of arguments. want 3, got {len(root)}")
args = [a if type(a) is Vector else eval_root(a) for a in root[1:]]
a1, a2 = args[0], args[1]
if type(a1) is Vector and type(a2) is Vector:
return Number(a1.dot(a2))
raise Exception(
f"invalid argument types, want Vector, got {type(a1)} and {type(a2)}")
def eval_cross(root):
# • ‹(cross <vector> <vector>)› # → ‹vector› -- cross product
if len(root) != 3:
raise Exception(
f"invalid number of arguments. want 3, got {len(root)}")
args = [a if type(a) is Vector else eval_root(a) for a in root[1:]]
a1, a2 = args[0], args[1]
if type(a1) is Vector and type(a2) is Vector:
return a1.cross(a2)
raise Exception(
f"invalid argument types, want Vector, got {type(a1)} and {type(a2)}")
def eval_mul(root):
# • ‹(* <matrix> <matrix>)› # → ‹matrix› -- matrix multiplication
if len(root) != 3:
raise Exception(
f"invalid number of arguments. want 3, got {len(root)}")
args = [a if type(a) is Matrix else eval_root(a) for a in root[1:]]
a1, a2 = args[0], args[1]
if type(a1) is Matrix and type(a2) is Matrix:
return a1 * a2
raise Exception(
f"invalid argument types, want Matrix, got {type(a1)} and {type(a2)}")
def eval_det(root):
# • ‹(det <matrix>)› # → ‹real› -- determinant of the matrix
if len(root) != 2:
raise Exception(
f"invalid number of arguments. want 2, got {len(root)}")
arg = root[1] if type(root[1]) == Matrix else eval_root(root[1])
return Number(arg.det())
def eval_solve(root):
# • ‹(solve <matrix>)› # → ‹vector› -- linear equation solver
if len(root) != 2:
raise Exception(
f"invalid number of arguments. want 2, got {len(root)}")
arg = root[1] if type(root[1]) == Matrix else eval_root(root[1])
return arg.solve()
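## Usage sketch (reviewer addition, not part of the submission): exercising the
## grammar above by hand-building the AST; `Identifier`, `Number` and `Compound`
## are assumed to be the parser classes defined earlier in this file.
def _demo_eval():
    v = Compound([Identifier("vector"), Number(1), Number(2), Number(3)])
    w = Compound([Identifier("vector"), Number(4), Number(5), Number(6)])
    return eval_root(Compound([Identifier("dot"), v, w]))  # -> Number(32.0)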
# === CLASSES ===
## I see you use duck typing here over having a superclass (or any other
## possible solution).
## I believe having such a class with methods is_***(self): return False
## would have made for a slightly shorter code, but that's just a detail.
## (the four methods is_real, is_vector, ... are repeated in each class)
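## A concrete sketch of that alternative (reviewer addition, not part of the
## submission; the class name is made up). Each subclass would override only
## the one predicate that is true for it, e.g.
## `class Vector(_TypedValue): def is_vector(self): return True`.
class _TypedValue:
    def is_real(self):
        return False
    def is_vector(self):
        return False
    def is_matrix(self):
        return False
    def is_error(self):
        return False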
class Vector:
def __init__(self, values):
self.values = values
def is_real(self):
return False
def is_vector(self):
return True
def is_matrix(self):
return False
def is_error(self):
return False
def __eq__(self, o):
return self.values == o.values
def __len__(self):
return len(self.values)
def __iter__(self):
self.number = 0
return self
def __next__(self):
if self.number == len(self.values):
raise StopIteration
res = self.values[self.number]
self.number += 1
return Number(res)
def __str__(self):
exp = Compound([
Identifier("vector"),
*[Number(v) for v in self.values]
])
return str(exp)
def __add__(self, o):
# • ‹(+ <vector> <vector>)› # → ‹vector› -- vector addition
self.check_len(o)
return Vector(list(np.add(self.values, o.values)))
def dot(self, o):
# • ‹(dot <vector> <vector>)› # → ‹real› -- dot product
self.check_len(o)
return float(np.dot(self.values, o.values))
def cross(self, o):
# • ‹(cross <vector> <vector>)› # → ‹vector› -- cross product
if len(self) != 3 or len(o) != 3:
raise Exception(
f"cross product vectors must be of len(3), got {self} and {o}")
return Vector(list(np.cross(self.values, o.values)))
def check_len(self, o):
if len(self) != len(o):
raise Exception(f"vector length mismatch, got {self} and {o}")
class Matrix:
def __init__(self, values):
## Again, you can use type hints instead of comments:
## > def __init__(self, values: List[Vector]):
## With hints you get the added benefit of mypy static checks.
# values: [Vector]
self.values = values
self.x = len(values[0])
self.y = len(values)
def is_real(self):
return False
def is_vector(self):
return False
def is_matrix(self):
return True
def is_error(self):
return False
def __eq__(self, o):
return self.values == o.values
def __len__(self):
return len(self.values)
def __iter__(self):
self.number = 0
return self
def __next__(self):
if self.number == len(self.values):
raise StopIteration
res = self.values[self.number]
self.number += 1
return res
def __str__(self):
exp = Compound([
Identifier("matrix"),
*self.values,
])
return str(exp)
def __add__(self, o):
# • ‹(+ <matrix> <matrix>)› # → ‹matrix› -- matrix addition
if self.x != o.x or self.y != o.y:
raise Exception(
                f'addition of incompatible matrices, {self} and {o}')
return Matrix([Vector(list(r)) for r in np.add(self.rows(), o.rows())])
def __mul__(self, o):
# • ‹(* <matrix> <matrix>)› # → ‹matrix› -- matrix multiplication
if self.x != o.y:
raise Exception(
                f'multiplication of incompatible matrices, {self} and {o}')
return Matrix([Vector(list(r)) for r in np.matmul(self.rows(), o.rows())])
def det(self):
# • ‹(det <matrix>)› # → ‹real› -- determinant of the matrix
if self.x != self.y:
raise Exception(
f'determinant of a non-square matrix, {self}')
return float(np.linalg.det(self.rows()))
def solve(self):
# • ‹(solve <matrix>)› # → ‹vector› -- linear equation solver
if self.x != self.y:
raise Exception(
f'solving a non-square matrix, {self}')
_, s, vh = np.linalg.svd(np.matrix(self.rows()))
null_mask = (s < 1e-15)
null_space = np.compress(null_mask, vh, axis=0)
res = np.transpose(null_space)
if res.size == 0:
res = np.array([0 for x in range(self.y)])
else:
res_vect = []
for var in res.tolist():
for val in var:
res_vect.append(val)
break
res = np.array(res_vect)
return Vector(list(res.flat))
def rows(self):
return [v.values for v in self.values]
class Error:
def __init__(self, msg):
self.message = msg
def is_real(self):
return False
def is_vector(self):
return False
def is_matrix(self):
return False
def is_error(self):
return True
def __str__(self):
exp = Compound([
Identifier("error"),
String(self.message),
])
return str(exp)
| [
"[email protected]"
]
| |
6aba572d1cba098d672c298e067d0dfc92d91b91 | d2fc4d45b115fb861097657d00b3c5cb08e8a3ad | /scenarios/customer_create_debit/executable.py | c7e6fcc23d760ad3dc7b1ac2dcbd3e0eb70c15d5 | []
| no_license | jess010/balanced-python | 81b39f0e9d3ce52d60f2453b8c98e77f07ee3acb | b7a6bf0430ad0299d96de15ea97d3d4ccfb4c958 | refs/heads/master | 2020-12-25T16:13:35.626111 | 2013-09-20T00:14:58 | 2013-09-20T00:14:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 177 | py | import balanced
balanced.configure('b5de51921b2d11e389c4026ba7cac9da')
customer = balanced.Customer.find('/v1/customers/AC6M5tIyndBqrv4fEdubPUhg')
customer.debit(amount=5000) | [
"[email protected]"
]
| |
9054213d9c841a3b76c2f2119fa94feb4f9e689d | 60530f3e1d22fcb3b51be0b9f482a912c8203bb0 | /Python_Workbook/H01.py | b9b81c4afc7f2d00d7a879db0647b1ec34ce4b0f | []
| no_license | cinxdy/Python_practice | 1afb2aca1c92d16d98459407ae02ca2ed7f7832c | 8a2642b51c6ad73840dae964b1a55cbb53f7b9f7 | refs/heads/master | 2022-05-09T00:10:15.849881 | 2022-04-03T09:38:53 | 2022-04-03T09:38:53 | 180,339,041 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 347 | py | from random import *
count = 0
answer = randint(1,100)
while 1:
number_try = int(input("Try guess number between 1 and 100 "))
count+=1
if number_try > answer : print("answer is smaller number")
elif number_try < answer : print("answer is bigger number")
else :
print("You got it! try count : %d"%count)
break | [
"[email protected]"
]
| |
f14d86124a58ed2a0ba3b2173fa644ccf6aed101 | ba45840f241a0348d8f11df4bf5e16dee230bda5 | /config/config.py | 88fe4b900e6b8435fdccd2cd0ced9374f2fd1578 | []
| no_license | zhengxiawu/FGIR-GAN | 2d630b4330da535e157f13561160789f2d1183c4 | 2a4e94c1c736d2b20255eda34b801e9fc2be62af | refs/heads/master | 2020-03-07T02:57:37.518206 | 2018-04-04T01:42:32 | 2018-04-04T01:42:32 | 127,222,053 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,417 | py | # --------------------------------------------------------
# MXNet Implementation of pix2pix GAN
# Copyright (c) 2017 UIUC
# Modified by Bowen Cheng
# --------------------------------------------------------
import yaml
import numpy as np
from easydict import EasyDict as edict
config = edict()
config.MXNET_VERSION = ''
config.output_path = ''
config.symbol = ''
config.gpus = ''
config.RNG_SEED = 1
config.loadSize = 286
config.fineSize = 256
config.AtoB = False
config.netG = 'autoencoder' # 'autoencoder' or 'unet'
config.netD = 'basic' # 'basic' or 'n_layers'
config.n_layers = 0 # only used if netD=='n_layers'
config.GAN_loss = 1 # use GAN loss set to 1, do not use GAN loss set to 0
config.Trained_model_loss = 1
# default training
config.default = edict()
config.default.frequent = 20
config.default.kvstore = 'device'
# dataset related params
config.dataset = edict()
config.dataset.dataset = 'facades'
config.dataset.root = './data'
config.dataset.imageset = 'train'
config.dataset.image_root = './datasets'
config.dataset.testset = 'val'
config.dataset.mean_r = 104
config.dataset.mean_g = 117
config.dataset.mean_b = 123
config.TRAIN = edict()
config.TRAIN.optimizer = 'adam'
config.TRAIN.lr = 0.0002
config.TRAIN.beta1 = 0.5
config.TRAIN.beta2 = 0.999
config.TRAIN.momentum = 0.9
config.TRAIN.wd = 0.0005
config.TRAIN.begin_epoch = 0
config.TRAIN.end_epoch = 200
config.TRAIN.num_batches = 1000
config.TRAIN.model_prefix = ''
config.TRAIN.step_epoch = 100
config.TRAIN.decay_epoch = 100
# whether resume training
config.TRAIN.RESUME = False
# whether shuffle image
config.TRAIN.SHUFFLE = True
config.TRAIN.FLIP = True
# batch size
config.TRAIN.BATCH_SIZE = 1
config.TRAIN.epoch_end_plot_figure = True
config.TRAIN.batch_end_plot_figure = False
config.TRAIN.save_interval = 20
# L1 loss weight
config.TRAIN.lambda_l1 = 100
config.TEST = edict()
config.TEST.TEST_EPOCH = 0
config.TEST.img_h = 256
config.TEST.img_w = 256
def update_config(config_file):
exp_config = None
with open(config_file) as f:
        exp_config = edict(yaml.safe_load(f))  # safe_load: plain yaml.load without a Loader is unsafe/deprecated
for k, v in exp_config.items():
if k in config:
if isinstance(v, dict):
for vk, vv in v.items():
config[k][vk] = vv
else:
config[k] = v
else:
raise ValueError("key must exist in config.py")
| [
"[email protected]"
]
| |
bfe93474345ec70c961dbdc527b854fb60902af2 | 084a13b6524e21914826e842eeefefd09570a970 | /experiments/atari_hard/montezuma_revenge/ppo_cnd_110_2.py | 3737607040f72aec5ff14669c39f29ff72e48a5b | [
"MIT"
]
| permissive | michalnand/reinforcement_learning | 28aa0e2c92b6112cf366eff0e0d6a78b9a56e94f | 01635014a37a4c871766b4cdd2caaa26a0c2d8cc | refs/heads/main | 2023-06-01T10:27:36.601631 | 2023-02-12T19:46:01 | 2023-02-12T19:46:01 | 217,841,101 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,122 | py | import time
import torch
import RLAgents
import models.ppo_cnd_110_2.src.model_ppo as ModelPPO
import models.ppo_cnd_110_2.src.model_cnd_target as ModelCNDTarget
import models.ppo_cnd_110_2.src.model_cnd as ModelCND
import models.ppo_cnd_110_2.src.config as Config
#torch.cuda.set_device("cuda:0")
#print("running on ", torch.cuda.get_device_name())
path = "models/ppo_cnd_110_2/"
config = Config.Config()
#config.envs_count = 1
envs = RLAgents.MultiEnvParallelOptimised("MontezumaRevengeNoFrameskip-v4", RLAgents.WrapperMontezuma, config.envs_count)
#envs = RLAgents.MultiEnvSeq("MontezumaRevengeNoFrameskip-v4", RLAgents.WrapperMontezuma, config.envs_count, True)
#envs = RLAgents.MultiEnvSeq("MontezumaRevengeNoFrameskip-v4", RLAgents.WrapperMontezumaVideo, config.envs_count)
agent = RLAgents.AgentPPOCND(envs, ModelPPO, ModelCNDTarget, ModelCND, config)
max_iterations = 1000000
trainig = RLAgents.TrainingIterations(envs, agent, max_iterations, path, 128)
trainig.run()
'''
agent.load(path)
agent.disable_training()
while True:
reward, done, _ = agent.main()
''' | [
"[email protected]"
]
| |
2cbe46e844394113c5a7b0db976db90e3d92a72b | d12b59b33df5c467abf081d48e043dac70cc5a9c | /ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocols/capabilities_e34fedc02893b4ebddb7e5f9d242efcc.py | 4d5b4d23c25816f353599659ba9dc980cfeb59ec | [
"MIT"
]
| permissive | ajbalogh/ixnetwork_restpy | 59ce20b88c1f99f95a980ff01106bda8f4ad5a0f | 60a107e84fd8c1a32e24500259738e11740069fd | refs/heads/master | 2023-04-02T22:01:51.088515 | 2021-04-09T18:39:28 | 2021-04-09T18:39:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,008 | py | # MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
class Capabilities(Base):
"""A high level object that allows to define the OpenFlow Switch capabilities configuration.
The Capabilities class encapsulates a required capabilities resource which will be retrieved from the server every time the property is accessed.
"""
__slots__ = ()
_SDM_NAME = 'capabilities'
_SDM_ATT_MAP = {
'FlowStatistics': 'flowStatistics',
'GroupStatistics': 'groupStatistics',
'MatchIpAddressInArpPackets': 'matchIpAddressInArpPackets',
'PortStatistics': 'portStatistics',
'QueueStatistics': 'queueStatistics',
'ReassambleIpFragments': 'reassambleIpFragments',
'Reserved': 'reserved',
'SpanningTree': 'spanningTree',
'SwitchWillBlockLoopingPorts': 'switchWillBlockLoopingPorts',
'TableStatistics': 'tableStatistics',
}
def __init__(self, parent):
super(Capabilities, self).__init__(parent)
@property
def FlowStatistics(self):
"""
Returns
-------
- bool: Indicates that the ofChannel capabilities of the switch includes flow statistics.
"""
return self._get_attribute(self._SDM_ATT_MAP['FlowStatistics'])
@FlowStatistics.setter
def FlowStatistics(self, value):
self._set_attribute(self._SDM_ATT_MAP['FlowStatistics'], value)
@property
def GroupStatistics(self):
"""
Returns
-------
- bool: If true, indicates that the capabilities of the switch include Group Statistics.
"""
return self._get_attribute(self._SDM_ATT_MAP['GroupStatistics'])
@GroupStatistics.setter
def GroupStatistics(self, value):
self._set_attribute(self._SDM_ATT_MAP['GroupStatistics'], value)
@property
def MatchIpAddressInArpPackets(self):
"""
Returns
-------
- bool: If true, indicates that the capabilities of the switch includes Match IP addresses in ARP pkts.
"""
return self._get_attribute(self._SDM_ATT_MAP['MatchIpAddressInArpPackets'])
@MatchIpAddressInArpPackets.setter
def MatchIpAddressInArpPackets(self, value):
self._set_attribute(self._SDM_ATT_MAP['MatchIpAddressInArpPackets'], value)
@property
def PortStatistics(self):
"""
Returns
-------
- bool: Indicates that the ofChannel capabilities of the switch includes port statistics.
"""
return self._get_attribute(self._SDM_ATT_MAP['PortStatistics'])
@PortStatistics.setter
def PortStatistics(self, value):
self._set_attribute(self._SDM_ATT_MAP['PortStatistics'], value)
@property
def QueueStatistics(self):
"""
Returns
-------
- bool: Indicates that the capabilities of the switch include Queue statistics.
"""
return self._get_attribute(self._SDM_ATT_MAP['QueueStatistics'])
@QueueStatistics.setter
def QueueStatistics(self, value):
self._set_attribute(self._SDM_ATT_MAP['QueueStatistics'], value)
@property
def ReassambleIpFragments(self):
"""
Returns
-------
- bool: Indicates that the capabilities of the switch include reassemble IP fragments at the receiver.
"""
return self._get_attribute(self._SDM_ATT_MAP['ReassambleIpFragments'])
@ReassambleIpFragments.setter
def ReassambleIpFragments(self, value):
self._set_attribute(self._SDM_ATT_MAP['ReassambleIpFragments'], value)
@property
def Reserved(self):
"""
Returns
-------
- bool: Indicates that the capabilities of the switch includes reserved, must be zero.
"""
return self._get_attribute(self._SDM_ATT_MAP['Reserved'])
@Reserved.setter
def Reserved(self, value):
self._set_attribute(self._SDM_ATT_MAP['Reserved'], value)
@property
def SpanningTree(self):
"""
Returns
-------
- bool: Indicates that the capabilities of the switch includes 802.1d spanning tree.
"""
return self._get_attribute(self._SDM_ATT_MAP['SpanningTree'])
@SpanningTree.setter
def SpanningTree(self, value):
self._set_attribute(self._SDM_ATT_MAP['SpanningTree'], value)
@property
def SwitchWillBlockLoopingPorts(self):
"""
Returns
-------
- bool: If true, indicates that switch will block looping ports.
"""
return self._get_attribute(self._SDM_ATT_MAP['SwitchWillBlockLoopingPorts'])
@SwitchWillBlockLoopingPorts.setter
def SwitchWillBlockLoopingPorts(self, value):
self._set_attribute(self._SDM_ATT_MAP['SwitchWillBlockLoopingPorts'], value)
@property
def TableStatistics(self):
"""
Returns
-------
- bool: Indicates that the capabilities of the switch includes table statistics.
"""
return self._get_attribute(self._SDM_ATT_MAP['TableStatistics'])
@TableStatistics.setter
def TableStatistics(self, value):
self._set_attribute(self._SDM_ATT_MAP['TableStatistics'], value)
def update(self, FlowStatistics=None, GroupStatistics=None, MatchIpAddressInArpPackets=None, PortStatistics=None, QueueStatistics=None, ReassambleIpFragments=None, Reserved=None, SpanningTree=None, SwitchWillBlockLoopingPorts=None, TableStatistics=None):
"""Updates capabilities resource on the server.
Args
----
- FlowStatistics (bool): Indicates that the ofChannel capabilities of the switch includes flow statistics.
- GroupStatistics (bool): If true, indicates that the capabilities of the switch include Group Statistics.
- MatchIpAddressInArpPackets (bool): If true, indicates that the capabilities of the switch includes Match IP addresses in ARP pkts.
- PortStatistics (bool): Indicates that the ofChannel capabilities of the switch includes port statistics.
- QueueStatistics (bool): Indicates that the capabilities of the switch include Queue statistics.
- ReassambleIpFragments (bool): Indicates that the capabilities of the switch include reassemble IP fragments at the receiver.
- Reserved (bool): Indicates that the capabilities of the switch includes reserved, must be zero.
- SpanningTree (bool): Indicates that the capabilities of the switch includes 802.1d spanning tree.
- SwitchWillBlockLoopingPorts (bool): If true, indicates that switch will block looping ports.
- TableStatistics (bool): Indicates that the capabilities of the switch includes table statistics.
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))
| [
"[email protected]"
]
| |
a82b1bbd2b03de5d576c5401152707034fefb3a8 | d5214b1331c9dae59d95ba5b3aa3e9f449ad6695 | /qPloneResolveUID/tags/0.2.4/transforms/ruid_to_url.py | b243d8358f79baaeef8a74505a0c1b12a115c326 | []
| no_license | kroman0/products | 1661ee25a224c4b5f172f98110944f56136c77cf | f359bb64db22f468db5d1e411638790e94d535a2 | refs/heads/master | 2021-01-10T07:58:04.579234 | 2014-06-11T12:05:56 | 2014-06-11T12:05:56 | 52,677,831 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,059 | py | # Author: Melnychuk Taras
# Contact: [email protected]
# Date: $Date: 2006-08-11
# Copyright: quintagroup.com
import re
from Products.CMFCore.utils import getToolByName
from Products.PortalTransforms.interfaces import itransform
from Products.qPloneResolveUID.config import *
class ruid_to_url:
"""Transform which replaces resolve uid into urls"""
__implements__ = itransform
__name__ = "ruid_to_url"
inputs = ('text/html',)
output = 'text/html'
def __init__(self, name=None):
if name:
self.__name__ = name
        self.tag_regexp = re.compile(TAG_PATTERN, re.I | re.S)
        self.ruid_regexp = re.compile(UID_PATTERN, re.I | re.S)
def name(self):
return self.__name__
def find_ruid(self, data):
tags_ruid = []
unique_ruid = []
for m in self.tag_regexp.finditer(data):
ruid = re.search(self.ruid_regexp, m.group(0))
if ruid:
tags_ruid.append({m.group(0):ruid.group('uid')})
        for tu in tags_ruid:
            uid = tu.values()[0]
            if uid not in unique_ruid:
                unique_ruid.append(uid)
return tags_ruid, unique_ruid
def mapRUID_URL(self, unique_ruid, portal):
ruid_url = {}
rc = getToolByName(portal, 'reference_catalog')
pu = getToolByName(portal, 'portal_url')
for uid in unique_ruid:
obj = rc.lookupObject(uid)
if obj:
ruid_url[uid] = pu.getRelativeUrl(obj)
return ruid_url
def convert(self, orig, data, **kwargs):
text = orig
tags_ruid, unique_ruid = self.find_ruid(text)
if unique_ruid:
ruid_url = self.mapRUID_URL(unique_ruid, kwargs['context'])
for tag_ruid in tags_ruid:
t, uid = tag_ruid.items()[0]
if ruid_url.has_key(uid):
text = text.replace(t, t.replace('resolveuid/'+uid, ruid_url[uid]))
data.setData(text)
return data
def register():
return ruid_to_url()
| [
"mylan@4df3d6c7-0a05-0410-9bee-ae8b7a76f946"
]
| mylan@4df3d6c7-0a05-0410-9bee-ae8b7a76f946 |
51ac6b863cba692cbdc9780978fb8f213b7e3c57 | 3b225bf4895df8b5c02d82b94574ed7985b2c69f | /test_settings.py | 44a7acc2fb39588561c79457caebcf6ed97e4b11 | []
| no_license | yakky/aldryn-faq-1 | 10d8d825447a1ba8d62712fbabe988d3d8203a94 | 3749ad2568432d3e78c0d37627b1bff9f52b69b9 | refs/heads/master | 2021-01-15T14:58:46.492723 | 2015-01-16T10:08:44 | 2015-01-16T10:08:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 501 | py | # -*- coding: utf-8 -*-
HELPER_SETTINGS = {
'ROOT_URLCONF': 'aldryn_faq.tests.urls',
'TIME_ZONE': 'Europe/Zurich',
'LANGUAGES': (
('en', 'English'),
('de', 'German'),
('fr', 'French'),
),
'INSTALLED_APPS': [
'adminsortable',
'aldryn_faq',
'djangocms_text_ckeditor',
'hvad',
'sortedm2m',
],
}
def run():
from djangocms_helper import runner
runner.cms('aldryn_faq')
if __name__ == "__main__":
run()
| [
"[email protected]"
]
| |
e70eeff6fc0c80017ba79462037859c2fe864842 | df97d5b25d40b54e0714ed9c0a6dd7a579011e2e | /docs/conf.py | 8a742f6849d8b65a35bb000ecc258a60373bc48b | []
| no_license | mikadosoftware/mikadoCMS | 90ac1910b06f32bc3e808d1df656ba38a30e781c | 7bb1ca4f66b74d4529a601540e1bf469f44d3b01 | refs/heads/master | 2021-01-17T00:20:34.489198 | 2018-06-13T15:27:53 | 2018-06-13T15:27:53 | 8,103,422 | 0 | 0 | null | 2013-05-03T23:07:59 | 2013-02-08T23:27:27 | JavaScript | UTF-8 | Python | false | false | 7,983 | py | # -*- coding: utf-8 -*-
#
# mikadoCMS documentation build configuration file, created by
# sphinx-quickstart on Thu Feb 21 18:23:49 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'mikadoCMS'
copyright = u'2013, Paul Brian'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.0.1'
# The full version, including alpha/beta/rc tags.
release = '0.0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'haiku'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'mikadoCMSdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'mikadoCMS.tex', u'mikadoCMS Documentation',
u'Paul Brian', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'mikadocms', u'mikadoCMS Documentation',
[u'Paul Brian'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'mikadoCMS', u'mikadoCMS Documentation',
u'Paul Brian', 'mikadoCMS', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
| [
"[email protected]"
]
| |
2965f5264cd0016485601e825d6789dcf00187f9 | 6710c52d04e17facbc9fb35a7df313f7a2a7bd53 | /0319. Bulb Switcher.py | 817afd635d32758fc9c7054706a3bdac532686ba | []
| no_license | pwang867/LeetCode-Solutions-Python | 535088fbe747a453360457728cc22cf336020bd2 | 188befbfb7080ba1053ee1f7187b177b64cf42d2 | refs/heads/master | 2022-11-13T16:20:28.211707 | 2020-06-28T06:01:14 | 2020-06-28T06:01:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,051 | py | # this question is basicly asking for the count of divisors of a number
# only lightbulbs at positions m^2 will be on, because divisors always
# appear in pairs except for m^2, so only m^2 will have an odd number of divisors
class Solution(object):
def bulbSwitch(self, n):
"""
:type n: int
:rtype: int
"""
return int(pow(n, 0.5))
"""
There are n bulbs that are initially off. You first turn on all the bulbs. Then, you turn off every second bulb. On the third round, you toggle every third bulb (turning on if it's off or turning off if it's on). For the i-th round, you toggle every i bulb. For the n-th round, you only toggle the last bulb. Find how many bulbs are on after n rounds.
Example:
Input: 3
Output: 1
Explanation:
At first, the three bulbs are [off, off, off].
After first round, the three bulbs are [on, on, on].
After second round, the three bulbs are [on, off, on].
After third round, the three bulbs are [on, off, off].
So you should return 1, because there is only one bulb is on.
"""
| [
"[email protected]"
]
| |
da368c29fe5f4ac1cb6a72a94ee2a496750a1b3c | 1c8a776b778abc6670f0db4544b76b9d517d9dd7 | /src/erpbrasil/edoc/nfse/__init__.py | 8be26d2c00ff647c53677f72f1e94c131d01b441 | [
"MIT"
]
| permissive | insol-tecnologia/erpbrasil.edoc | 6c656677f70d0058ae43271fe67c2459863629cc | 1b0a7c519b42cc938e0e5f3abbf1fbe899010c76 | refs/heads/master | 2022-07-03T17:53:29.345598 | 2020-05-14T14:26:21 | 2020-05-14T14:26:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 85 | py |
from .nfse import NFSe, ServicoNFSe
from .ginfes import Ginfes
from .dsf import Dsf
| [
"[email protected]"
]
| |
a8dc55b35485cf3f9cfe48cd3ee22b0edfabc6f9 | 00af09f4ac6f98203910d86c3791c152184ace9a | /node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/input_test.py | c92bdeb46367498f65acceea97b4d8f948250c88 | [
"Artistic-2.0",
"LicenseRef-scancode-unknown-license-reference",
"MIT",
"BSD-3-Clause"
]
| permissive | orf53975/CarnosOS | 621d641df02d742a2452fde2f28a28c74b32695a | d06849064e4e9f30ef901ad8cf90960e1bec0805 | refs/heads/master | 2023-03-24T08:06:48.274566 | 2017-01-05T16:41:01 | 2017-01-05T16:41:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,761 | py | #!/usr/bin/env python
# Copyright 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit tests for the input.py file."""
import gyp.input
import unittest
import sys
class TestFindCycles(unittest.TestCase):
def setUp(self):
self.nodes = {}
for x in ('a', 'b', 'c', 'd', 'e'):
self.nodes[x] = gyp.input.DependencyGraphNode(x)
def _create_dependency(self, dependent, dependency):
dependent.dependencies.append(dependency)
dependency.dependents.append(dependent)
def test_no_cycle_empty_graph(self):
for label, node in self.nodes.iteritems():
self.assertEquals([], node.FindCycles())
def test_no_cycle_line(self):
self._create_dependency(self.nodes['a'], self.nodes['b'])
self._create_dependency(self.nodes['b'], self.nodes['c'])
self._create_dependency(self.nodes['c'], self.nodes['d'])
for label, node in self.nodes.iteritems():
self.assertEquals([], node.FindCycles())
def test_no_cycle_dag(self):
self._create_dependency(self.nodes['a'], self.nodes['b'])
self._create_dependency(self.nodes['a'], self.nodes['c'])
self._create_dependency(self.nodes['b'], self.nodes['c'])
for label, node in self.nodes.iteritems():
self.assertEquals([], node.FindCycles())
def test_cycle_self_reference(self):
self._create_dependency(self.nodes['a'], self.nodes['a'])
self.assertEquals([(self.nodes['a'], self.nodes['a'])],
self.nodes['a'].FindCycles())
def test_cycle_two_nodes(self):
self._create_dependency(self.nodes['a'], self.nodes['b'])
self._create_dependency(self.nodes['b'], self.nodes['a'])
self.assertEquals([(self.nodes['a'], self.nodes['b'], self.nodes['a'])],
self.nodes['a'].FindCycles())
self.assertEquals([(self.nodes['b'], self.nodes['a'], self.nodes['b'])],
self.nodes['b'].FindCycles())
def test_two_cycles(self):
self._create_dependency(self.nodes['a'], self.nodes['b'])
self._create_dependency(self.nodes['b'], self.nodes['a'])
self._create_dependency(self.nodes['b'], self.nodes['c'])
self._create_dependency(self.nodes['c'], self.nodes['b'])
cycles = self.nodes['a'].FindCycles()
self.assertTrue(
(self.nodes['a'], self.nodes['b'], self.nodes['a']) in cycles)
self.assertTrue(
(self.nodes['b'], self.nodes['c'], self.nodes['b']) in cycles)
self.assertEquals(2, len(cycles))
def test_big_cycle(self):
self._create_dependency(self.nodes['a'], self.nodes['b'])
self._create_dependency(self.nodes['b'], self.nodes['c'])
self._create_dependency(self.nodes['c'], self.nodes['d'])
self._create_dependency(self.nodes['d'], self.nodes['e'])
self._create_dependency(self.nodes['e'], self.nodes['a'])
self.assertEquals([(self.nodes['a'],
self.nodes['b'],
self.nodes['c'],
self.nodes['d'],
self.nodes['e'],
self.nodes['a'])],
self.nodes['a'].FindCycles())
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
]
| |
dc73268b30ed69da8d008cedd34a1a6303b4a3b6 | 67bc22c4bb1388994e3c983c9be0d85b6cc80cb2 | /charmcraft/commands/store/__init__.py | c4087cdc5fcf28020a0af3b83ebb7bdcc3759ee8 | [
"Apache-2.0"
]
| permissive | msgpo/charmcraft | c35bb3eaf98a9d3e832211d185404256743acd31 | 89adbcb3c059da7c023f789ba8c978494f1d17f9 | refs/heads/master | 2022-11-15T08:39:35.942078 | 2020-07-14T14:20:41 | 2020-07-14T14:20:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,483 | py | # Copyright 2020 Canonical Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# For further info, check https://github.com/canonical/charmcraft
"""Commands related to the Store, a thin layer above real functionality."""
import logging
import os
import pathlib
import yaml
from tabulate import tabulate
from charmcraft.cmdbase import BaseCommand, CommandError
from .store import Store
logger = logging.getLogger('charmcraft.commands.store')
class LoginCommand(BaseCommand):
"""Log into the store."""
name = 'login'
help_msg = "login to Ubuntu Single Sign On"
def run(self, parsed_args):
"""Run the command."""
store = Store()
store.login()
logger.info("Login successful")
class LogoutCommand(BaseCommand):
"""Clear store-related credentials."""
name = 'logout'
help_msg = "clear session credentials"
def run(self, parsed_args):
"""Run the command."""
store = Store()
store.logout()
logger.info("Credentials cleared")
class WhoamiCommand(BaseCommand):
"""Show login information."""
name = 'whoami'
help_msg = "returns your login information relevant to the Store"
def run(self, parsed_args):
"""Run the command."""
store = Store()
result = store.whoami()
data = [
('name:', result.name),
('username:', result.username),
('id:', result.userid),
]
table = tabulate(data, tablefmt='plain')
for line in table.splitlines():
logger.info(line)
class RegisterNameCommand(BaseCommand):
"""Register a name in the Store."""
name = 'register'
help_msg = "register a name in the Store"
def fill_parser(self, parser):
"""Add own parameters to the general parser."""
parser.add_argument('name', help="the name to register in the Store")
def run(self, parsed_args):
"""Run the command."""
store = Store()
store.register_name(parsed_args.name)
logger.info("Congrats! You are now the publisher of %r", parsed_args.name)
class ListRegisteredCommand(BaseCommand):
"""List the charms registered in the Store."""
name = 'list'
help_msg = "list the charms registered the Store"
def run(self, parsed_args):
"""Run the command."""
store = Store()
result = store.list_registered_names()
if not result:
logger.info("Nothing found")
return
headers = ['Name', 'Visibility', 'Status']
data = []
for item in result:
visibility = 'private' if item.private else 'public'
data.append([
item.name,
visibility,
item.status,
])
table = tabulate(data, headers=headers, tablefmt='plain')
for line in table.splitlines():
logger.info(line)
class UploadCommand(BaseCommand):
"""Upload a charm file to the Store."""
name = 'upload'
help_msg = "upload a charm file to the Store"
def _discover_charm(self, charm_filepath):
"""Discover the charm name and file path.
If received path is None, a metadata.yaml will be searched in the current directory. If
path is given the name is taken from the filename.
"""
if charm_filepath is None:
            # discover the info using project's metadata, assume the file has the project's name
# with a .charm extension
try:
with open('metadata.yaml', 'rb') as fh:
metadata = yaml.safe_load(fh)
charm_name = metadata['name']
except (yaml.error.YAMLError, OSError, KeyError):
raise CommandError(
"Can't access name in 'metadata.yaml' file. The 'upload' command needs to be "
"executed in a valid project's directory, or point to a charm file with "
"the --charm-file option.")
charm_filepath = pathlib.Path(charm_name + '.charm').absolute()
            if not os.access(str(charm_filepath), os.R_OK): # access doesn't support pathlib in 3.5
raise CommandError(
"Can't access charm file {!r}. You can indicate a charm file with "
"the --charm-file option.".format(str(charm_filepath)))
else:
            # the path is given, assume the charm name is part of the file name
# XXX Facundo 2020-06-30: Actually, we need to open the ZIP file, extract the
# included metadata.yaml file, and read the name from there. Issue: #77.
charm_filepath = charm_filepath.expanduser()
            if not os.access(str(charm_filepath), os.R_OK): # access doesn't support pathlib in 3.5
raise CommandError(
"Can't access the indicated charm file: {!r}".format(str(charm_filepath)))
if not charm_filepath.is_file():
raise CommandError(
"The indicated charm is not a file: {!r}".format(str(charm_filepath)))
charm_name = charm_filepath.stem
return charm_name, charm_filepath
def fill_parser(self, parser):
"""Add own parameters to the general parser."""
parser.add_argument(
'--charm-file', type=pathlib.Path,
help="the path to the charm file to be uploaded")
def run(self, parsed_args):
"""Run the command."""
name, path = self._discover_charm(parsed_args.charm_file)
store = Store()
result = store.upload(name, path)
if result.ok:
logger.info("Revision %s of %r created", result.revision, str(name))
else:
# XXX Facundo 2020-06-30: at some point in the future the Store will give us also a
# reason why it failed, to improve the message. Issue: #78.
logger.info("Upload failed: got status %r", result.status)
| [
"[email protected]"
]
| |
a62693bd536b3eb67490873a1f580a9c1efd2bcd | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_97/85.py | 8cf79e0f137aeaf913cd1a9586a7cbec25e4ea0e | []
| no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 784 | py | #!/usr/bin/python
import os
import sys
fin = sys.stdin
def normalize(x):
s = str(x)
smallest = x
for i in xrange(1, len(s)):
n = int(s[i:] + s[:i])
if n < smallest:
smallest = n
return smallest
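# Worked check (editor addition, not in the original submission): the rotations
# of 3210 are 3210, 2103, 1032 and 0321 -> 321, so the canonical form is 321.
assert normalize(3210) == 321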
def main():
T = int(fin.readline())
for t in xrange(1, T + 1):
A, B = map(int, fin.readline().split())
m = {}
for n in xrange(A, B+1):
x = normalize(n)
if x in m:
m[x] += 1
else:
m[x] = 1
count = 0
for x in m.values():
if x > 1:
count += (x * x - x) / 2
print 'Case #%d: %d' % (t, count)
if __name__ == '__main__':
if len(sys.argv) > 1:
fin = open(sys.argv[1], 'r')
main()
| [
"[email protected]"
]
| |
e69357a0b01abbd4d042f0fe6c61619e1f299ebc | 58828acea95ec3babcada95a62af385e5e924594 | /tests/test_load.py | 4b5b36b2c319c8fce1920031831f3aab165cb01d | [
"Zlib"
]
| permissive | akx/pyalleg | 22eab5f0fe1291bcaf535cb8a264e3e0474d6378 | e14eb4dcf84f2a165fb2556ae40305a279d6e4c4 | refs/heads/master | 2021-01-21T15:22:47.855437 | 2017-06-25T21:55:07 | 2017-06-25T21:55:07 | 95,387,797 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,901 | py | from pyalleg import *
import time,random,math
def rnd(m): return random.randint(0,m)
init()
initGfx(0,640,480)
initKeyboard()
screen=getScreen()
page=Bitmap(640,480)
bitmap=loadBitmap("smiley.bmp")
font1=loadFont("pixelfont.pcx")
font2=loadFont("comic.pcx")
font=getFont()
t=0
sinex=640
# XColor is a class to encapsulate a color value.
# All methods return self in XColor, thus you can say
# XColor().unpack(someColor).shift1("hue",3).getColor()
# to retrieve someColor as a Allegro-compliant color
# with hue shifted by 3. That is, it's equivalent to
# someColor -> rgb triplet
# h,s,v=rgbHsv(r,g,b)
# h+=3
# r,g,b=hsvRgb(h,s,v)
# Color(r,g,b)
sinecolor=XColor(0,0,0)
sinecolor.setHsv(0,1,1)
shifts= [
("hue",1.0,1),
("sat",-0.005,1)
]
ft=time.time()
frames=0
fps=0
fpslimit=1
while not keyDown(constants.KEY_ESC):
if keyDown(constants.KEY_Q): fpslimit=1
if keyDown(constants.KEY_W): fpslimit=0
if keyDown(constants.KEY_S): page.save("test_load.bmp")
page.clear()
t+=0.1
solidMode()
for z in range(10):
sz=200+math.cos(t)*100
x=320+math.cos(t*0.5+z)*sz
y=240+math.sin(t*0.6-z)*sz
bitmap.rotateSprite(page,x-105,y-105,t*(15-z*6))
sx=sinex
sinetext="PyAlleg Sine Scroller! FPS: %d"%fps
sinecolor.shift(shifts,1)
color=sinecolor.getColor()
tx=t%20
if tx<10:
sinefont=font1
else:
sinefont=font2
for n,c in enumerate(sinetext):
xc=sx
yc=160+math.sin(t+n*0.5)*20.0
sinefont.draw(page,xc-1,yc-1,0,c)
sinefont.draw(page,xc+1,yc+1,0,c)
sinefont.draw(page,xc-1,yc+1,0,c)
sinefont.draw(page,xc+1,yc-1,0,c)
sinefont.draw(page,xc,yc,color,c)
sx+=sinefont.length(c)+1
font.draw(page,0,0,0xFFFFFF,"FPS limit[%d]: q/w | S to save image"%fpslimit)
screen.acquire()
page.blit(screen)
screen.release()
sinex-=1
if sinex<-600: sinex=640
frames+=1
if fps>30 and fpslimit:
time.sleep(0.01)
if time.time()-ft>0.2:
ft=time.time()
fps=frames*5
frames=0
| [
"[email protected]"
]
| |
4c6724a90a253228261c56b46258222d275e4a7b | 27923f62fa5544c84d3c54c90f325525205381bc | /tests/js/classifier/RandomForestClassifierTest.py | d9ac8eb96bf61721e55f9d9f20fb4b63f2a8c4b8 | [
"MIT"
]
| permissive | JasonKessler/sklearn-porter | 8e8eb34931e4c82289b6a08cdd29c1c73f032e1c | eaa094e122812d88b6f674dee9bed3ceb8b36e96 | refs/heads/master | 2020-05-20T18:42:48.472607 | 2017-02-08T22:00:22 | 2017-02-08T22:00:22 | 84,506,202 | 1 | 1 | null | 2017-03-10T01:34:15 | 2017-03-10T01:34:15 | null | UTF-8 | Python | false | false | 551 | py | # -*- coding: utf-8 -*-
import unittest
from sklearn.ensemble import RandomForestClassifier
from sklearn_porter import Porter
from ..JavaScriptTest import JavaScriptTest
class RandomForestClassifierTest(JavaScriptTest, unittest.TestCase):
def setUp(self):
super(RandomForestClassifierTest, self).setUp()
self.porter = Porter(language='js')
clf = RandomForestClassifier(n_estimators=100, random_state=0)
self._port_model(clf)
def tearDown(self):
super(RandomForestClassifierTest, self).tearDown()
| [
"[email protected]"
]
| |
a4b35ff16815f444c21e0e23f3f4ba9385c85baa | 13faa0d553ed6c6a57791db3dfdb2a0580a1695b | /CodeChef/Practice/Easy/CARVANS.py | dac8fc3fca89d8ccbf494609774fa48b766d384c | []
| no_license | kautsiitd/Competitive_Programming | ba968a4764ba7b5f2531d03fb9c53dc1621c2d44 | a0d8ae16646d73c346d9ce334e5b5b09bff67f67 | refs/heads/master | 2021-01-17T13:29:52.407558 | 2017-10-01T09:58:23 | 2017-10-01T09:58:23 | 59,496,650 | 0 | 0 | null | 2017-05-20T17:27:18 | 2016-05-23T15:56:55 | HTML | UTF-8 | Python | false | false | 275 | py | for _ in range(input()):
n = input()
a = map(int, raw_input().split())
answer = 0
    maxPossibleSpeed = float('inf')
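    # Each car travels at min(its own max, the speed of the car in front), so
    # it runs at its max speed iff that max is <= the running minimum so far.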
for i in range(n):
        if maxPossibleSpeed >= a[i]:
maxPossibleSpeed = a[i]
answer += 1
print answer
| [
"[email protected]"
]
| |
b60fcc555ff549bc2fd84e7d9d52087d866b4e82 | d305e9667f18127e4a1d4d65e5370cf60df30102 | /model_zoo/official/nlp/tinybert/src/assessment_method.py | 748666e3cef397c4be599168e5dd82d4d296ad2f | [
"Apache-2.0",
"MIT",
"Libpng",
"LicenseRef-scancode-proprietary-license",
"LGPL-2.1-only",
"AGPL-3.0-only",
"MPL-2.0-no-copyleft-exception",
"IJG",
"Zlib",
"MPL-1.1",
"BSD-3-Clause",
"BSD-3-Clause-Open-MPI",
"MPL-1.0",
"GPL-2.0-only",
"MPL-2.0",
"BSL-1.0",
"LicenseRef-scancode-unknown-license-reference",
"Unlicense",
"LicenseRef-scancode-public-domain",
"BSD-2-Clause"
]
| permissive | imyzx2017/mindspore_pcl | d8e5bd1f80458538d07ef0a8fc447b552bd87420 | f548c9dae106879d1a83377dd06b10d96427fd2d | refs/heads/master | 2023-01-13T22:28:42.064535 | 2020-11-18T11:15:41 | 2020-11-18T11:15:41 | 313,906,414 | 6 | 1 | Apache-2.0 | 2020-11-18T11:25:08 | 2020-11-18T10:57:26 | null | UTF-8 | Python | false | false | 1,900 | py | # Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""assessment methods"""
import numpy as np
class Accuracy():
"""Accuracy"""
def __init__(self):
self.acc_num = 0
self.total_num = 0
def update(self, logits, labels):
labels = labels.asnumpy()
labels = np.reshape(labels, -1)
logits = logits.asnumpy()
logit_id = np.argmax(logits, axis=-1)
self.acc_num += np.sum(labels == logit_id)
self.total_num += len(labels)
class F1():
"""F1"""
def __init__(self):
self.TP = 0
self.FP = 0
self.FN = 0
def update(self, logits, labels):
"""Update F1 score"""
labels = labels.asnumpy()
labels = np.reshape(labels, -1)
logits = logits.asnumpy()
logit_id = np.argmax(logits, axis=-1)
logit_id = np.reshape(logit_id, -1)
pos_eva = np.isin(logit_id, [2, 3, 4, 5, 6, 7])
pos_label = np.isin(labels, [2, 3, 4, 5, 6, 7])
self.TP += np.sum(pos_eva & pos_label)
self.FP += np.sum(pos_eva & (~pos_label))
self.FN += np.sum((~pos_eva) & pos_label)
print("-----------------precision is ", self.TP / (self.TP + self.FP))
print("-----------------recall is ", self.TP / (self.TP + self.FN))
| [
"[email protected]"
]
| |
248b5ac5c6d190627d78e44150eb5ad826a328a2 | 0db67bff1f2dcdadecf635ae535add91cb54c4f3 | /PythonBasis/week07/task08.py | 12d7899dc36dc321d5e9e2c6401c3c56ac766676 | []
| no_license | pavelbrnv/Coursera | 713fdb79dbf6fbde405fc991bd67db0cab30da00 | cc568f79229147866ff1df8539cf8ea66dc9ccca | refs/heads/master | 2023-03-07T23:21:09.685318 | 2021-02-22T15:08:27 | 2021-02-22T15:08:27 | 336,600,379 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 560 | py | inFile = open('input.txt', 'r', encoding='utf8')
n = int(inFile.readline())
possible_answers = set(range(1, n + 1))
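# Narrowing strategy: each hint splits the remaining candidates into those
# inside the named set and those outside; keep whichever half is larger and
# answer YES/NO to match that choice.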
while True:
line = inFile.readline().strip()
if line == 'HELP':
break
values = set(map(int, line.split()))
intersection = possible_answers & values
difference = possible_answers - values
if len(intersection) > len(difference):
possible_answers = intersection
print('YES')
else:
possible_answers = difference
print('NO')
inFile.close()
print(*sorted(possible_answers))
| [
"[email protected]"
]
| |
026207928ea78a4906fb1156e9dac2a4b63314bf | 4388363ba45b95910c25bae3d9c02ad78f4a75d6 | /python/anaconda/pkgs/bokeh-0.12.5-py27_1/lib/python2.7/site-packages/bokeh/_version.py | 2d9d3c6ac66a462a5e597d7f4a5d0c307e53a471 | []
| no_license | locolucco209/MongoScraper | d494e02531f4f165b1e821633dc9661c579337b5 | 74476c9f00ee43338af696da7e9cd02b273f9005 | refs/heads/master | 2022-11-25T19:09:27.248747 | 2018-07-10T03:54:06 | 2018-07-10T03:54:06 | 137,553,786 | 3 | 1 | null | 2022-11-16T04:32:26 | 2018-06-16T04:49:22 | null | UTF-8 | Python | false | false | 498 | py |
# This file was generated by 'versioneer.py' (0.17) from
# revision-control system data, or from the parent directory name of an
# unpacked source archive. Distribution tarballs contain a pre-generated copy
# of this file.
import json
version_json = '''
{
"date": "2017-04-05T16:01:29-0300",
"dirty": false,
"error": null,
"full-revisionid": "8f1cfc3b8dd56f815127032a7bb9419dea372ad8",
"version": "0.12.5"
}
''' # END VERSION_JSON
def get_versions():
return json.loads(version_json)
| [
"[email protected]"
]
| |
90bed141b2022685eebdfa1ee87190b50c968533 | 1125345341e496920b661e612cd67cdb96a1d170 | /createCampaign/parameter_tests/ADVERTISER_CATEGORY/test02_iab1_p.py | 866f0bccd30f505390f45962b5719ae352046ada | []
| no_license | Stephen-Williams/swarm-qa | 0bac526f0ee44b8c3677fb35959e6f7d0e258be2 | 90e36b5eab475788d9ab54051ad9c2736f3633ec | refs/heads/master | 2021-01-01T20:11:51.033059 | 2015-07-08T16:07:06 | 2015-07-08T16:07:06 | 38,764,211 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,698 | py | { 'all' :
{ '0':
{ 'ADOMAIN': 'abc.com',
'ADVERTISER_CATEGORY': 'IAB1',
'APP_FILTER': 'sites',
'CREATIVE_ATTR': '0',
'CREATIVE_BASE_64': 'iVBORw0KGgoAAAANSUhEUgAAAUAAAAAyCAIAAACib5WDAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAAD2EAAA9hAag/p2kAAAAYdEVYdFNvZnR3YXJlAHBhaW50Lm5ldCA0LjAuNWWFMmUAAAYHSURBVHhe7ZtNaB5VFIYHUkREoYuCglIUKmRRsBQRhSwKCpGgYCCLQEWCKCgWEbGgUMii0IJFFAxkEWgoIhayCFRokBYqZBGwFJEKLbjowkUXLlx04cKFPsO9nJw589Nu2nK/eV8eJOeeO983wnfu+Zlp9d9NIUSpRFsIURDRFkIURLSFEAURbSFEQURbCFEQ0RZCFES0hRAFEW0hREFEWwhRENEWQhREtIUQBRFtIURBRFsIURDRFgNcOVe99Wp15KVqYbb68+fofSjsnK+OHa2W5qvP3q1ub0evmHiiLQYgek1XN6L3ofDs0/l+0OZK9IqJJ9pigGeezKGyZ6r657foffDcuZbvJ+mvnbhBTDzRFn0QHqZD0w3X1lourRfnHmhpTRVgOrA/esUYiLbo48fVHCrovYWG67VX8jr646eG676yfip/KaINDl4xBqIt+jj5cQ4V9O2JhmvvE3mdP/z6/eaTd/L3otXl6BVjINoTxr+/V9cv1NPjnfN36VrJnGyjKO3rJPsmWFxoIhXbeh/cEpdvrnQX2yxyG/c4T359Jn8v+nUzesUYiPbEQJDMHK4efST/vtHjj9XPWu5ca2y7cbEOgz1TeU8SLS4Fs98G7QnW6U/zSlt879+/5AttUEzQ8nUvHswmPXPakCCr79ubXYirtr+vTwduO6kdok/tyy72BJcYCdGeDD5/P8akib7Rtp34sHcb2lrb3dk5wZp+Lq+0xceSadnj8/Payd1oRAuz+XM4U3wXbWIze0x2IiQwTfeS+cVEEu0JoJ0YfWaz1HrmeF5B9K5L83XYH3w+ryByoH1m5wRrIPipt9OejW/yCjqwP/+R9PUXeY+P0j61h8zkZ9PyR9ErRkK0S4eftY8rcmxKXHSVi3N1GZyqXx+Q1LTWjuK1uhRZ0uucYHGVT7CcArcuZ6xQp2gP4jb4hGNH84fzt4k7X12u1+nYfcuNLF0b/kJfLIhREe3SobE0tfOSzbEog5OIGfpS20DgWbomkm3dhxPRZev0pabOOtbXxnzXd182vJTZPi3b0ZBcfkZFvWCuBEeAyQ4aMTaiXTS+qqQATl1oG/KV6Y0j9Qo7SZtEl02YEBFil9gEC/kxGG2tiWRr64Y9YUIfLEavLwSo3sMN+9L60tmGC+yo4sLgEuMh2kVDE2uieA5ewz8+7RPBY+HkJ1ghWnwapN31LvAFtp9LG4S0yaffhH/PuX2tHQ3hrRIxKqJdNC+/kH/TaKAtHJgeIzLb+qlGMvR58u03d9dh5nBeR4Srd4GfYNlYy+Pv5PqFhssX52zzLqClN1EFBK8YD9EuGj9tHngn2UQSo08GYuCHr+o3KDqv8hMsGx0n7LEQH+XXE36C1RlmXsFFBWEKpwZc3ciPuKntKf6DV4yHaBeNV997V74epuMN3k78BIs229bJmaa7TrDar2H4O9kz1XBxjvjmOZwaQhjRLhqvUEITLSm70kyaSGLhxaxO+iZYJG1TZyNqQcgXtSdqvgxGduLwFX7+jPypkeD/gsIe+s4pMRKiXTT+kQzZL72xTOiSwYglKuG0zVfa7ec6l842FgcmWNTeJpI5gQfWyvoJVl+q90q3R6z6SXhSOGVuXNw9GmjCvUuMjWgXzdJ8/lmbfCFqU2JfElO7LszWFwJ5L/W0vh4emGCtLud1L3vD2U+w/BMpj39qHWSHUfspkW+t214xKqJdNBTJ/mVjLyLZ8tity43Abss/0R2YYJEJif8gi9W7TrDAnw5efKn1z+0Jlh99t58ti1ER7dKhgvUPk5JIueEhDabPwyYCm26Wstl22pNe+tj2IOrM8TwNTuJy22OvYbDoX/YKrJ9qnAKHpvM7G/ZGJ5223w+Lc9lFLzDwyWIMRHsCoI+l/9xaq2Pjyrmh1wxvb9fvRVLrspP/th/kJkjsJO32FCrB5/Mhnf/kOF3oVzrhKlrfzZXGDbDItZ0zKu6E/XT4oTcWIyTaQoiCiLYQoiCiLYQoiGgLIQoi2kKIgoi2EKIgoi2EKIhoCyEKItpCiIKIthCiIKIthCiIaAshCiLaQohiuFn9D1yZ9AWuOgemAAAAAElFTkSuQmCC',
'CREATIVE_HEIGHT': 50,
'CREATIVE_NAME': 'ADVERTISER_CATEGORY is valid (single digit, no subcat)',
'CREATIVE_TYPE': '3',
'CREATIVE_WIDTH': 320,
'DAY_PARTING': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111',
'DELIVERY_RATE_UNIT': 'impressions',
'ENCODE_A_HREF': 0,
'START': 1433908800, #June 10th 2015, midnight
'END': 1434513599, #June 16th 2015, 23:59:59
'EXCHANGE': 'mpb',
'LANG': 'en',
'LOCATIONS': ['CAN', 'USA'],
'MAX_RATE_IN_DOLLARS': 0.8,
'MPB_TYPE': '',
'NECTAR_ALLOCATION': 9202,
'NECTAR_CRID': 9202,
'QUANTITY': '1000000',
'TARGET_ANDROID': True,
'TARGET_IOS': True,
'SITE_LIST': ['0c3e797b933649ab84619d8e8a1c0ab6',
'07ab13ce6ae511e281c11231392559e4',
'f8289871fe0d48318d36bf3ea197f65d',
'bd80deae924f11e281c11231392559e4'],
'TAG': '<A HREF="http://ad.doubleclick.net/ddm/jump/N6041.368591.JUICEMOBILE.CA/B8760366.118973391;sz=728x90;ord=[NECTAR_TIME]?">\r\n'
'<IMG SRC="http://ad.doubleclick.net/ddm/ad/N6041.368591.JUICEMOBILE.CA/B8760366.118973391;sz=728x90;ord=[NECTAR_TIME]?" '
'BORDER=0 WIDTH=728 HEIGHT=90 '
'ALT="Advertisement"></A>'
}
}
}
| [
"[email protected]"
]
| |
4ecd192088c42fc33bb532ae1b475001c0d0992b | df2cbe914f463ad050d7ed26194424afbe3a0a52 | /addons/sms/wizard/sms_composer.py | 57b6d27ecf455b7e0e48aeeb660cccefee27162e | [
"Apache-2.0"
]
| permissive | SHIVJITH/Odoo_Machine_Test | 019ed339e995be980606a2d87a63312ddc18e706 | 310497a9872db7844b521e6dab5f7a9f61d365a4 | refs/heads/main | 2023-07-16T16:23:14.300656 | 2021-08-29T11:48:36 | 2021-08-29T11:48:36 | 401,010,175 | 0 | 0 | Apache-2.0 | 2021-08-29T10:13:58 | 2021-08-29T10:13:58 | null | UTF-8 | Python | false | false | 19,114 | py | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from ast import literal_eval
from odoo import api, fields, models, _
from odoo.addons.phone_validation.tools import phone_validation
from odoo.exceptions import UserError
from odoo.tools import html2plaintext
class SendSMS(models.TransientModel):
_name = 'sms.composer'
_description = 'Send SMS Wizard'
@api.model
def default_get(self, fields):
result = super(SendSMS, self).default_get(fields)
result['res_model'] = result.get('res_model') or self.env.context.get('active_model')
if not result.get('active_domain'):
result['active_domain'] = repr(self.env.context.get('active_domain', []))
if not result.get('res_ids'):
if not result.get('res_id') and self.env.context.get('active_ids') and len(self.env.context.get('active_ids')) > 1:
result['res_ids'] = repr(self.env.context.get('active_ids'))
if not result.get('res_id'):
if not result.get('res_ids') and self.env.context.get('active_id'):
result['res_id'] = self.env.context.get('active_id')
return result
# documents
composition_mode = fields.Selection([
('numbers', 'Send to numbers'),
('comment', 'Post on a document'),
('mass', 'Send SMS in batch')], string='Composition Mode',
compute='_compute_composition_mode', readonly=False, required=True, store=True)
res_model = fields.Char('Document Model Name')
res_id = fields.Integer('Document ID')
res_ids = fields.Char('Document IDs')
res_ids_count = fields.Integer(
'Visible records count', compute='_compute_recipients_count', compute_sudo=False,
help='Number of recipients that will receive the SMS if sent in mass mode, without applying the Active Domain value')
use_active_domain = fields.Boolean('Use active domain')
active_domain = fields.Text('Active domain', readonly=True)
active_domain_count = fields.Integer(
'Active records count', compute='_compute_recipients_count', compute_sudo=False,
help='Number of records found when searching with the value in Active Domain')
comment_single_recipient = fields.Boolean(
'Single Mode', compute='_compute_comment_single_recipient', compute_sudo=False,
help='Indicates if the SMS composer targets a single specific recipient')
# options for comment and mass mode
mass_keep_log = fields.Boolean('Keep a note on document', default=True)
mass_force_send = fields.Boolean('Send directly', default=False)
mass_use_blacklist = fields.Boolean('Use blacklist', default=True)
# recipients
recipient_valid_count = fields.Integer('# Valid recipients', compute='_compute_recipients', compute_sudo=False)
recipient_invalid_count = fields.Integer('# Invalid recipients', compute='_compute_recipients', compute_sudo=False)
recipient_single_description = fields.Text('Recipients (Partners)', compute='_compute_recipient_single', compute_sudo=False)
recipient_single_number = fields.Char('Stored Recipient Number', compute='_compute_recipient_single', compute_sudo=False)
recipient_single_number_itf = fields.Char(
'Recipient Number', compute='_compute_recipient_single',
readonly=False, compute_sudo=False, store=True,
help='UX field allowing to edit the recipient number. If changed it will be stored onto the recipient.')
recipient_single_valid = fields.Boolean("Is valid", compute='_compute_recipient_single_valid', compute_sudo=False)
number_field_name = fields.Char('Number Field')
numbers = fields.Char('Recipients (Numbers)')
sanitized_numbers = fields.Char('Sanitized Number', compute='_compute_sanitized_numbers', compute_sudo=False)
# content
template_id = fields.Many2one('sms.template', string='Use Template', domain="[('model', '=', res_model)]")
body = fields.Text(
'Message', compute='_compute_body',
readonly=False, store=True, required=True)
@api.depends('res_ids_count', 'active_domain_count')
@api.depends_context('sms_composition_mode')
def _compute_composition_mode(self):
for composer in self:
if self.env.context.get('sms_composition_mode') == 'guess' or not composer.composition_mode:
if composer.res_ids_count > 1 or (composer.use_active_domain and composer.active_domain_count > 1):
composer.composition_mode = 'mass'
else:
composer.composition_mode = 'comment'
@api.depends('res_model', 'res_id', 'res_ids', 'active_domain')
def _compute_recipients_count(self):
for composer in self:
composer.res_ids_count = len(literal_eval(composer.res_ids)) if composer.res_ids else 0
if composer.res_model:
composer.active_domain_count = self.env[composer.res_model].search_count(literal_eval(composer.active_domain or '[]'))
else:
composer.active_domain_count = 0
@api.depends('res_id', 'composition_mode')
def _compute_comment_single_recipient(self):
for composer in self:
composer.comment_single_recipient = bool(composer.res_id and composer.composition_mode == 'comment')
@api.depends('res_model', 'res_id', 'res_ids', 'use_active_domain', 'composition_mode', 'number_field_name', 'sanitized_numbers')
def _compute_recipients(self):
for composer in self:
composer.recipient_valid_count = 0
composer.recipient_invalid_count = 0
if composer.composition_mode not in ('comment', 'mass') or not composer.res_model:
continue
records = composer._get_records()
if records and issubclass(type(records), self.pool['mail.thread']):
res = records._sms_get_recipients_info(force_field=composer.number_field_name, partner_fallback=not composer.comment_single_recipient)
composer.recipient_valid_count = len([rid for rid, rvalues in res.items() if rvalues['sanitized']])
composer.recipient_invalid_count = len([rid for rid, rvalues in res.items() if not rvalues['sanitized']])
else:
composer.recipient_invalid_count = 0 if (
composer.sanitized_numbers or (composer.composition_mode == 'mass' and composer.use_active_domain)
) else 1
@api.depends('res_model', 'number_field_name')
def _compute_recipient_single(self):
for composer in self:
records = composer._get_records()
if not records or not issubclass(type(records), self.pool['mail.thread']) or not composer.comment_single_recipient:
composer.recipient_single_description = False
composer.recipient_single_number = ''
composer.recipient_single_number_itf = ''
continue
records.ensure_one()
res = records._sms_get_recipients_info(force_field=composer.number_field_name, partner_fallback=False)
composer.recipient_single_description = res[records.id]['partner'].name or records.display_name
composer.recipient_single_number = res[records.id]['number'] or ''
if not composer.recipient_single_number_itf:
composer.recipient_single_number_itf = res[records.id]['number'] or ''
if not composer.number_field_name:
composer.number_field_name = res[records.id]['field_store']
@api.depends('recipient_single_number', 'recipient_single_number_itf')
def _compute_recipient_single_valid(self):
for composer in self:
value = composer.recipient_single_number_itf or composer.recipient_single_number
if value:
records = composer._get_records()
sanitized = phone_validation.phone_sanitize_numbers_w_record([value], records)[value]['sanitized']
composer.recipient_single_valid = bool(sanitized)
else:
composer.recipient_single_valid = False
@api.depends('numbers', 'res_model', 'res_id')
def _compute_sanitized_numbers(self):
for composer in self:
if composer.numbers:
record = composer._get_records() if composer.res_model and composer.res_id else self.env.user
numbers = [number.strip() for number in composer.numbers.split(',')]
sanitize_res = phone_validation.phone_sanitize_numbers_w_record(numbers, record)
sanitized_numbers = [info['sanitized'] for info in sanitize_res.values() if info['sanitized']]
invalid_numbers = [number for number, info in sanitize_res.items() if info['code']]
if invalid_numbers:
raise UserError(_('Following numbers are not correctly encoded: %s', repr(invalid_numbers)))
composer.sanitized_numbers = ','.join(sanitized_numbers)
else:
composer.sanitized_numbers = False
@api.depends('composition_mode', 'res_model', 'res_id', 'template_id')
def _compute_body(self):
for record in self:
if record.template_id and record.composition_mode == 'comment' and record.res_id:
record.body = record.template_id._render_field('body', [record.res_id], compute_lang=True)[record.res_id]
elif record.template_id:
record.body = record.template_id.body
# ------------------------------------------------------------
# CRUD
# ------------------------------------------------------------
@api.model
def create(self, values):
# TDE FIXME: currently have to compute manually to avoid required issue, waiting VFE branch
if not values.get('body') or not values.get('composition_mode'):
values_wdef = self._add_missing_default_values(values)
cache_composer = self.new(values_wdef)
cache_composer._compute_body()
cache_composer._compute_composition_mode()
values['body'] = values.get('body') or cache_composer.body
values['composition_mode'] = values.get('composition_mode') or cache_composer.composition_mode
return super(SendSMS, self).create(values)
# ------------------------------------------------------------
# Actions
# ------------------------------------------------------------
def action_send_sms(self):
if self.composition_mode in ('numbers', 'comment'):
if self.comment_single_recipient and not self.recipient_single_valid:
raise UserError(_('Invalid recipient number. Please update it.'))
elif not self.comment_single_recipient and self.recipient_invalid_count:
raise UserError(_('%s invalid recipients', self.recipient_invalid_count))
self._action_send_sms()
return False
def action_send_sms_mass_now(self):
if not self.mass_force_send:
self.write({'mass_force_send': True})
return self.action_send_sms()
def _action_send_sms(self):
records = self._get_records()
if self.composition_mode == 'numbers':
return self._action_send_sms_numbers()
elif self.composition_mode == 'comment':
if records is None or not issubclass(type(records), self.pool['mail.thread']):
return self._action_send_sms_numbers()
if self.comment_single_recipient:
return self._action_send_sms_comment_single(records)
else:
return self._action_send_sms_comment(records)
else:
return self._action_send_sms_mass(records)
def _action_send_sms_numbers(self):
self.env['sms.api']._send_sms_batch([{
'res_id': 0,
'number': number,
'content': self.body,
} for number in self.sanitized_numbers.split(',')])
return True
def _action_send_sms_comment_single(self, records=None):
# If we have a recipient_single_original number, it's possible this number has been corrected in the popup
# if invalid. As a consequence, the test cannot be based on recipient_invalid_count, which count is based
# on the numbers in the database.
records = records if records is not None else self._get_records()
records.ensure_one()
if self.recipient_single_number_itf and self.recipient_single_number_itf != self.recipient_single_number:
records.write({self.number_field_name: self.recipient_single_number_itf})
return self._action_send_sms_comment(records=records)
def _action_send_sms_comment(self, records=None):
records = records if records is not None else self._get_records()
subtype_id = self.env['ir.model.data'].xmlid_to_res_id('mail.mt_note')
messages = self.env['mail.message']
for record in records:
messages |= record._message_sms(
self.body, subtype_id=subtype_id,
number_field=self.number_field_name,
sms_numbers=self.sanitized_numbers.split(',') if self.sanitized_numbers else None)
return messages
def _action_send_sms_mass(self, records=None):
records = records if records is not None else self._get_records()
sms_record_values = self._prepare_mass_sms_values(records)
sms_all = self._prepare_mass_sms(records, sms_record_values)
if sms_all and self.mass_keep_log and records and issubclass(type(records), self.pool['mail.thread']):
log_values = self._prepare_mass_log_values(records, sms_record_values)
records._message_log_batch(**log_values)
if sms_all and self.mass_force_send:
sms_all.filtered(lambda sms: sms.state == 'outgoing').send(auto_commit=False, raise_exception=False)
return self.env['sms.sms'].sudo().search([('id', 'in', sms_all.ids)])
return sms_all
# ------------------------------------------------------------
# Mass mode specific
# ------------------------------------------------------------
def _get_blacklist_record_ids(self, records, recipients_info):
""" Get a list of blacklisted records. Those will be directly canceled
with the right error code. """
if self.mass_use_blacklist:
bl_numbers = self.env['phone.blacklist'].sudo().search([]).mapped('number')
return [r.id for r in records if recipients_info[r.id]['sanitized'] in bl_numbers]
return []
def _get_done_record_ids(self, records, recipients_info):
""" Get a list of already-done records. Order of record set is used to
spot duplicates so pay attention to it if necessary. """
done_ids, done = [], []
for record in records:
sanitized = recipients_info[record.id]['sanitized']
if sanitized in done:
done_ids.append(record.id)
else:
done.append(sanitized)
return done_ids
def _prepare_recipient_values(self, records):
recipients_info = records._sms_get_recipients_info(force_field=self.number_field_name)
return recipients_info
def _prepare_body_values(self, records):
if self.template_id and self.body == self.template_id.body:
all_bodies = self.template_id._render_field('body', records.ids, compute_lang=True)
else:
all_bodies = self.env['mail.render.mixin']._render_template(self.body, records._name, records.ids)
return all_bodies
def _prepare_mass_sms_values(self, records):
all_bodies = self._prepare_body_values(records)
all_recipients = self._prepare_recipient_values(records)
blacklist_ids = self._get_blacklist_record_ids(records, all_recipients)
done_ids = self._get_done_record_ids(records, all_recipients)
result = {}
for record in records:
recipients = all_recipients[record.id]
sanitized = recipients['sanitized']
if sanitized and record.id in blacklist_ids:
state = 'canceled'
error_code = 'sms_blacklist'
elif sanitized and record.id in done_ids:
state = 'canceled'
error_code = 'sms_duplicate'
elif not sanitized:
state = 'error'
error_code = 'sms_number_format' if recipients['number'] else 'sms_number_missing'
else:
state = 'outgoing'
error_code = ''
result[record.id] = {
'body': all_bodies[record.id],
'partner_id': recipients['partner'].id,
'number': sanitized if sanitized else recipients['number'],
'state': state,
'error_code': error_code,
}
return result
def _prepare_mass_sms(self, records, sms_record_values):
sms_create_vals = [sms_record_values[record.id] for record in records]
return self.env['sms.sms'].sudo().create(sms_create_vals)
def _prepare_log_body_values(self, sms_records_values):
result = {}
for record_id, sms_values in sms_records_values.items():
result[record_id] = html2plaintext(sms_values['body'])
return result
def _prepare_mass_log_values(self, records, sms_records_values):
return {
'bodies': self._prepare_log_body_values(sms_records_values),
'message_type': 'sms',
}
# ------------------------------------------------------------
# Tools
# ------------------------------------------------------------
def _get_composer_values(self, composition_mode, res_model, res_id, body, template_id):
result = {}
if composition_mode == 'comment':
if not body and template_id and res_id:
template = self.env['sms.template'].browse(template_id)
result['body'] = template._render_template(template.body, res_model, [res_id])[res_id]
elif template_id:
template = self.env['sms.template'].browse(template_id)
result['body'] = template.body
else:
if not body and template_id:
template = self.env['sms.template'].browse(template_id)
result['body'] = template.body
return result
def _get_records(self):
if not self.res_model:
return None
if self.use_active_domain:
active_domain = literal_eval(self.active_domain or '[]')
records = self.env[self.res_model].search(active_domain)
elif self.res_ids:
records = self.env[self.res_model].browse(literal_eval(self.res_ids))
elif self.res_id:
records = self.env[self.res_model].browse(self.res_id)
else:
records = self.env[self.res_model]
records = records.with_context(mail_notify_author=True)
return records
| [
"[email protected]"
]
| |
8b2ff510d53caf77dc3fc6a53a9d025f256c25b2 | 801b637b846b5ada47c462ad8547d240ceba72b8 | /Linked Lists/Remove Linked List Elements.py | 5fa602316c130cf8f1835f61bcff3fecba943547 | []
| no_license | shlokashah/Coding-Practice | 7834fed4b50b85ddcab420e830ecec89638390a5 | a56e1a4185aba1f32c1169d486b705f28888ca07 | refs/heads/master | 2022-11-20T13:00:54.617380 | 2020-07-21T14:35:40 | 2020-07-21T14:35:40 | 252,912,592 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 506 | py | # Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution:
def removeElements(self, head: ListNode, val: int) -> ListNode:
temp = ListNode()
temp.next = head
cur = temp
while temp.next:
if temp.next.val == val:
temp.next = temp.next.next
else:
temp = temp.next
return cur.next
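# Minimal sanity-check sketch (comment-only; assumes ListNode is supplied by
# the judge environment as in the definition above):
#   head = 1 -> 2 -> 6 -> 3 -> 4 -> 5 -> 6
#   Solution().removeElements(head, 6)  # -> 1 -> 2 -> 3 -> 4 -> 5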
| [
"[email protected]"
]
| |
1c83408411bb27495158a05efcd8fc60e46696ab | 3395a234e7c80d011607e79c49cd48bf516f256b | /dependencies/jedi/third_party/typeshed/third_party/2and3/mypy_extensions.pyi | 19d99cc9d70ca31f5df8f823fe75a31c3fd78c00 | [
"MIT",
"Apache-2.0"
]
| permissive | srusskih/SublimeJEDI | 67329b72e184bc9584843968dcc534a002c797a1 | 95c185d778425c04536d53517b0e3fe6dedf8e59 | refs/heads/master | 2023-08-24T11:30:37.801834 | 2022-08-30T09:04:17 | 2022-08-30T09:04:17 | 6,241,108 | 669 | 125 | MIT | 2022-08-30T09:04:18 | 2012-10-16T08:23:57 | Python | UTF-8 | Python | false | false | 2,051 | pyi | import abc
import sys
from typing import (
Dict, Type, TypeVar, Optional, Union, Any, Generic, Mapping, ItemsView, KeysView, ValuesView,
Callable,
)
_T = TypeVar('_T')
_U = TypeVar('_U')
# Internal mypy fallback type for all typed dicts (does not exist at runtime)
class _TypedDict(Mapping[str, object], metaclass=abc.ABCMeta):
def copy(self: _T) -> _T: ...
# Using NoReturn so that only calls using mypy plugin hook that specialize the signature
# can go through.
def setdefault(self, k: NoReturn, default: object) -> object: ...
# Mypy plugin hook for 'pop' expects that 'default' has a type variable type.
def pop(self, k: NoReturn, default: _T = ...) -> object: ...
def update(self: _T, __m: _T) -> None: ...
if sys.version_info < (3, 0):
def has_key(self, k: str) -> bool: ...
def viewitems(self) -> ItemsView[str, object]: ...
def viewkeys(self) -> KeysView[str]: ...
def viewvalues(self) -> ValuesView[object]: ...
def __delitem__(self, k: NoReturn) -> None: ...
def TypedDict(typename: str, fields: Dict[str, Type[_T]], total: bool = ...) -> Type[Dict[str, Any]]: ...
def Arg(type: _T = ..., name: Optional[str] = ...) -> _T: ...
def DefaultArg(type: _T = ..., name: Optional[str] = ...) -> _T: ...
def NamedArg(type: _T = ..., name: Optional[str] = ...) -> _T: ...
def DefaultNamedArg(type: _T = ..., name: Optional[str] = ...) -> _T: ...
def VarArg(type: _T = ...) -> _T: ...
def KwArg(type: _T = ...) -> _T: ...
# Return type that indicates a function does not return.
# This type is equivalent to the None type, but the no-op Union is necessary to
# distinguish the None type from the None value.
NoReturn = Union[None] # Deprecated: Use typing.NoReturn instead.
# This is intended as a class decorator, but mypy rejects abstract classes
# when a Type[_T] is expected, so we can't give it the type we want
def trait(cls: Any) -> Any: ...
def mypyc_attr(*attrs: str, **kwattrs: object) -> Callable[[_T], _T]: ...
class FlexibleAlias(Generic[_T, _U]): ...
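# Illustrative usage (comment-only; mirrors the documented mypy_extensions API):
#   from mypy_extensions import TypedDict, Arg, DefaultArg
#   Movie = TypedDict('Movie', {'name': str, 'year': int})
#   f: Callable[[Arg(int, 'x'), DefaultArg(str, 'label')], None]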
| [
"[email protected]"
]
| |
afca66273f25c8e08d273045ec0a1a360be666e4 | 00be95b38365bbf024572c4071aa20edc85deddd | /pretraining/models/customnet.py | b9207ef3e74ef8fdcbebd153d37281027ca81beb | [
"MIT"
]
| permissive | tikzoxs/EyeKnowYouSSL_SimCLR | 2fbbc00642a4068f74a1db6d82bba160ca738346 | cdb92cf43eff6396fd416b3bba2b5bffcb8072c1 | refs/heads/main | 2023-01-12T19:59:24.706886 | 2020-11-15T04:02:59 | 2020-11-15T04:02:59 | 309,214,933 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,108 | py | import torch
import torch.nn as nn
import torch.nn.functional as F
import torchvision.models as models
#create your custom net
class CustomNet(nn.Module):
def __init__(self, out_dim=64):
super(CustomNet, self).__init__()
self.conv1 = nn.Conv2d(1, 16, kernel_size=3, stride=1, padding=1)
self.conv2 = nn.Conv2d(16, 32, kernel_size=3, stride=1, padding=1)
self.conv3 = nn.Conv2d(32, 64, kernel_size=3, stride=1, padding=1)
self.conv4 = nn.Conv2d(64, 64, kernel_size=3, stride=1, padding=1)
self.pool = nn.MaxPool2d(2, 2)
        # projection head: maps the 64-d pooled features to 256; note that
        # out_dim is only consumed by the commented-out second layer below
        self.l1 = nn.Linear(64, 256)
        # self.l2 = nn.Linear(64, out_dim)
def forward(self, x):
x = self.conv1(x)
x = F.relu(x)
x = self.pool(x)
x = self.conv2(x)
x = F.relu(x)
x = self.pool(x)
x = self.conv3(x)
x = F.relu(x)
x = self.pool(x)
x = self.conv4(x)
x = F.relu(x)
x = self.pool(x)
h = torch.mean(x, dim=[2, 3])
x = self.l1(h)
return x
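# Minimal usage sketch (illustrative; the 64x64 input size is an assumption —
# any spatial size >= 16 works because the mean over dims [2, 3] collapses it):
if __name__ == "__main__":
    model = CustomNet()
    dummy = torch.randn(8, 1, 64, 64)  # batch of 8 single-channel images
    print(model(dummy).shape)  # torch.Size([8, 256])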
| [
"[email protected]"
]
| |
775417051adf3f95c38846eb3bc4d9c9f4928314 | 951fc0da7384b961726999e5451a10e2783462c4 | /plugin.video.docuhub/default.py | 1c4f51c14b2f8c8f342fd92df7a15869ff1d45a5 | []
| no_license | vphuc81/MyRepository | eaf7b8531b2362f0e0de997a67b889bc114cd7c2 | 9bf8aca6de07fcd91bcec573f438f29e520eb87a | refs/heads/master | 2022-01-02T15:07:35.821826 | 2021-12-24T05:57:58 | 2021-12-24T05:57:58 | 37,680,232 | 6 | 10 | null | null | null | null | UTF-8 | Python | false | false | 25,642 | py | import urllib,urllib2,re,xbmcplugin,xbmcgui,xbmc, xbmcaddon, os, sys
import urlresolver
from metahandler import metahandlers
addon_id = 'plugin.video.docuhub'
#ReddiTube - Blazetamer.
addon = xbmcaddon.Addon ('plugin.video.docuhub')
#URL= 'http://www.xbmchub.com'
#PATHS
addonPath = addon.getAddonInfo('path')
artPath = addonPath + '/art/'
fanartPath = addonPath + '/art/'
#HOOKS
settings = xbmcaddon.Addon(id='plugin.video.docuhub')
#Setup Meta
grab=metahandlers.MetaData()
def GRABMETA(name,year):
meta = grab.get_meta('movie',name,year,None,None,overlay=6)
infoLabels = {'rating': meta['rating'],'duration': meta['duration'],'genre': meta['genre'],'mpaa':"rated %s"%meta['mpaa'],
'plot': meta['plot'],'title': meta['title'],'writer': meta['writer'],'cover_url': meta['cover_url'],
'director': meta['director'],'cast': meta['cast'],'backdrop_url': meta['backdrop_url'],'tmdb_id': meta['tmdb_id'],'year': meta['year']}
return infoLabels
#AutoView
def AUTO_VIEW(content):
if content:
xbmcplugin.setContent(int(sys.argv[1]), content)
if settings.getSetting('auto-view') == 'true':
if content == 'movies':
xbmc.executebuiltin("Container.SetViewMode(%s)" % settings.getSetting('movies-view') )
if content == 'list':
xbmc.executebuiltin("Container.SetViewMode(%s)" % settings.getSetting('list-view') )
else:
xbmc.executebuiltin("Container.SetViewMode(%s)" % settings.getSetting('default-view') )
#Main Links
def CATEGORIES():
if settings.getSetting('topdocfilms') == 'true':
addDir('Top Documentary Films ','none','topdoc',artPath+'topdocfilm.png')
if settings.getSetting('docnet') == 'true':
addDir('Documentary.net','none','docnet',artPath+'docnet.png')
if settings.getSetting('doclog') == 'true':
addDir('Documentary-Log ','none','doclog',artPath+'doculog.png')
if settings.getSetting('docstorm') == 'true':
addDir('Documentary Storm ','none','docstorm',artPath+'docstorm.png')
if settings.getSetting('resolver') == 'true':
addDir('[COLOR gold]Resolver Settings[/COLOR]','none','resolverSettings','')
AUTO_VIEW('list')
def TOPDOC():
addDir('9/11','http://topdocumentaryfilms.com/category/911/','tdindex','')
addDir('Art/Artists','http://topdocumentaryfilms.com/category/art-artists/','tdindex','')
addDir('Biography','http://topdocumentaryfilms.com/category/biography/','tdindex','')
addDir('Comedy','http://topdocumentaryfilms.com/category/comedy/','tdindex','')
addDir('Crime/Conspiracy','http://topdocumentaryfilms.com/category/crime-conspiracy/','tdindex','')
addDir('Crime','http://topdocumentaryfilms.com/category/crime/','tdindex','')
addDir('Drugs','http://topdocumentaryfilms.com/category/drugs/','tdindex','')
addDir('Economics','http://topdocumentaryfilms.com/category/economics/','tdindex','')
    addDir('Environment','http://topdocumentaryfilms.com/category/enviroment/','tdindex','')
addDir('Health','http://topdocumentaryfilms.com/category/health/','tdindex','')
addDir('History','http://topdocumentaryfilms.com/category/history/','tdindex','')
addDir('Media','http://topdocumentaryfilms.com/category/media/','tdindex','')
addDir('Military/War','http://topdocumentaryfilms.com/category/military-war/','tdindex','')
addDir('Mystery','http://topdocumentaryfilms.com/category/mystery/','tdindex','')
addDir('Nature/Wildlife','http://topdocumentaryfilms.com/category/nature-wildlife/','tdindex','')
addDir('Performing Arts','http://topdocumentaryfilms.com/category/music-performing-arts/','tdindex','')
addDir('Philosophy','http://topdocumentaryfilms.com/category/philosophy/','tdindex','')
addDir('Politics','http://topdocumentaryfilms.com/category/politics/','tdindex','')
addDir('Psychology','http://topdocumentaryfilms.com/category/psychology/','tdindex','')
addDir('Religion','http://topdocumentaryfilms.com/category/religion/','tdindex','')
addDir('Science/Tech','http://topdocumentaryfilms.com/category/science-technology/','tdindex','')
addDir('Sexuality','http://topdocumentaryfilms.com/category/sex/','tdindex','')
addDir('Society','http://topdocumentaryfilms.com/category/society/','tdindex','')
addDir('Sports','http://topdocumentaryfilms.com/category/sports/','tdindex','')
addDir('Technology','http://topdocumentaryfilms.com/category/technology/','tdindex','')
AUTO_VIEW('list')
def DOCNET():
addDir('Latest Documentaries','http://documentary.net/','docnetlatest','')
    addDir('Categories','http://documentary.net/','docnetcat','')
AUTO_VIEW('list')
def DOCLOG():
addDir('Latest Documentaries','http://www.documentary-log.com/','docloglatest','')
    addDir('Categories','http://www.documentary-log.com/','doclogcat','')
AUTO_VIEW('list')
def DOCSTORM():
addDir('Latest Documentaries','http://documentarystorm.com/','stormlatest','')
    addDir('Categories','http://documentarystorm.com/','stormcat','')
AUTO_VIEW('list')
#First Links from RSS
def TDINDEX(url):
#link = net.http_GET(url).content
req = urllib2.Request(url)
req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
response = urllib2.urlopen(req)
link=response.read()
response.close()
match=re.compile('"postTitle"><a\nhref="(.+?)" title="(.+?)">').findall(link)
#matchimg=re.compile('src="(.+?)" class="alignleft').findall(link)
for url,name in match:
        name = name.replace("’s", "'s")
#for thumb in matchimg:
addDir(name,url,'tdvidpage','')
match=re.compile('rel="next" href="(.+?)"').findall(link)
if len(match) > 0:
addDir('Next Page',(match[0]),'tdindex',artPath+'next.png')
AUTO_VIEW('list')
# For Documentary.net
def DOCNETINDEX(url):
req = urllib2.Request(url)
req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
response = urllib2.urlopen(req)
link=response.read()
response.close()
match=re.compile('<a href="(.+?)" class=\'fix\'><img src="(.+?)" alt="(.+?)"').findall(link)
#matchimg=re.compile('src="(.+?)" class="alignleft').findall(link)
for url,iconimage,name in match:
        name = name.replace("’s", "'s")
#for thumb in matchimg:
addDir(name,url,'docnetvidpage',iconimage)
match=re.compile("<a class='page-numbers' href='(.+?)'>(.+?)</a>").findall(link)
for url, number in match:
if len(match) > 0:
addDir('Page'+number,'http://documentary.net'+url,'docnetindex',artPath+'next.png')
AUTO_VIEW('movies')
def DOCNETCAT(url):
#link = net.http_GET(url).content
req = urllib2.Request(url)
req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
response = urllib2.urlopen(req)
link=response.read()
response.close()
match=re.compile('<li><a href="(.+?)">(.+?)</a>').findall(link)
for url,name in match:
#for thumb in matchimg:
addDir(name,url,'docnetindex','')
AUTO_VIEW('list')
def DOCNETLATEST(url):
#link = net.http_GET(url).content
req = urllib2.Request(url)
req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
response = urllib2.urlopen(req)
link=response.read()
response.close()
match=re.compile('<a href="(.+?)" class=.+?><img src="(.+?)" alt="(.+?)" />').findall(link)
#matchimg=re.compile('src="(.+?)" class="alignleft').findall(link)
for url,iconimage,name in match:
        name = name.replace("’s", "'s")
        name = name.replace("–", "-")
#for thumb in matchimg:
addDir(name,url,'docnetvidpage',iconimage)
AUTO_VIEW('movies')
def STORMLATEST(url):
req = urllib2.Request(url)
req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
response = urllib2.urlopen(req)
link=response.read()
response.close()
match=re.compile('class="cover">\r\n\t\t<a href="(.+?)" title="(.+?)" >\r\n \t\t\t<img width="198" height="297" src="(.+?)"').findall(link)
for url,name,iconimage in match:
addDir(name,url,'stormvidpage',iconimage)
AUTO_VIEW('movies')
def STORMCAT(url):
req = urllib2.Request(url)
req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
response = urllib2.urlopen(req)
link=response.read()
response.close()
match=re.compile('<a href="(.+?)" title=".+?">(.+?)</a></li>').findall(link)
for url,name in match:
addDir(name,url,'stormindex','')
AUTO_VIEW('list')
def STORMINDEX(url):
req = urllib2.Request(url)
req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
response = urllib2.urlopen(req)
link=response.read()
response.close()
match=re.compile('class="cover">\r\n\t\t<a href="(.+?)" title="(.+?)" >\r\n \t\t\t<img width="198" height="297" src="(.+?)"').findall(link)
for url,name,iconimage in match:
        name = name.replace("’s", "'s")
        name = name.replace("–", "-")
addDir(name,url,'stormvidpage',iconimage)
match=re.compile('<link rel="next" href="(.+?)" />').findall(link)
if len(match) > 0:
addDir('Next Page',(match[0]),'stormindex',artPath+'next.png')
AUTO_VIEW('movies')
def STORMVIDPAGE(url,name):
#link = net.http_GET(url).content
req = urllib2.Request(url)
req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
response = urllib2.urlopen(req)
link=response.read()
response.close()
#match=re.compile('<p><iframe src="(.+?)" .+?" .+?"').findall(link)==========old links
match=re.compile('<p><iframe width=".+?" height=".+?" src="(.+?)" frameborder=".+?" ').findall(link)
if len(match) > 0:
for url in match:
if 'youtube' in url:
if 'http:' in url:
url = url.replace('http:','')
url = url.replace('//www.youtube.com/embed/','http://www.youtube.com/embed?v=')
RESOLVE(name,url,'')
AUTO_VIEW('movies')
else:
url = url.replace('//www.youtube.com/embed/','http://www.youtube.com/embed?v=')
RESOLVE(name,url,'')
AUTO_VIEW('movies')
#for Vimeo first page
#if len(match)<1:
if 'vimeo' in url:
#else:
#match=re.compile('<p><iframe src="(.+?)" .+?" .+?"').findall(link)
for url in match:
#url = url.replace('//player.vimeo.com/video/','http://player.vimeo.com/video/')
TDVIMEO(name,url,'')
AUTO_VIEW('movies')
else:
match=re.compile('<iframe class=".+?" width=".+?" src="(.+?)" frameborder=').findall(link)
for url in match:
TDVIMEO(name,url,'')
AUTO_VIEW('movies')
def DOCLOGCAT(url):
#link = net.http_GET(url).content
req = urllib2.Request(url)
req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
response = urllib2.urlopen(req)
link=response.read()
response.close()
match=re.compile('<li class=".+?"><a href="(.+?)" title=".+?">(.+?)</a>').findall(link)
for url,name in match:
addDir(name,url,'docloglatest','')
AUTO_VIEW('list')
def DOCLOGVIDPAGE(url, name):
#link = net.http_GET(url).content
req = urllib2.Request(url)
req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
response = urllib2.urlopen(req)
link=response.read()
response.close()
match=re.compile('<iframe.*?src="(http://.+?)".*?>').findall(link)
for url in match:
if 'youtube' in url:
url = url.replace('embed/','embed?v=')
RESOLVE(name,url,'')
AUTO_VIEW('movies')
#for Vimeo first page
#if len(match)<1:
if 'vimeo' in url:
#else:
#match=re.compile('"url":"(.+?)"').findall(link)
for url in match:
#url = url.replace('vimeo.com/moogaloop.swf?','player.vimeo.com/video/')
TDVIMEO(name,url,'')
AUTO_VIEW('movies')
def DOCLOGLATEST(url):
#link = net.http_GET(url).content
req = urllib2.Request(url)
req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
response = urllib2.urlopen(req)
link=response.read()
response.close()
match=re.compile('<a href="(.+?)" title="(.+?)">\r\n <img src="(.+?)" alt=".+?" class="thumb"').findall(link)
#matchimg=re.compile('src="(.+?)" class="alignleft').findall(link)
for url,name,iconimage in match:
#for thumb in matchimg:
addDir(name,url,'doclogvidpage',iconimage)
match=re.compile("<a href='(.+?)' class='page larger'>(.+?)</a>").findall(link)
for url, number in match:
if len(match) > 0:
addDir('Page'+number,url,'docloglatest',artPath+'next.png')
AUTO_VIEW('movies')
# For Primary YouTube Listing
def TDVIDPAGE(url,name):
#link = net.http_GET(url).content
req = urllib2.Request(url)
req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
response = urllib2.urlopen(req)
link=response.read()
response.close()
match=re.compile('width=".+?" height=".+?" src="(.+?)rel=0.+?"').findall(link)
for url in match:
if 'http:'in url:
url = url.replace('embed/','embed?v=')
RESOLVE(name,url,'')
AUTO_VIEW('movies')
else:
url = 'http:'+url
url = url.replace('embed/','embed?v=')
RESOLVE(name,url,'')
AUTO_VIEW('movies')
#for odd YT and Vimeo first page
if len(match)<1:
match=re.compile('width="530" height="325" src="(.+?)"').findall(link)
for url in match:
if 'youtube' in url:
url = 'http:'+url
url = url.replace('/embed/videoseries?list=','embed?=')
RESOLVE(name,url,'')
AUTO_VIEW('movies')
if 'vimeo' in url:
TDVIMEO(name,url,'')
AUTO_VIEW('movies')
#Scrape and Play TD Vimeo url
def TDVIMEO(name,url,iconimage):
req = urllib2.Request(url)
req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
response = urllib2.urlopen(req)
link=response.read()
response.close()
match=re.compile('"url":"(.+?)","height":.+?,"width":.+?,').findall(link)
for url in match:
ok=True
liz=xbmcgui.ListItem(name, iconImage=iconimage,thumbnailImage=iconimage); liz.setInfo( type="Video", infoLabels={ "Title": name } )
ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=str(url),listitem=liz)
xbmc.executebuiltin("XBMC.Notification(Please Wait!,Preparing Your Video,3000)")
xbmc.sleep(1000)
xbmc.Player ().play(str(url), liz, False)
AUTO_VIEW('')
# DocNet Start
def DOCNETVIDPAGE(url,name):
#link = net.http_GET(url).content
req = urllib2.Request(url)
req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
response = urllib2.urlopen(req)
link=response.read()
response.close()
match=re.compile('"embedURL" content="(.+?)?version').findall(link)
for url in match:
#url = url.replace('v/','embed?v=')
if 'youtube' in url:
url = url.replace('v/','embed?v=')
RESOLVE(name,url,'')
AUTO_VIEW('movies')
#for Vimeo first page
if len(match)<1:
#if 'vimeo' in url:
#else:
match=re.compile('"embedURL" content="(.+?)" />').findall(link)
for url in match:
url = url.replace('vimeo.com/moogaloop.swf?','player.vimeo.com/video/')
TDVIMEO(name,url,'')
AUTO_VIEW('movies')
# Second From Source to YT
def VIDEOLINKSYT(url,name):
link = net.http_GET(url).content
match=re.compile('<a class="title " href="(.+?)" tabindex="1"').findall(link)
for url in match:
movie_name = name[:-6]
year = name[-6:]
movie_name = movie_name.decode('UTF-8','ignore')
data = GRABMETA(movie_name,year)
thumb = data['cover_url']
RESOLVEYT(name,url,iconimage)
AUTO_VIEW('movies')
def RESOLVE(name,url,iconimage):
url = urlresolver.HostedMediaFile(url=url).resolve()
ok=True
liz=xbmcgui.ListItem(name, iconImage=iconimage,thumbnailImage=iconimage); liz.setInfo( type="Video", infoLabels={ "Title": name } )
ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=str(url),listitem=liz)
xbmc.executebuiltin("XBMC.Notification(Please Wait!,Preparing Your Video,3000)")
xbmc.sleep(1000)
xbmc.Player ().play(str(url), liz, False)
AUTO_VIEW('')
#Resolve 2 forYouTube
def RESOLVEYT(name,url,iconimage):
url = urlresolver.HostedMediaFile(url=url).resolve()
ok=True
liz=xbmcgui.ListItem(name, iconImage=iconimage, thumbnailImage=iconimage); liz.setInfo( type="Video", infoLabels={ "Title": name } )
ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=url,listitem=liz)
xbmc.executebuiltin("XBMC.Notification(Please Wait!,Preparing Your Video,3000)")
xbmc.sleep(1000)
xbmc.Player ().play(url, liz, False)
AUTO_VIEW('')
#Start Ketboard Function
def _get_keyboard( default="", heading="", hidden=False ):
""" shows a keyboard and returns a value """
keyboard = xbmc.Keyboard( default, heading, hidden )
keyboard.doModal()
if ( keyboard.isConfirmed() ):
return unicode( keyboard.getText(), "utf-8" )
return default
#Start Search Function
def SEARCH(url):
searchUrl = url
vq = _get_keyboard( heading="Searching DocuHub" )
# if blank or the user cancelled the keyboard, return
if ( not vq ): return False, 0
# we need to set the title to our query
title = urllib.quote_plus(vq)
searchUrl += title
print "Searching URL: " + searchUrl
    TDINDEX(searchUrl)  # assumption: reuse the generic index scraper; the original called INDEX(), which is not defined in this file
AUTO_VIEW('movies')
def get_params():
param=[]
paramstring=sys.argv[2]
if len(paramstring)>=2:
params=sys.argv[2]
cleanedparams=params.replace('?','')
if (params[len(params)-1]=='/'):
params=params[0:len(params)-2]
pairsofparams=cleanedparams.split('&')
param={}
for i in range(len(pairsofparams)):
splitparams={}
splitparams=pairsofparams[i].split('=')
if (len(splitparams))==2:
param[splitparams[0]]=splitparams[1]
return param
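# Illustrative routing scheme built by addDir() and parsed back by get_params():
#   plugin://plugin.video.docuhub/?url=<quoted url>&mode=tdindex&name=History
# get_params() returns e.g. {'url': ..., 'mode': 'tdindex', 'name': 'History'}.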
# addLink for direct play
def addLink(name,url,iconimage):
ok=True
liz=xbmcgui.ListItem(name, iconImage="DefaultVideo.png", thumbnailImage=iconimage);liz.setInfo('video',{'Title':name,'Genre':'Live','Studio':name})
ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=url,listitem=liz)
xbmc.executebuiltin("XBMC.Notification(Please Wait!,Resolving Link,3000)")
xbmc.sleep(1000)
xbmc.Player (xbmc.PLAYER_CORE_PAPLAYER).play(url, liz, False)
return ok
# Standard addDir
def addDir(name,url,mode,iconimage):
u=sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&name="+urllib.quote_plus(name)
ok=True
liz=xbmcgui.ListItem(name, iconImage="DefaultFolder.png", thumbnailImage=iconimage)
liz.setInfo( type="Video", infoLabels={ "Title": name } )
xbmc.executebuiltin("Container.SetViewMode(500)")
ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz,isFolder=True)
return ok
#Alt addDir
def addDird(name,url,mode,iconimage,labels,favtype):
u=sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&name="+urllib.quote_plus(name)
ok=True
liz=xbmcgui.ListItem(name, iconImage="DefaultFolder.png", thumbnailImage=iconimage)
liz.setInfo( type="Video", infoLabels=labels )
    contextMenuItems = []  # was referenced without ever being defined
    if favtype == 'movie':
        contextMenuItems.append(('Movie Information', 'XBMC.Action(Info)'))
    liz.addContextMenuItems(contextMenuItems)
ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz,isFolder=True)
return ok
params=get_params()
url=None
name=None
mode=None
try:
url=urllib.unquote_plus(params["url"])
except:
pass
try:
name=urllib.unquote_plus(params["name"])
except:
pass
#May need to remove
#try:
# mode=int(params["mode"])
#except:
# pass
try:
mode=urllib.unquote_plus(params["mode"])
except:
pass
print "Mode: "+str(mode)
print "URL: "+str(url)
print "Name: "+str(name)
if mode==None or url==None or len(url)<1:
print ""
CATEGORIES()
elif mode=='topdoc':
print ""+url
TOPDOC()
elif mode=='docnetcat':
print ""+url
DOCNETCAT(url)
elif mode=='doclogcat':
print ""+url
DOCLOGCAT(url)
elif mode=='docnet':
print ""+url
DOCNET()
elif mode=='doclog':
print ""+url
DOCLOG()
elif mode=='tdindex':
print ""+url
TDINDEX(url)
elif mode=='docstorm':
print ""+url
DOCSTORM()
elif mode=='stormindex':
print ""+url
STORMINDEX(url)
elif mode=='stormvidpage':
print ""+url
STORMVIDPAGE(url,name)
elif mode=='stormlatest':
print ""+url
STORMLATEST(url)
elif mode=='stormcat':
print ""+url
STORMCAT(url)
elif mode=='tdvidpage':
print ""+url
TDVIDPAGE(url,name)
elif mode=='docnetindex':
print ""+url
DOCNETINDEX(url)
elif mode=='doclogvidpage':
print ""+url
    DOCLOGVIDPAGE(url,name)
elif mode=='docnetlatest':
print ""+url
DOCNETLATEST(url)
elif mode=='docloglatest':
print ""+url
DOCLOGLATEST(url)
elif mode=='docnetvidpage':
print ""+url
DOCNETVIDPAGE(url,name)
elif mode=='videolinksyt':
print ""+url
VIDEOLINKSYT(url,name)
elif mode=='resolverSettings':
print ""+url
urlresolver.display_settings()
#For Search Function
elif mode=='10':
print ""+url
SEARCH(url)
xbmcplugin.endOfDirectory(int(sys.argv[1]))
| [
"[email protected]"
]
| |
db6ac5fe2e00e73bf729e3846e1634923d5a9b37 | 2efe8116a5a60f5f7c46cf1b0ac598be49087942 | /EpsilonWebsite/EpsilonWebsite/wsgi.py | 672d83380fb4b4ce6e03f6b0705b39fb5abb00da | []
| no_license | SothanaV/EIweb | caaf8b9f844ebf28d0a45b7cceaf753277cbe1c7 | cedca00e74151a6ecb78da7b76d8888c9c94424b | refs/heads/master | 2021-09-04T04:19:30.124958 | 2018-01-15T18:32:47 | 2018-01-15T18:32:47 | 104,970,256 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 406 | py | """
WSGI config for EpsilonWebsite project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "EpsilonWebsite.settings")
application = get_wsgi_application()
| [
"[email protected]"
]
| |
da7d289d9a5eb18c06ab6d897553543f1728130c | ad5c6daba04c8e04054085f96d36f5b167a09a37 | /src/lepl/stream/maxdepth.py | b5573f0cf84668787a590009e71e57130452fe94 | []
| no_license | nyimbi/LEPL | f49fee47a3c47d0291d2356e8a1e9b3120e32c05 | 0603505f187acc3c7da2e1a6083833a201f8b061 | refs/heads/master | 2021-04-15T13:40:32.860153 | 2018-03-26T14:00:25 | 2018-03-26T14:00:25 | 126,837,047 | 2 | 2 | null | null | null | null | UTF-8 | Python | false | false | 3,162 | py | # The contents of this file are subject to the Mozilla Public License
# (MPL) Version 1.1 (the "License"); you may not use this file except
# in compliance with the License. You may obtain a copy of the License
# at http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS"
# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
# the License for the specific language governing rights and
# limitations under the License.
#
# The Original Code is LEPL (http://www.acooke.org/lepl)
# The Initial Developer of the Original Code is Andrew Cooke.
# Portions created by the Initial Developer are Copyright (C) 2009-2010
# Andrew Cooke ([email protected]). All Rights Reserved.
#
# Alternatively, the contents of this file may be used under the terms
# of the LGPL license (the GNU Lesser General Public License,
# http://www.gnu.org/licenses/lgpl.html), in which case the provisions
# of the LGPL License are applicable instead of those above.
#
# If you wish to allow use of your version of this file only under the
# terms of the LGPL License and not to allow others to use your version
# of this file under the MPL, indicate your decision by deleting the
# provisions above and replace them with the notice and other provisions
# required by the LGPL License. If you do not delete the provisions
# above, a recipient may use your version of this file under either the
# MPL or the LGPL License.
'''
Raise an exception if the stream is not consumed entirely.
'''
from lepl.stream.core import s_empty, s_fmt, s_deepest, s_next
from lepl.matchers.support import trampoline_matcher_factory
@trampoline_matcher_factory()
def FullFirstMatch(matcher, eos=True):
'''
Raise an exception if the first match fails (if eos=False) or does not
consume the entire input stream (eos=True). The exception includes
information about the location of the deepest match.
This only works for the first match because we cannot reset the stream
facade for subsequent matches (also, if you want multiple matches you
probably want more sophisticated error handling than this).
'''
def _matcher(support, stream1):
# set default maxdepth
s_next(stream1, count=0)
# first match
generator = matcher._match(stream1)
try:
(result2, stream2) = yield generator
if eos and not s_empty(stream2):
raise FullFirstMatchException(stream2)
else:
yield (result2, stream2)
except StopIteration:
raise FullFirstMatchException(stream1)
# subsequent matches:
while True:
result = yield generator
yield result
return _matcher
class FullFirstMatchException(Exception):
'''
The exception raised by `FullFirstMatch`. This includes information
about the deepest point read in the stream.
'''
def __init__(self, stream):
super(FullFirstMatchException, self).__init__(
s_fmt(s_deepest(stream),
'The match failed in {filename} at {rest} ({location}).'))
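# Minimal usage sketch (comment-only; `Any` is from lepl's public matcher API,
# and the grammar here is a hypothetical example):
#
#     from lepl import Any
#     matcher = FullFirstMatch(Any('ab')[:])
#     matcher.parse('abba')  # consumes the whole input
#     matcher.parse('abc')   # raises FullFirstMatchException at 'c'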
| [
"[email protected]"
]
| |
76376141faf3d7231ac68a1fbea4c36860e31d1a | 4e8a1750e6a9e7368c91bc9296fb1c1ff6b8f3ea | /unit08/exercise0806.py | af1899a476a88ac2dc06154710faf36c99c4c111 | []
| no_license | kevin510610/Book_AGuideToPython_Kaiching-Chang | 961dbd24aa1094664b9a9754f2882d4d7f964289 | 7db7cf8186e02f4210a01fbd4c454f0030b57022 | refs/heads/master | 2023-04-16T04:59:51.598236 | 2021-04-13T03:08:32 | 2021-04-13T03:08:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 218 | py | def factorial(n):
i = 1
p = 1
while i <= n:
p *= i
i += 1
return p
n = int(input("n: "))
print(factorial(n))
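# Sample run: entering n = 5 prints 120 (5! = 5*4*3*2*1).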
# File: exercise0806.py
# Author: Kaiching Chang
# Date: July, 2014
| [
"[email protected]"
]
| |
13541c0430b0d0bec4bee881aaa9ca0e0c84bf9a | 6fcfb638fa725b6d21083ec54e3609fc1b287d9e | /python/XX-net_XX-Net/XX-Net-master/code/default/launcher/web_control.py | 4d28e5e98251a04e9c8b4a0c932feeb80b114c45 | []
| no_license | LiuFang816/SALSTM_py_data | 6db258e51858aeff14af38898fef715b46980ac1 | d494b3041069d377d6a7a9c296a14334f2fa5acc | refs/heads/master | 2022-12-25T06:39:52.222097 | 2019-12-12T08:49:07 | 2019-12-12T08:49:07 | 227,546,525 | 10 | 7 | null | 2022-12-19T02:53:01 | 2019-12-12T07:29:39 | Python | UTF-8 | Python | false | false | 20,275 | py | #!/usr/bin/env python
# coding:utf-8
import os, sys
current_path = os.path.dirname(os.path.abspath(__file__))
if __name__ == "__main__":
python_path = os.path.abspath( os.path.join(current_path, os.pardir, 'python27', '1.0'))
noarch_lib = os.path.abspath( os.path.join(python_path, 'lib', 'noarch'))
sys.path.append(noarch_lib)
import re
import socket, ssl
import urlparse
import threading
import urllib2
import time
root_path = os.path.abspath(os.path.join(current_path, os.pardir))
import yaml
import json
from instances import xlog
import module_init
import config
import autorun
import update_from_github
import simple_http_server
from simple_i18n import SimpleI18N
NetWorkIOError = (socket.error, ssl.SSLError, OSError)
i18n_translator = SimpleI18N(config.get(['language'], None))
module_menus = {}
class Http_Handler(simple_http_server.HttpServerHandler):
deploy_proc = None
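    # URL layout served by this handler (derived from do_GET/do_POST below):
    #   /                               -> req_index_handler (web UI dashboard)
    #   /config, /update, /init_module  -> launcher JSON APIs
    #   /module/<name>/control/<path>   -> forwarded to that module's web_control.ControlHandler
    #   /module/<name>/<file>           -> static file from the module's web_ui/ directory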
def load_module_menus(self):
global module_menus
new_module_menus = {}
#config.load()
modules = config.get(['modules'], None)
for module in modules:
values = modules[module]
if module != "launcher" and config.get(["modules", module, "auto_start"], 0) != 1: # skip php_proxy module
continue
menu_path = os.path.join(root_path, module, "web_ui", "menu.yaml") # launcher & gae_proxy modules
if not os.path.isfile(menu_path):
continue
# i18n code lines (Both the locale dir & the template dir are module-dependent)
locale_dir = os.path.abspath(os.path.join(root_path, module, 'lang'))
stream = i18n_translator.render(locale_dir, menu_path)
module_menu = yaml.load(stream)
new_module_menus[module] = module_menu
module_menus = sorted(new_module_menus.iteritems(), key=lambda (k,v): (v['menu_sort_id']))
#for k,v in self.module_menus:
# xlog.debug("m:%s id:%d", k, v['menu_sort_id'])
def do_POST(self):
refer = self.headers.getheader('Referer')
if refer:
refer_loc = urlparse.urlparse(refer).netloc
host = self.headers.getheader('host')
if refer_loc != host:
xlog.warn("web control ref:%s host:%s", refer_loc, host)
return
#url_path = urlparse.urlparse(self.path).path
url_path_list = self.path.split('/')
if len(url_path_list) >= 3 and url_path_list[1] == "module":
module = url_path_list[2]
if len(url_path_list) >= 4 and url_path_list[3] == "control":
if module not in module_init.proc_handler:
xlog.warn("request %s no module in path", self.path)
self.send_not_found()
return
path = '/' + '/'.join(url_path_list[4:])
controler = module_init.proc_handler[module]["imp"].local.web_control.ControlHandler(self.client_address, self.headers, self.command, path, self.rfile, self.wfile)
controler.do_POST()
return
def do_GET(self):
refer = self.headers.getheader('Referer')
if refer:
refer_loc = urlparse.urlparse(refer).netloc
host = self.headers.getheader('host')
if refer_loc != host:
xlog.warn("web control ref:%s host:%s", refer_loc, host)
return
# check for '..', which will leak file
if re.search(r'(\.{2})', self.path) is not None:
self.wfile.write(b'HTTP/1.1 404\r\n\r\n')
xlog.warn('%s %s %s haking', self.address_string(), self.command, self.path )
return
url_path = urlparse.urlparse(self.path).path
if url_path == '/':
return self.req_index_handler()
url_path_list = self.path.split('/')
if len(url_path_list) >= 3 and url_path_list[1] == "module":
module = url_path_list[2]
if len(url_path_list) >= 4 and url_path_list[3] == "control":
if module not in module_init.proc_handler:
xlog.warn("request %s no module in path", url_path)
self.send_not_found()
return
if "imp" not in module_init.proc_handler[module]:
xlog.warn("request module:%s start fail", module)
self.send_not_found()
return
path = '/' + '/'.join(url_path_list[4:])
controler = module_init.proc_handler[module]["imp"].local.web_control.ControlHandler(self.client_address, self.headers, self.command, path, self.rfile, self.wfile)
controler.do_GET()
return
else:
relate_path = '/'.join(url_path_list[3:])
file_path = os.path.join(root_path, module, "web_ui", relate_path)
if not os.path.isfile(file_path):
return self.send_not_found()
# i18n code lines (Both the locale dir & the template dir are module-dependent)
locale_dir = os.path.abspath(os.path.join(root_path, module, 'lang'))
content = i18n_translator.render(locale_dir, file_path)
return self.send_response('text/html', content)
else:
file_path = os.path.join(current_path, 'web_ui' + url_path)
xlog.debug ('launcher web_control %s %s %s ', self.address_string(), self.command, self.path)
if os.path.isfile(file_path):
if file_path.endswith('.js'):
mimetype = 'application/javascript'
elif file_path.endswith('.css'):
mimetype = 'text/css'
elif file_path.endswith('.html'):
mimetype = 'text/html'
elif file_path.endswith('.jpg'):
mimetype = 'image/jpeg'
elif file_path.endswith('.png'):
mimetype = 'image/png'
else:
mimetype = 'text/plain'
self.send_file(file_path, mimetype)
elif url_path == '/config':
self.req_config_handler()
elif url_path == '/update':
self.req_update_handler()
elif url_path == '/init_module':
self.req_init_module_handler()
elif url_path == '/quit':
self.send_response('text/html', '{"status":"success"}')
module_init.stop_all()
os._exit(0)
elif url_path == '/restart':
self.send_response('text/html', '{"status":"success"}')
update_from_github.restart_xxnet()
else:
self.send_not_found()
xlog.info('%s "%s %s HTTP/1.1" 404 -', self.address_string(), self.command, self.path)
def req_index_handler(self):
req = urlparse.urlparse(self.path).query
reqs = urlparse.parse_qs(req, keep_blank_values=True)
try:
target_module = reqs['module'][0]
target_menu = reqs['menu'][0]
except:
if config.get(['modules', 'gae_proxy', 'auto_start'], 0) == 1:
target_module = 'gae_proxy'
target_menu = 'status'
else:
target_module = 'launcher'
target_menu = 'about'
if len(module_menus) == 0:
self.load_module_menus()
# i18n code lines (Both the locale dir & the template dir are module-dependent)
locale_dir = os.path.abspath(os.path.join(current_path, 'lang'))
index_content = i18n_translator.render(locale_dir, os.path.join(current_path, "web_ui", "index.html"))
current_version = update_from_github.current_version()
menu_content = ''
for module,v in module_menus:
#xlog.debug("m:%s id:%d", module, v['menu_sort_id'])
title = v["module_title"]
menu_content += '<li class="nav-header">%s</li>\n' % title
for sub_id in v['sub_menus']:
sub_title = v['sub_menus'][sub_id]['title']
sub_url = v['sub_menus'][sub_id]['url']
if target_module == module and target_menu == sub_url:
active = 'class="active"'
else:
active = ''
menu_content += '<li %s><a href="/?module=%s&menu=%s">%s</a></li>\n' % (active, module, sub_url, sub_title)
right_content_file = os.path.join(root_path, target_module, "web_ui", target_menu + ".html")
if os.path.isfile(right_content_file):
# i18n code lines (Both the locale dir & the template dir are module-dependent)
locale_dir = os.path.abspath(os.path.join(root_path, target_module, 'lang'))
right_content = i18n_translator.render(locale_dir, os.path.join(root_path, target_module, "web_ui", target_menu + ".html"))
else:
right_content = ""
data = (index_content.decode('utf-8') % (current_version, current_version, menu_content, right_content.decode('utf-8') )).encode('utf-8')
self.send_response('text/html', data)
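    # Illustrative requests handled below (query args mirror the branches in this method):
    #   /config?cmd=get_config              -> current launcher settings as JSON
    #   /config?cmd=set_config&language=en  -> {"res":"success"}
    #   /config?cmd=set_config&auto_start=1 -> enables autorun, then saves the config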
def req_config_handler(self):
req = urlparse.urlparse(self.path).query
reqs = urlparse.parse_qs(req, keep_blank_values=True)
data = ''
if reqs['cmd'] == ['get_config']:
config.load()
check_update = config.get(["update", "check_update"], 1)
if check_update == 0:
check_update = "dont-check"
elif check_update == 1:
check_update = "stable"
data = '{ "check_update": "%s", "language": "%s", "popup_webui": %d, "allow_remote_connect": %d, \
"show_systray": %d, "auto_start": %d, "show_detail": %d, "gae_proxy_enable": %d, "x_tunnel_enable": %d}' %\
(check_update
, config.get(["language"], i18n_translator.lang)
, config.get(["modules", "launcher", "popup_webui"], 1)
, config.get(["modules", "launcher", "allow_remote_connect"], 0)
, config.get(["modules", "launcher", "show_systray"], 1)
, config.get(["modules", "launcher", "auto_start"], 0)
, config.get(["modules", "gae_proxy", "show_detail"], 0)
, config.get(["modules", "gae_proxy", "auto_start"], 0)
, config.get(["modules", "x_tunnel", "auto_start"], 0)
)
elif reqs['cmd'] == ['set_config']:
if 'check_update' in reqs:
check_update = reqs['check_update'][0]
if check_update not in ["dont-check", "stable", "test"]:
data = '{"res":"fail, check_update:%s"}' % check_update
else:
config.set(["update", "check_update"], check_update)
config.save()
data = '{"res":"success"}'
elif 'language' in reqs:
language = reqs['language'][0]
if language not in i18n_translator.get_valid_languages():
data = '{"res":"fail, language:%s"}' % language
else:
config.set(["language"], language)
config.save()
i18n_translator.lang = language
self.load_module_menus()
data = '{"res":"success"}'
elif 'popup_webui' in reqs:
popup_webui = int(reqs['popup_webui'][0])
if popup_webui != 0 and popup_webui != 1:
data = '{"res":"fail, popup_webui:%s"}' % popup_webui
else:
config.set(["modules", "launcher", "popup_webui"], popup_webui)
config.save()
data = '{"res":"success"}'
elif 'allow_remote_connect' in reqs:
allow_remote_connect = int(reqs['allow_remote_connect'][0])
if allow_remote_connect != 0 and allow_remote_connect != 1:
data = '{"res":"fail, allow_remote_connect:%s"}' % allow_remote_connect
else:
config.set(["modules", "launcher", "allow_remote_connect"], allow_remote_connect)
config.save()
data = '{"res":"success"}'
xlog.debug("restart web control.")
stop()
time.sleep(1)
start()
xlog.debug("launcher web control restarted.")
elif 'show_systray' in reqs:
show_systray = int(reqs['show_systray'][0])
if show_systray != 0 and show_systray != 1:
data = '{"res":"fail, show_systray:%s"}' % show_systray
else:
config.set(["modules", "launcher", "show_systray"], show_systray)
config.save()
data = '{"res":"success"}'
elif 'auto_start' in reqs:
auto_start = int(reqs['auto_start'][0])
if auto_start != 0 and auto_start != 1:
data = '{"res":"fail, auto_start:%s"}' % auto_start
else:
if auto_start:
autorun.enable()
else:
autorun.disable()
config.set(["modules", "launcher", "auto_start"], auto_start)
config.save()
data = '{"res":"success"}'
elif 'show_detail' in reqs:
show_detail = int(reqs['show_detail'][0])
if show_detail != 0 and show_detail != 1:
data = '{"res":"fail, show_detail:%s"}' % show_detail
else:
config.set(["modules", "gae_proxy", "show_detail"], show_detail)
config.save()
data = '{"res":"success"}'
elif 'gae_proxy_enable' in reqs :
gae_proxy_enable = int(reqs['gae_proxy_enable'][0])
if gae_proxy_enable != 0 and gae_proxy_enable != 1:
data = '{"res":"fail, gae_proxy_enable:%s"}' % gae_proxy_enable
else:
config.set(["modules", "gae_proxy", "auto_start"], gae_proxy_enable)
config.save()
if gae_proxy_enable:
module_init.start("gae_proxy")
else:
module_init.stop("gae_proxy")
self.load_module_menus()
data = '{"res":"success"}'
elif 'x_tunnel_enable' in reqs :
x_tunnel_enable = int(reqs['x_tunnel_enable'][0])
if x_tunnel_enable != 0 and x_tunnel_enable != 1:
data = '{"res":"fail, x_tunnel_enable:%s"}' % x_tunnel_enable
else:
config.set(["modules", "x_tunnel", "auto_start"], x_tunnel_enable)
config.save()
if x_tunnel_enable:
module_init.start("x_tunnel")
else:
module_init.stop("x_tunnel")
self.load_module_menus()
data = '{"res":"success"}'
else:
data = '{"res":"fail"}'
self.send_response('text/html', data)
def req_update_handler(self):
req = urlparse.urlparse(self.path).query
reqs = urlparse.parse_qs(req, keep_blank_values=True)
data = ''
if reqs['cmd'] == ['get_progress']:
data = json.dumps(update_from_github.progress)
elif reqs['cmd'] == ['get_new_version']:
current_version = update_from_github.current_version()
github_versions = update_from_github.get_github_versions()
data = '{"res":"success", "test_version":"%s", "stable_version":"%s", "current_version":"%s"}' % (github_versions[0][1], github_versions[1][1], current_version)
xlog.info("%s", data)
elif reqs['cmd'] == ['update_version']:
version = reqs['version'][0]
update_from_github.start_update_version(version)
data = '{"res":"success"}'
self.send_response('text/html', data)
def req_init_module_handler(self):
req = urlparse.urlparse(self.path).query
reqs = urlparse.parse_qs(req, keep_blank_values=True)
data = ''
try:
module = reqs['module'][0]
config.load()
if reqs['cmd'] == ['start']:
result = module_init.start(module)
data = '{ "module": "%s", "cmd": "start", "result": "%s" }' % (module, result)
elif reqs['cmd'] == ['stop']:
result = module_init.stop(module)
data = '{ "module": "%s", "cmd": "stop", "result": "%s" }' % (module, result)
elif reqs['cmd'] == ['restart']:
result_stop = module_init.stop(module)
result_start = module_init.start(module)
data = '{ "module": "%s", "cmd": "restart", "stop_result": "%s", "start_result": "%s" }' % (module, result_stop, result_start)
except Exception as e:
xlog.exception("init_module except:%s", e)
self.send_response("text/html", data)
process = 0
server = 0
def start():
global process, server
# should use config.yaml to bind ip
allow_remote = config.get(["modules", "launcher", "allow_remote_connect"], 0)
host_port = config.get(["modules", "launcher", "control_port"], 8085)
if allow_remote:
host_addr = "0.0.0.0"
else:
host_addr = "127.0.0.1"
xlog.info("begin to start web control")
server = simple_http_server.HTTPServer((host_addr, host_port), Http_Handler)
process = threading.Thread(target=server.serve_forever)
process.setDaemon(True)
process.start()
xlog.info("launcher web control started.")
def stop():
global process, server
if process == 0:
return
xlog.info("begin to exit web control")
server.shutdown()
server.server_close()
process.join()
xlog.info("launcher web control exited.")
process = 0
def http_request(url, method="GET"):
proxy_handler = urllib2.ProxyHandler({})
opener = urllib2.build_opener(proxy_handler)
try:
req = opener.open(url, timeout=30)
return req
except Exception as e:
#xlog.exception("web_control http_request:%s fail:%s", url, e)
return False
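# Usage sketch (hedged): http_request returns the opened response object on
# success and False on any failure, so callers test with "== False":
#   resp = http_request("http://127.0.0.1:8085/is_ready")
#   if resp != False:
#       content = resp.read(1024)
#       resp.close()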
def confirm_xxnet_exit():
"""suppose xxnet is running, try to close it
"""
is_xxnet_exit = False
xlog.debug("start confirm_xxnet_exit")
for i in range(30):
# gae_proxy(default port:8087)
if http_request("http://127.0.0.1:8087/quit") == False:
xlog.debug("good, xxnet:8087 cleared!")
is_xxnet_exit = True
break
else:
xlog.debug("<%d>: try to terminate xxnet:8087" % i)
time.sleep(1)
for i in range(30):
# web_control(default port:8085)
host_port = config.get(["modules", "launcher", "control_port"], 8085)
req_url = "http://127.0.0.1:{port}/quit".format(port=host_port)
if http_request(req_url) == False:
xlog.debug("good, xxnet:%s clear!" % host_port)
is_xxnet_exit = True
break
else:
xlog.debug("<%d>: try to terminate xxnet:%s" % (i, host_port))
time.sleep(1)
xlog.debug("finished confirm_xxnet_exit")
return is_xxnet_exit
def confirm_module_ready(port):
if port == 0:
xlog.error("confirm_module_ready with port 0")
time.sleep(1)
return False
for i in range(200):
req = http_request("http://127.0.0.1:%d/is_ready" % port)
if req == False:
time.sleep(1)
continue
content = req.read(1024)
req.close()
#xlog.debug("cert_import_ready return:%s", content)
if content == "True":
return True
else:
time.sleep(1)
return False
if __name__ == "__main__":
pass
#confirm_xxnet_exit()
# http_request("http://getbootstrap.com/dist/js/bootstrap.min.js")
| [
"[email protected]"
]
| |
0ae6c569d7ba64ecd69d11dfaa0d0a8135004962 | ca23b411c8a046e98f64b81f6cba9e47783d2584 | /factorize_a_city/libs/utils.py | 939a7aaaec7f7ab87928e4d456fe97833be80859 | [
"CC-BY-4.0",
"Apache-2.0"
]
| permissive | pdybczak/google-research | 1fb370a6aa4820a42a5d417a1915687a00613f9c | 0714e9a5a3934d922c0b9dd017943a8e511eb5bc | refs/heads/master | 2023-03-05T23:16:11.246574 | 2021-01-04T11:30:28 | 2021-01-04T11:30:28 | 326,629,357 | 1 | 0 | Apache-2.0 | 2021-02-01T12:39:09 | 2021-01-04T09:17:36 | Jupyter Notebook | UTF-8 | Python | false | false | 3,949 | py | # coding=utf-8
# Copyright 2020 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utils ops to support the factorize_city project."""
import numpy as np
import tensorflow as tf
import tensorflow_probability as tfp
layers = tf.contrib.layers
def outlier_normalization(inp, clip_amount=3):
"""Operation for normalizing numpy images with unbounded values.
This is used to normalize log_reflectance and log_shading images which have
unbounded values. This function bounds the min-max of the array to be
plus-minus clip_amount standard deviation of the mean. The clipped range is
then shifted to [0, 1].
Args:
inp: [H, W, 3] A numpy array with unbounded values.
clip_amount: (int) how many standard deviations from the mean to clip by.
Returns:
A tensor of shape [H, W, 3] with values ranging from [0, 1].
"""
sigma = np.std(inp)
mu = np.mean(inp)
inp = np.clip(inp, mu - clip_amount * sigma, mu + clip_amount * sigma)
  m = inp - np.min(inp)
return m / np.max(m)
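# Usage sketch (illustrative input, not project data): for a synthetic
# log-image with values drawn from N(0, 10^2),
#   out = outlier_normalization(np.random.normal(scale=10., size=(4, 4, 3)))
# clips to +/-3 sigma and rescales, so out.min() == 0.0 and out.max() == 1.0.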
def pad_panorama_for_convolutions(tensor, ksz, mode):
pad_top = (ksz - 1) // 2
pad_bottom = ksz // 2
pad_left = (ksz - 1) // 2
pad_right = ksz // 2
reflect_pad = [[0, 0], [pad_top, pad_bottom], [0, 0], [0, 0]]
tensor = tf.pad(tensor, reflect_pad, mode)
tensor = tf.concat(
[tensor[:, :, -pad_left:,], tensor, tensor[:, :, :pad_right]], axis=-2)
return tensor
def reduce_median(tensor, axis=0, keep_dims=False):
return tfp.stats.percentile(tensor, 50, axis=axis, keep_dims=keep_dims)
def upsample(tensor, size=2):
unused_b, h, w, unused_d = tensor.shape.as_list()
return tf.compat.v1.image.resize_bilinear(
tensor, [size * h, size * w],
align_corners=False,
half_pixel_centers=True)
def instance_normalization(inp, scope=""):
with tf.compat.v1.variable_scope(scope):
return layers.instance_norm(
inp, center=True, scale=True, trainable=True, epsilon=1e-5)
def compute_circular_average(softmax_distribution):
"""Computes circular average of a batch of softmax_distribution.
Args:
softmax_distribution: [B, K] is a batch of distributions of angles over K
bins which spans [-pi, pi]. Each bin contains the probability of an
orientation in its corresponding angle direction.
Returns:
Circular average, in radians, of shape [B] for each distribution of K-bins.
"""
unused_batch_size, k_bins = softmax_distribution.shape.as_list()
radian_coordinates = tf.linspace(-np.pi, np.pi,
k_bins + 1)[:k_bins] + (np.pi) / k_bins
# Imagine a top-down view of the scene, where the x-axis points out the center
# of the panorama and the +y axis is clockwise.
x_vector_direction = tf.cos(radian_coordinates)
y_vector_direction = tf.sin(radian_coordinates)
expected_x_coordinate = tf.reduce_sum(
softmax_distribution * x_vector_direction[tf.newaxis], axis=-1)
expected_y_coordinate = tf.reduce_sum(
softmax_distribution * y_vector_direction[tf.newaxis], axis=-1)
# Project the circular average to the unit circle to prevent unstable
  # exploding gradients when the average is close to the origin of the
# coordinate frame.
dist = tf.sqrt(expected_x_coordinate * expected_x_coordinate +
expected_y_coordinate * expected_y_coordinate + 1e-5)
normx = expected_x_coordinate / dist
normy = expected_y_coordinate / dist
return tf.atan2(normy, normx)
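if __name__ == "__main__":
  # Illustrative smoke test, not part of the library: a one-hot distribution
  # over K=4 angle bins should average to its own bin direction (pi/4 here).
  one_hot = tf.constant([[0., 0., 1., 0.]])
  with tf.compat.v1.Session() as sess:
    angle = sess.run(compute_circular_average(one_hot))[0]
    print("circular average: %.3f radians (expected %.3f)" % (angle, np.pi / 4))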
| [
"[email protected]"
]
| |
9bf9a8fa4b7511ee7ddec1c52b7f7f7cc9c701c9 | fb5d2c4c76b311871b23c1d7266f074d4a709ef6 | /plotting/plot_ideal_dlogp.py | e57cb71de188c520b98ce28fb111ff526349a289 | [
"AFL-3.0"
]
| permissive | philbull/RadioFisher | 50be8d49d7bdde2712bd35682a359c43f22e3a28 | fe25f969de9a700c5697168ba9e0d2645c55ed81 | refs/heads/master | 2023-01-20T01:27:39.982180 | 2020-11-24T07:44:51 | 2020-11-24T07:44:51 | 315,553,003 | 4 | 2 | null | null | null | null | UTF-8 | Python | false | false | 3,193 | py | #!/usr/bin/python
"""
Plot fractional constraints on P(k) for Euclid and noise-free versions of
Facility with different amounts of foreground contamination. (Fig. 27)
"""
import numpy as np
import pylab as P
from rfwrapper import rf
import matplotlib.patches
import matplotlib.cm
import os
from radiofisher import euclid
cosmo = rf.experiments.cosmo
names = ['EuclidRef_paper', 'exptCV_efg6_paper', 'exptCV_efg12_paper']
colours = ['#CC0000', '#1619A1', '#5B9C0A', '#990A9C'] # DETF/F/M/S
labels = ['DETF IV', 'Ideal, $\epsilon_\mathrm{FG}=10^{-6}$',
'Ideal, $\epsilon_\mathrm{FG}=10^{-12}$']
linestyle = [[], [8, 4], [2, 4, 6, 4], [3, 4]]
# Get f_bao(k) function
cosmo = rf.load_power_spectrum(cosmo, "cache_pk.dat", force_load=True)
fbao = cosmo['fbao']
# Fiducial value and plotting
P.subplot(111)
for k in range(len(names)):
root = "output/" + names[k]
# Load cosmo fns.
dat = np.atleast_2d( np.genfromtxt(root+"-cosmofns-zc.dat") ).T
zc, Hc, dAc, Dc, fc = dat
z, H, dA, D, f = np.genfromtxt(root+"-cosmofns-smooth.dat").T
kc = np.genfromtxt(root+"-fisher-kc.dat").T
# Load Fisher matrices as fn. of z
Nbins = zc.size
F_list = [np.genfromtxt(root+"-fisher-full-%d.dat" % i) for i in range(Nbins)]
# EOS FISHER MATRIX
# Actually, (aperp, apar) are (D_A, H)
pnames = rf.load_param_names(root+"-fisher-full-0.dat")
zfns = []; excl = []
F, lbls = rf.combined_fisher_matrix( F_list,
expand=zfns, names=pnames,
exclude=excl )
# Just do the simplest thing for P(k) and get 1/sqrt(F)
cov = [np.sqrt(1. / np.diag(F)[lbls.index(lbl)]) for lbl in lbls if "pk" in lbl]
cov = np.array(cov)
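    # Worked example (illustrative): a diagonal Fisher entry F_ii = 1e4 for a
    # P(k) bin corresponds to a 1-sigma fractional error of 1/sqrt(1e4) = 0.01.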
pk = cosmo['pk_nobao'](kc) * (1. + fbao(kc))
# Replace nan/inf values
cov[np.where(np.isnan(cov))] = 1e10
cov[np.where(np.isinf(cov))] = 1e10
pw0 = rf.indexes_for_sampled_fns(11, zc.size, zfns)
pwa = rf.indexes_for_sampled_fns(12, zc.size, zfns)
print "-"*50
print names[k]
#print cov
print lbls[pw0], 1. / np.sqrt(F[pw0,pw0])
print lbls[pwa], 1. / np.sqrt(F[pwa,pwa])
"""
if k == 0:
# Plot shaded region
P.fill_between(kc, np.ones(kc.size)*1e-10, cov, facecolor='#e1e1e1', edgecolor='none')
else:
# Plot errorbars
P.plot(kc, cov, color=colours[k], label=labels[k], lw=2.2, ls=linestyle[k])
"""
line = P.plot(kc, cov, color=colours[k], label=labels[k], lw=2.4)
# Set custom linestyle
line[0].set_dashes(linestyle[k])
P.xscale('log')
P.yscale('log')
P.xlim((1e-3, 1.5e0))
P.ylim((8e-4, 1e1))
P.legend(loc='lower left', prop={'size':'large'}, frameon=False)
P.tick_params(axis='both', which='major', labelsize=20, size=8., width=1.5, pad=8.)
P.tick_params(axis='both', which='minor', labelsize=20, size=5., width=1.5)
P.xlabel(r"$k \,[\mathrm{Mpc}^{-1}]$", fontdict={'fontsize':'xx-large'})
P.ylabel(r"$\Delta P / P$", fontdict={'fontsize':'xx-large'})
P.tight_layout()
# Set size
#P.gcf().set_size_inches(8.,6.)
P.savefig('fig27-dlogp-ideal.pdf', transparent=True) # 100
P.show()
| [
"[email protected]"
]
| |
f0b2ebaf72776e0d44d6bcd2b5874668d37c3582 | 8bd3229c4f07243c5756a029f507235e49221d21 | /Store/src/products/migrations/0007_remove_category_catname2.py | 5b5fc3c56ba8de8ea514074faf7386f03e2a398b | []
| no_license | ammaralazie/Online-store | 4d937cbd022c36f9f671593e9e6122edce262f54 | 51e6a8518ab52ce9e6bb589cce31876c944fc191 | refs/heads/master | 2023-02-12T14:51:55.088522 | 2021-01-09T04:04:14 | 2021-01-09T04:04:14 | 304,319,989 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 331 | py | # Generated by Django 3.1 on 2020-08-24 13:11
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('products', '0006_auto_20200824_1308'),
]
operations = [
migrations.RemoveField(
model_name='category',
name='CATName2',
),
]
| [
"[email protected]"
]
| |
f3523dde04d19cfbb77789c443ba224da4bdcd25 | 5456502f97627278cbd6e16d002d50f1de3da7bb | /chromeos/DEPS | 757faf1e910c622d4ec213d9ea619db55cb922fd | [
"BSD-3-Clause"
]
| permissive | TrellixVulnTeam/Chromium_7C66 | 72d108a413909eb3bd36c73a6c2f98de1573b6e5 | c8649ab2a0f5a747369ed50351209a42f59672ee | refs/heads/master | 2023-03-16T12:51:40.231959 | 2017-12-20T10:38:26 | 2017-12-20T10:38:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 624 | # Please keep the dependencies here to a minimum. This is intended to be a
# low level Chrome OS system library that may be used by targets that need to
# be kept as small as possible.
include_rules = [
"+components/device_event_log",
"+components/policy/proto",
"+components/pref_registry",
"+components/prefs",
"+components/signin/core/account_id/account_id.h",
"+components/user_manager/known_user.h",
"+crypto",
"+net",
"+third_party/cros_system_api",
"+third_party/libxml",
"+third_party/protobuf",
  # Some targets may not have any UI, so explicitly exclude src/ui.
"-ui",
]
| [
"[email protected]"
]
| ||
7ce3edaa3f5528687a51a5632a5bf3a96b5872cf | 5e4d6df8fa464f4270855846bf0708ae24d4a572 | /blabla/0625-0701_Antai_src/src/0626/1003time.py | 54ff79984180c1e77ca402b85bd50491051ff27a | []
| no_license | xy2333/tianchi-CBE | 2455796a7241db65ef42dd4f00c6a13fb462f246 | f169d21290e25ccf65bb7a0040e83cb9ef5f9dc2 | refs/heads/master | 2020-06-12T12:09:59.295929 | 2019-08-05T03:43:18 | 2019-08-05T03:43:18 | 194,294,624 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,526 | py | #encoding=utf-8
# Derive four time-of-day flags (is_morning, is_afternoon, is_night,
# is_midnight) from the timestamps in the train table.
t1 = '2018-07-23 17:58:40' # Monday
t2 = '2018-07-24 17:58:40' # Tuesday
t3 = '2018-07-25 17:58:40' # Wednesday
t4 = '2018-07-26 17:58:40' # Thursday
t5 = '2018-07-27 10:58:40' # Friday
t6 = '2018-07-28 17:58:40' # Saturday
t7 = '2018-07-29 17:58:40' # Sunday
t8 = '2018-07-30 17:58:40' # Monday (next week)
import time
t_1 = time.strptime(t1,"%Y-%m-%d %H:%M:%S")
t_2 = time.strptime(t2,"%Y-%m-%d %H:%M:%S")
t_3 = time.strptime(t3,"%Y-%m-%d %H:%M:%S")
t_4 = time.strptime(t4,"%Y-%m-%d %H:%M:%S")
t_5 = time.strptime(t5,"%Y-%m-%d %H:%M:%S")
t_6 = time.strptime(t6,"%Y-%m-%d %H:%M:%S")
t_7 = time.strptime(t7,"%Y-%m-%d %H:%M:%S")
t_8 = time.strptime(t8,"%Y-%m-%d %H:%M:%S")
# is weekday
# def is_weekday(dt):
# if dt.tm_wday<5:
# print "Weekday"
# else:
# print "Weekend"
# print is_weekday(t_1)
# print is_weekday(t_2)
# print is_weekday(t_3)
# print is_weekday(t_4)
# print is_weekday(t_5)
# is morning
def is_morning(dt): # 5:00-11:00
if 4 < dt.tm_hour and dt.tm_hour < 12:
return 1
else:
return 0
def is_afternoon(dt): # 11:00-17:00
if 10 < dt.tm_hour and dt.tm_hour < 18:
return 1
else:
return 0
def is_night(dt): # 17:00-23:00
if 16 < dt.tm_hour and dt.tm_hour < 24:
return 1
else:
return 0
def is_midnight(dt): # 23:00-5:00
    if dt.tm_hour < 5 or dt.tm_hour == 23:
        return 1
    else:
        return 0
# Quick check: t_5 falls at 10:58 on a Friday, so it is flagged as morning.
print is_morning(t_5)  # -> 1
| [
"[email protected]"
]
| |
7702163683b044efbe23b7af136f4ad89296307d | fdae0a312a3d6bab4c0da379f8989de87ccc8ce2 | /coding.py | fe00cb44d43a609afa6cfb1197c78b3967131ea7 | []
| no_license | Yeldan/Lab2 | a6d65f85b7d164d5dbcee3a73ffaf5df89caa01e | ef25f83da60c5f35fba7cc7fa99b67635d67234f | refs/heads/master | 2021-01-21T10:13:43.927844 | 2017-05-18T10:56:34 | 2017-05-18T10:56:34 | 91,682,801 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 57 | py | bytes = str.encode('utf-8')
str = bytes.decode('utf-8')
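# Round-trip sanity check (illustrative): UTF-8 decoding inverts encoding.
assert decoded == text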
| [
"[email protected]"
]
| |
c5529a4177d33ed7a6fe2ab0dbe822d9a27ee8fd | b5a9d42f7ea5e26cd82b3be2b26c324d5da79ba1 | /tensorflow/contrib/crf/python/ops/crf.py | ed0a049ef812b379f4d8f0e4e48e561debf6a71a | [
"Apache-2.0"
]
| permissive | uve/tensorflow | e48cb29f39ed24ee27e81afd1687960682e1fbef | e08079463bf43e5963acc41da1f57e95603f8080 | refs/heads/master | 2020-11-29T11:30:40.391232 | 2020-01-11T13:43:10 | 2020-01-11T13:43:10 | 230,088,347 | 0 | 0 | Apache-2.0 | 2019-12-25T10:49:15 | 2019-12-25T10:49:14 | null | UTF-8 | Python | false | false | 24,184 | py | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Module for constructing a linear-chain CRF.
The following snippet is an example of a CRF layer on top of a batched sequence
of unary scores (logits for every word). This example also decodes the most
likely sequence at test time. There are two ways to do decoding. One
is using crf_decode to do decoding in Tensorflow , and the other one is using
viterbi_decode in Numpy.
log_likelihood, transition_params = tf.contrib.crf.crf_log_likelihood(
unary_scores, gold_tags, sequence_lengths)
loss = tf.reduce_mean(-log_likelihood)
train_op = tf.compat.v1.train.GradientDescentOptimizer(0.01).minimize(loss)
# Decoding in Tensorflow.
viterbi_sequence, viterbi_score = tf.contrib.crf.crf_decode(
unary_scores, transition_params, sequence_lengths)
tf_viterbi_sequence, tf_viterbi_score, _ = session.run(
[viterbi_sequence, viterbi_score, train_op])
# Decoding in Numpy.
tf_unary_scores, tf_sequence_lengths, tf_transition_params, _ = session.run(
[unary_scores, sequence_lengths, transition_params, train_op])
for tf_unary_scores_, tf_sequence_length_ in zip(tf_unary_scores,
tf_sequence_lengths):
# Remove padding.
tf_unary_scores_ = tf_unary_scores_[:tf_sequence_length_]
# Compute the highest score and its tag sequence.
tf_viterbi_sequence, tf_viterbi_score = tf.contrib.crf.viterbi_decode(
tf_unary_scores_, tf_transition_params)
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import tensor_shape
from tensorflow.python.layers import utils
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import rnn
from tensorflow.python.ops import rnn_cell
from tensorflow.python.ops import variable_scope as vs
__all__ = [
"crf_sequence_score", "crf_log_norm", "crf_log_likelihood",
"crf_unary_score", "crf_binary_score", "CrfForwardRnnCell",
"viterbi_decode", "crf_decode", "CrfDecodeForwardRnnCell",
"CrfDecodeBackwardRnnCell", "crf_multitag_sequence_score"
]
def crf_sequence_score(inputs, tag_indices, sequence_lengths,
transition_params):
"""Computes the unnormalized score for a tag sequence.
Args:
inputs: A [batch_size, max_seq_len, num_tags] tensor of unary potentials
to use as input to the CRF layer.
tag_indices: A [batch_size, max_seq_len] matrix of tag indices for which we
compute the unnormalized score.
sequence_lengths: A [batch_size] vector of true sequence lengths.
transition_params: A [num_tags, num_tags] transition matrix.
Returns:
sequence_scores: A [batch_size] vector of unnormalized sequence scores.
"""
# If max_seq_len is 1, we skip the score calculation and simply gather the
# unary potentials of the single tag.
def _single_seq_fn():
batch_size = array_ops.shape(inputs, out_type=tag_indices.dtype)[0]
example_inds = array_ops.reshape(
math_ops.range(batch_size, dtype=tag_indices.dtype), [-1, 1])
sequence_scores = array_ops.gather_nd(
array_ops.squeeze(inputs, [1]),
array_ops.concat([example_inds, tag_indices], axis=1))
sequence_scores = array_ops.where(math_ops.less_equal(sequence_lengths, 0),
array_ops.zeros_like(sequence_scores),
sequence_scores)
return sequence_scores
def _multi_seq_fn():
# Compute the scores of the given tag sequence.
unary_scores = crf_unary_score(tag_indices, sequence_lengths, inputs)
binary_scores = crf_binary_score(tag_indices, sequence_lengths,
transition_params)
sequence_scores = unary_scores + binary_scores
return sequence_scores
return utils.smart_cond(
pred=math_ops.equal(
tensor_shape.dimension_value(
inputs.shape[1]) or array_ops.shape(inputs)[1],
1),
true_fn=_single_seq_fn,
false_fn=_multi_seq_fn)
def crf_multitag_sequence_score(inputs, tag_bitmap, sequence_lengths,
transition_params):
"""Computes the unnormalized score of all tag sequences matching tag_bitmap.
tag_bitmap enables more than one tag to be considered correct at each time
step. This is useful when an observed output at a given time step is
consistent with more than one tag, and thus the log likelihood of that
observation must take into account all possible consistent tags.
Using one-hot vectors in tag_bitmap gives results identical to
crf_sequence_score.
Args:
inputs: A [batch_size, max_seq_len, num_tags] tensor of unary potentials
to use as input to the CRF layer.
tag_bitmap: A [batch_size, max_seq_len, num_tags] boolean tensor
representing all active tags at each index for which to calculate the
unnormalized score.
sequence_lengths: A [batch_size] vector of true sequence lengths.
transition_params: A [num_tags, num_tags] transition matrix.
Returns:
sequence_scores: A [batch_size] vector of unnormalized sequence scores.
"""
# If max_seq_len is 1, we skip the score calculation and simply gather the
# unary potentials of all active tags.
def _single_seq_fn():
filtered_inputs = array_ops.where(
tag_bitmap, inputs,
array_ops.fill(array_ops.shape(inputs), float("-inf")))
return math_ops.reduce_logsumexp(
filtered_inputs, axis=[1, 2], keepdims=False)
def _multi_seq_fn():
# Compute the logsumexp of all scores of sequences matching the given tags.
filtered_inputs = array_ops.where(
tag_bitmap, inputs,
array_ops.fill(array_ops.shape(inputs), float("-inf")))
return crf_log_norm(
inputs=filtered_inputs,
sequence_lengths=sequence_lengths,
transition_params=transition_params)
return utils.smart_cond(
pred=math_ops.equal(
tensor_shape.dimension_value(
inputs.shape[1]) or array_ops.shape(inputs)[1],
1),
true_fn=_single_seq_fn,
false_fn=_multi_seq_fn)
def crf_log_norm(inputs, sequence_lengths, transition_params):
"""Computes the normalization for a CRF.
Args:
inputs: A [batch_size, max_seq_len, num_tags] tensor of unary potentials
to use as input to the CRF layer.
sequence_lengths: A [batch_size] vector of true sequence lengths.
transition_params: A [num_tags, num_tags] transition matrix.
Returns:
log_norm: A [batch_size] vector of normalizers for a CRF.
"""
# Split up the first and rest of the inputs in preparation for the forward
# algorithm.
first_input = array_ops.slice(inputs, [0, 0, 0], [-1, 1, -1])
first_input = array_ops.squeeze(first_input, [1])
# If max_seq_len is 1, we skip the algorithm and simply reduce_logsumexp over
# the "initial state" (the unary potentials).
def _single_seq_fn():
log_norm = math_ops.reduce_logsumexp(first_input, [1])
# Mask `log_norm` of the sequences with length <= zero.
log_norm = array_ops.where(math_ops.less_equal(sequence_lengths, 0),
array_ops.zeros_like(log_norm),
log_norm)
return log_norm
def _multi_seq_fn():
"""Forward computation of alpha values."""
rest_of_input = array_ops.slice(inputs, [0, 1, 0], [-1, -1, -1])
# Compute the alpha values in the forward algorithm in order to get the
# partition function.
forward_cell = CrfForwardRnnCell(transition_params)
# Sequence length is not allowed to be less than zero.
sequence_lengths_less_one = math_ops.maximum(
constant_op.constant(0, dtype=sequence_lengths.dtype),
sequence_lengths - 1)
_, alphas = rnn.dynamic_rnn(
cell=forward_cell,
inputs=rest_of_input,
sequence_length=sequence_lengths_less_one,
initial_state=first_input,
dtype=dtypes.float32)
log_norm = math_ops.reduce_logsumexp(alphas, [1])
# Mask `log_norm` of the sequences with length <= zero.
log_norm = array_ops.where(math_ops.less_equal(sequence_lengths, 0),
array_ops.zeros_like(log_norm),
log_norm)
return log_norm
return utils.smart_cond(
pred=math_ops.equal(
tensor_shape.dimension_value(
inputs.shape[1]) or array_ops.shape(inputs)[1],
1),
true_fn=_single_seq_fn,
false_fn=_multi_seq_fn)
def crf_log_likelihood(inputs,
tag_indices,
sequence_lengths,
transition_params=None):
"""Computes the log-likelihood of tag sequences in a CRF.
Args:
inputs: A [batch_size, max_seq_len, num_tags] tensor of unary potentials
to use as input to the CRF layer.
tag_indices: A [batch_size, max_seq_len] matrix of tag indices for which we
compute the log-likelihood.
sequence_lengths: A [batch_size] vector of true sequence lengths.
transition_params: A [num_tags, num_tags] transition matrix, if available.
Returns:
log_likelihood: A [batch_size] `Tensor` containing the log-likelihood of
each example, given the sequence of tag indices.
transition_params: A [num_tags, num_tags] transition matrix. This is either
provided by the caller or created in this function.
"""
# Get shape information.
num_tags = tensor_shape.dimension_value(inputs.shape[2])
# Get the transition matrix if not provided.
if transition_params is None:
transition_params = vs.get_variable("transitions", [num_tags, num_tags])
sequence_scores = crf_sequence_score(inputs, tag_indices, sequence_lengths,
transition_params)
log_norm = crf_log_norm(inputs, sequence_lengths, transition_params)
# Normalize the scores to get the log-likelihood per example.
log_likelihood = sequence_scores - log_norm
return log_likelihood, transition_params
def crf_unary_score(tag_indices, sequence_lengths, inputs):
"""Computes the unary scores of tag sequences.
Args:
tag_indices: A [batch_size, max_seq_len] matrix of tag indices.
sequence_lengths: A [batch_size] vector of true sequence lengths.
inputs: A [batch_size, max_seq_len, num_tags] tensor of unary potentials.
Returns:
unary_scores: A [batch_size] vector of unary scores.
"""
batch_size = array_ops.shape(inputs)[0]
max_seq_len = array_ops.shape(inputs)[1]
num_tags = array_ops.shape(inputs)[2]
flattened_inputs = array_ops.reshape(inputs, [-1])
offsets = array_ops.expand_dims(
math_ops.range(batch_size) * max_seq_len * num_tags, 1)
offsets += array_ops.expand_dims(math_ops.range(max_seq_len) * num_tags, 0)
# Use int32 or int64 based on tag_indices' dtype.
if tag_indices.dtype == dtypes.int64:
offsets = math_ops.cast(offsets, dtypes.int64)
flattened_tag_indices = array_ops.reshape(offsets + tag_indices, [-1])
unary_scores = array_ops.reshape(
array_ops.gather(flattened_inputs, flattened_tag_indices),
[batch_size, max_seq_len])
masks = array_ops.sequence_mask(sequence_lengths,
maxlen=array_ops.shape(tag_indices)[1],
dtype=dtypes.float32)
unary_scores = math_ops.reduce_sum(unary_scores * masks, 1)
return unary_scores
def crf_binary_score(tag_indices, sequence_lengths, transition_params):
"""Computes the binary scores of tag sequences.
Args:
tag_indices: A [batch_size, max_seq_len] matrix of tag indices.
sequence_lengths: A [batch_size] vector of true sequence lengths.
transition_params: A [num_tags, num_tags] matrix of binary potentials.
Returns:
binary_scores: A [batch_size] vector of binary scores.
"""
# Get shape information.
num_tags = transition_params.get_shape()[0]
num_transitions = array_ops.shape(tag_indices)[1] - 1
# Truncate by one on each side of the sequence to get the start and end
# indices of each transition.
start_tag_indices = array_ops.slice(tag_indices, [0, 0],
[-1, num_transitions])
end_tag_indices = array_ops.slice(tag_indices, [0, 1], [-1, num_transitions])
# Encode the indices in a flattened representation.
flattened_transition_indices = start_tag_indices * num_tags + end_tag_indices
flattened_transition_params = array_ops.reshape(transition_params, [-1])
# Get the binary scores based on the flattened representation.
binary_scores = array_ops.gather(flattened_transition_params,
flattened_transition_indices)
masks = array_ops.sequence_mask(sequence_lengths,
maxlen=array_ops.shape(tag_indices)[1],
dtype=dtypes.float32)
truncated_masks = array_ops.slice(masks, [0, 1], [-1, -1])
binary_scores = math_ops.reduce_sum(binary_scores * truncated_masks, 1)
return binary_scores
class CrfForwardRnnCell(rnn_cell.RNNCell):
"""Computes the alpha values in a linear-chain CRF.
See http://www.cs.columbia.edu/~mcollins/fb.pdf for reference.
"""
def __init__(self, transition_params):
"""Initialize the CrfForwardRnnCell.
Args:
transition_params: A [num_tags, num_tags] matrix of binary potentials.
This matrix is expanded into a [1, num_tags, num_tags] in preparation
for the broadcast summation occurring within the cell.
"""
self._transition_params = array_ops.expand_dims(transition_params, 0)
self._num_tags = tensor_shape.dimension_value(transition_params.shape[0])
@property
def state_size(self):
return self._num_tags
@property
def output_size(self):
return self._num_tags
def __call__(self, inputs, state, scope=None):
"""Build the CrfForwardRnnCell.
Args:
inputs: A [batch_size, num_tags] matrix of unary potentials.
state: A [batch_size, num_tags] matrix containing the previous alpha
values.
scope: Unused variable scope of this cell.
Returns:
new_alphas, new_alphas: A pair of [batch_size, num_tags] matrices
values containing the new alpha values.
"""
state = array_ops.expand_dims(state, 2)
# This addition op broadcasts self._transitions_params along the zeroth
# dimension and state along the second dimension. This performs the
# multiplication of previous alpha values and the current binary potentials
# in log space.
transition_scores = state + self._transition_params
new_alphas = inputs + math_ops.reduce_logsumexp(transition_scores, [1])
# Both the state and the output of this RNN cell contain the alphas values.
# The output value is currently unused and simply satisfies the RNN API.
# This could be useful in the future if we need to compute marginal
# probabilities, which would require the accumulated alpha values at every
# time step.
return new_alphas, new_alphas
def viterbi_decode(score, transition_params):
"""Decode the highest scoring sequence of tags outside of TensorFlow.
This should only be used at test time.
Args:
score: A [seq_len, num_tags] matrix of unary potentials.
transition_params: A [num_tags, num_tags] matrix of binary potentials.
Returns:
viterbi: A [seq_len] list of integers containing the highest scoring tag
indices.
viterbi_score: A float containing the score for the Viterbi sequence.
"""
trellis = np.zeros_like(score)
backpointers = np.zeros_like(score, dtype=np.int32)
trellis[0] = score[0]
for t in range(1, score.shape[0]):
v = np.expand_dims(trellis[t - 1], 1) + transition_params
trellis[t] = score[t] + np.max(v, 0)
backpointers[t] = np.argmax(v, 0)
viterbi = [np.argmax(trellis[-1])]
for bp in reversed(backpointers[1:]):
viterbi.append(bp[viterbi[-1]])
viterbi.reverse()
viterbi_score = np.max(trellis[-1])
return viterbi, viterbi_score
class CrfDecodeForwardRnnCell(rnn_cell.RNNCell):
"""Computes the forward decoding in a linear-chain CRF.
"""
def __init__(self, transition_params):
"""Initialize the CrfDecodeForwardRnnCell.
Args:
transition_params: A [num_tags, num_tags] matrix of binary
potentials. This matrix is expanded into a
[1, num_tags, num_tags] in preparation for the broadcast
summation occurring within the cell.
"""
self._transition_params = array_ops.expand_dims(transition_params, 0)
self._num_tags = tensor_shape.dimension_value(transition_params.shape[0])
@property
def state_size(self):
return self._num_tags
@property
def output_size(self):
return self._num_tags
def __call__(self, inputs, state, scope=None):
"""Build the CrfDecodeForwardRnnCell.
Args:
inputs: A [batch_size, num_tags] matrix of unary potentials.
state: A [batch_size, num_tags] matrix containing the previous step's
score values.
scope: Unused variable scope of this cell.
Returns:
backpointers: A [batch_size, num_tags] matrix of backpointers.
new_state: A [batch_size, num_tags] matrix of new score values.
"""
# For simplicity, in shape comments, denote:
# 'batch_size' by 'B', 'max_seq_len' by 'T' , 'num_tags' by 'O' (output).
state = array_ops.expand_dims(state, 2) # [B, O, 1]
# This addition op broadcasts self._transitions_params along the zeroth
# dimension and state along the second dimension.
# [B, O, 1] + [1, O, O] -> [B, O, O]
transition_scores = state + self._transition_params # [B, O, O]
new_state = inputs + math_ops.reduce_max(transition_scores, [1]) # [B, O]
backpointers = math_ops.argmax(transition_scores, 1)
backpointers = math_ops.cast(backpointers, dtype=dtypes.int32) # [B, O]
return backpointers, new_state
class CrfDecodeBackwardRnnCell(rnn_cell.RNNCell):
"""Computes backward decoding in a linear-chain CRF.
"""
def __init__(self, num_tags):
"""Initialize the CrfDecodeBackwardRnnCell.
Args:
num_tags: An integer. The number of tags.
"""
self._num_tags = num_tags
@property
def state_size(self):
return 1
@property
def output_size(self):
return 1
def __call__(self, inputs, state, scope=None):
"""Build the CrfDecodeBackwardRnnCell.
Args:
inputs: A [batch_size, num_tags] matrix of
backpointer of next step (in time order).
state: A [batch_size, 1] matrix of tag index of next step.
scope: Unused variable scope of this cell.
Returns:
new_tags, new_tags: A pair of [batch_size, num_tags]
tensors containing the new tag indices.
"""
state = array_ops.squeeze(state, axis=[1]) # [B]
batch_size = array_ops.shape(inputs)[0]
b_indices = math_ops.range(batch_size) # [B]
indices = array_ops.stack([b_indices, state], axis=1) # [B, 2]
new_tags = array_ops.expand_dims(
gen_array_ops.gather_nd(inputs, indices), # [B]
axis=-1) # [B, 1]
return new_tags, new_tags
def crf_decode(potentials, transition_params, sequence_length):
"""Decode the highest scoring sequence of tags in TensorFlow.
This is a function for tensor.
Args:
potentials: A [batch_size, max_seq_len, num_tags] tensor of
unary potentials.
transition_params: A [num_tags, num_tags] matrix of
binary potentials.
sequence_length: A [batch_size] vector of true sequence lengths.
Returns:
decode_tags: A [batch_size, max_seq_len] matrix, with dtype `tf.int32`.
Contains the highest scoring tag indices.
best_score: A [batch_size] vector, containing the score of `decode_tags`.
"""
# If max_seq_len is 1, we skip the algorithm and simply return the argmax tag
# and the max activation.
def _single_seq_fn():
squeezed_potentials = array_ops.squeeze(potentials, [1])
decode_tags = array_ops.expand_dims(
math_ops.argmax(squeezed_potentials, axis=1), 1)
best_score = math_ops.reduce_max(squeezed_potentials, axis=1)
return math_ops.cast(decode_tags, dtype=dtypes.int32), best_score
def _multi_seq_fn():
"""Decoding of highest scoring sequence."""
# For simplicity, in shape comments, denote:
# 'batch_size' by 'B', 'max_seq_len' by 'T' , 'num_tags' by 'O' (output).
num_tags = tensor_shape.dimension_value(potentials.shape[2])
# Computes forward decoding. Get last score and backpointers.
crf_fwd_cell = CrfDecodeForwardRnnCell(transition_params)
initial_state = array_ops.slice(potentials, [0, 0, 0], [-1, 1, -1])
initial_state = array_ops.squeeze(initial_state, axis=[1]) # [B, O]
inputs = array_ops.slice(potentials, [0, 1, 0], [-1, -1, -1]) # [B, T-1, O]
# Sequence length is not allowed to be less than zero.
sequence_length_less_one = math_ops.maximum(
constant_op.constant(0, dtype=sequence_length.dtype),
sequence_length - 1)
backpointers, last_score = rnn.dynamic_rnn( # [B, T - 1, O], [B, O]
crf_fwd_cell,
inputs=inputs,
sequence_length=sequence_length_less_one,
initial_state=initial_state,
time_major=False,
dtype=dtypes.int32)
backpointers = gen_array_ops.reverse_sequence( # [B, T - 1, O]
backpointers, sequence_length_less_one, seq_dim=1)
# Computes backward decoding. Extract tag indices from backpointers.
crf_bwd_cell = CrfDecodeBackwardRnnCell(num_tags)
initial_state = math_ops.cast(math_ops.argmax(last_score, axis=1), # [B]
dtype=dtypes.int32)
initial_state = array_ops.expand_dims(initial_state, axis=-1) # [B, 1]
decode_tags, _ = rnn.dynamic_rnn( # [B, T - 1, 1]
crf_bwd_cell,
inputs=backpointers,
sequence_length=sequence_length_less_one,
initial_state=initial_state,
time_major=False,
dtype=dtypes.int32)
decode_tags = array_ops.squeeze(decode_tags, axis=[2]) # [B, T - 1]
decode_tags = array_ops.concat([initial_state, decode_tags], # [B, T]
axis=1)
decode_tags = gen_array_ops.reverse_sequence( # [B, T]
decode_tags, sequence_length, seq_dim=1)
best_score = math_ops.reduce_max(last_score, axis=1) # [B]
return decode_tags, best_score
return utils.smart_cond(
pred=math_ops.equal(tensor_shape.dimension_value(potentials.shape[1]) or
array_ops.shape(potentials)[1], 1),
true_fn=_single_seq_fn,
false_fn=_multi_seq_fn)
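if __name__ == "__main__":
  # Smoke test for the numpy-only Viterbi decoder; the potentials below are
  # arbitrary illustrative values, not taken from any trained model.
  example_score = np.array([[4., 1.], [1., 3.]])  # [seq_len=2, num_tags=2]
  example_transitions = np.array([[2., 0.], [0., 2.]])
  tags, best_score = viterbi_decode(example_score, example_transitions)
  print("viterbi tags %s with score %.1f" % (tags, best_score))  # [0, 0], 7.0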
| [
"[email protected]"
]
| |
38f2a3c6453da958082a8415834fed4b4f88c315 | d308fffe3db53b034132fb1ea6242a509f966630 | /pirates/leveleditor/worldData/interior_spanish_office_b.py | f2788bd890ca262be5775b0748f195eac97ad422 | [
"BSD-3-Clause"
]
| permissive | rasheelprogrammer/pirates | 83caac204965b77a1b9c630426588faa01a13391 | 6ca1e7d571c670b0d976f65e608235707b5737e3 | refs/heads/master | 2020-03-18T20:03:28.687123 | 2018-05-28T18:05:25 | 2018-05-28T18:05:25 | 135,193,362 | 3 | 2 | null | null | null | null | UTF-8 | Python | false | false | 17,202 | py | # uncompyle6 version 3.2.0
# Python bytecode 2.4 (62061)
# Decompiled from: Python 2.7.14 (v2.7.14:84471935ed, Sep 16 2017, 20:19:30) [MSC v.1500 32 bit (Intel)]
# Embedded file name: pirates.leveleditor.worldData.interior_spanish_office_b
from pandac.PandaModules import Point3, VBase3, Vec4, Vec3
objectStruct = {'Objects': {'1156268617.43dzlu0n': {'Type': 'Building Interior', 'Name': '', 'Instanced': True, 'Objects': {'1172033810.72kmuller': {'Type': 'Interior_furnishings', 'DisableCollision': False, 'Hpr': VBase3(179.733, 0.0, 0.0), 'Pos': Point3(-0.688, -21.129, 0.0), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/fireplace_stucco'}}, '1172099147.57kmuller': {'Type': 'Furniture', 'DisableCollision': False, 'Hpr': Point3(0.0, 0.0, 0.0), 'Pos': Point3(0.953, 23.712, 0.0), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/counter_spanish'}}, '1172099221.96kmuller': {'Type': 'Furniture', 'DisableCollision': False, 'Hpr': VBase3(-179.909, 0.0, 0.0), 'Pos': Point3(-14.674, -22.743, 0.0), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/bookshelf_spanish'}}, '1172099260.19kmuller': {'Type': 'Furniture', 'DisableCollision': False, 'Hpr': VBase3(-179.909, 0.0, 0.0), 'Pos': Point3(13.553, -22.796, 0.0), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/bookshelf_spanish'}}, '1172099285.99kmuller': {'Type': 'Furniture', 'DisableCollision': False, 'Hpr': VBase3(-179.401, 0.0, 0.0), 'Pos': Point3(-1.461, -10.912, 0.0), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/chair_bank'}}, '1172099292.96kmuller': {'Type': 'Furniture', 'DisableCollision': False, 'Hpr': VBase3(-13.239, 0.0, 0.0), 'Pos': Point3(1.65, 0.907, 0.0), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/chair_bank'}}, '1172099299.27kmuller': {'Type': 'Furniture', 'DisableCollision': False, 'Hpr': VBase3(34.678, 0.0, 0.0), 'Pos': Point3(-4.303, 1.105, 0.0), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/chair_bank'}}, '1172099440.27kmuller': {'Type': 'Wall_Hangings', 'DisableCollision': False, 'Hpr': VBase3(-89.956, 0.0, 0.0), 'Pos': Point3(19.914, -7.875, 8.202), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/Map_01'}}, '1172099473.02kmuller': {'Type': 'Wall_Hangings', 'DisableCollision': False, 'Hpr': VBase3(-89.99, 0.0, 0.0), 'Pos': Point3(-19.987, -7.582, 8.3), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/Map_02'}}, '1172099512.5kmuller': {'Type': 'Wall_Hangings', 'DisableCollision': False, 'Hpr': VBase3(-179.464, 0.0, 0.0), 'Pos': Point3(5.873, 48.131, 11.574), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/props/Map_03'}}, '1179356026.57dzlu': {'Type': 'Light - Dynamic', 'Attenuation': '0.005', 'ConeAngle': '60.0000', 'DropOff': '0.0000', 'FlickRate': 0.5, 'Flickering': False, 'Hpr': Point3(0.0, 0.0, 0.0), 'Intensity': '0.2727', 'LightType': 'AMBIENT', 'Pos': Point3(4.09, 27.89, 8.123), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Color': (1, 1, 1, 1), 'Model': 'models/props/light_tool_bulb'}}, '1179356058.49dzlu': {'Type': 'Light - Dynamic', 'Attenuation': '0.005', 'ConeAngle': '78.6364', 'DropOff': '6.8182', 'FlickRate': 0.5, 'Flickering': False, 'Hpr': VBase3(180.0, -83.877, -180.0), 'Intensity': '1.6970', 'LightType': 'SPOT', 'Pos': Point3(7.748, 20.337, 62.215), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Color': (1, 1, 1, 1), 'Model': 'models/props/light_tool_bulb'}}, '1179356208.68dzlu': {'Type': 'Light - Dynamic', 'Attenuation': '0.005', 'ConeAngle': '78.6364', 'DropOff': '12.2727', 'FlickRate': 0.5, 'Flickering': False, 'Hpr': VBase3(53.376, -18.9, 171.453), 'Intensity': '0.5758', 'LightType': 'SPOT', 'Pos': Point3(32.829, -3.667, 20.081), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Color': (1, 1, 1, 1), 'Model': 
'models/props/light_tool_bulb'}}, '1192814721.02akelts': {'Type': 'Effect Node', 'EffectName': 'torch_effect', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pos': Point3(-0.422, -21.53, 1.154), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1257900091.39caoconno': {'Type': 'Door Locator Node', 'Name': 'door_locator', 'Hpr': VBase3(-180.0, 0.0, 0.0), 'Pos': Point3(-13.419, 47.56, 5.309), 'Scale': VBase3(1.0, 1.0, 1.0)}, '1257900175.75caoconno': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(25.761, 0.0, 0.0), 'Pos': Point3(-3.201, -18.542, 5.677), 'Scale': VBase3(1.396, 1.396, 1.396), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_decoStocking02_winter09'}}, '1257900216.19caoconno': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pos': Point3(1.407, -18.437, 5.452), 'Scale': VBase3(1.0, 1.0, 1.0), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_decoStocking01_winter09'}}, '1257900247.3caoconno': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(32.601, 0.0, 0.0), 'Pos': Point3(-0.896, -18.508, 5.677), 'Scale': VBase3(1.396, 1.396, 1.396), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_decoStocking03_winter09'}}, '1257900309.84caoconno': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pos': Point3(-13.377, 45.212, 17.676), 'Scale': VBase3(1.114, 1.114, 1.114), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_decoSwag_winter08'}}, '1257900441.91caoconno': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(178.085, 0.762, -0.0), 'Pos': Point3(12.36, -22.998, 15.521), 'Scale': VBase3(1.404, 1.404, 1.404), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_decoSwag_winter08'}}, '1257900470.23caoconno': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(178.085, 0.762, -0.0), 'Pos': Point3(-13.597, -22.966, 15.532), 'Scale': VBase3(1.404, 1.404, 1.404), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_decoSwag_winter08'}}, '1257900516.25caoconno': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(89.258, 1.238, 50.295), 'Pos': Point3(20.013, 28.541, 12.157), 'Scale': VBase3(2.155, 2.155, 2.155), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_candycane_winter09'}}, '1257900545.48caoconno': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(-89.72, -2.885, 0.072), 'Pos': Point3(19.403, 26.892, 11.047), 'Scale': VBase3(1.0, 1.0, 1.0), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_decoSwag_winter08'}}, '1257900584.36caoconno': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(-89.72, -2.885, 44.404), 'Pos': Point3(19.977, 25.568, 12.247), 'Scale': VBase3(2.155, 2.155, 2.155), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_candycane_winter09'}}, '1257900615.98caoconno': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(-92.163, -2.731, 50.296), 'Pos': Point3(-19.968, 26.274, 12.932), 'Scale': VBase3(2.155, 2.155, 2.155), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_candycane_winter09'}}, '1257900616.06caoconno': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 
'WinterFestival', 'Hpr': VBase3(88.858, 1.084, 44.333), 'Pos': Point3(-19.851, 29.245, 13.023), 'Scale': VBase3(2.155, 2.155, 2.155), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_candycane_winter09'}}, '1257900616.09caoconno': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(-92.165, -4.532, 50.264), 'Pos': Point3(-19.922, 9.685, 10.016), 'Scale': VBase3(2.155, 2.155, 2.155), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_candycane_winter09'}}, '1257900616.13caoconno': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(88.858, 1.084, 0.0), 'Pos': Point3(-19.394, 27.911, 11.784), 'Scale': VBase3(1.0, 1.0, 1.0), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_decoSwag_winter08'}}, '1257900631.56caoconno': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(-92.165, -4.532, 50.264), 'Pos': Point3(-19.978, -4.219, 10.005), 'Scale': VBase3(2.155, 2.155, 2.155), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_candycane_winter09'}}, '1257900631.61caoconno': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(88.858, 2.886, 44.333), 'Pos': Point3(-19.859, -1.248, 10.094), 'Scale': VBase3(2.155, 2.155, 2.155), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_candycane_winter09'}}, '1257900631.69caoconno': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(-92.165, -4.532, 50.264), 'Pos': Point3(-19.978, -4.219, 10.005), 'Scale': VBase3(2.155, 2.155, 2.155), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_candycane_winter09'}}, '1257900631.7caoconno': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(88.858, 2.886, 0.0), 'Pos': Point3(-19.441, -2.581, 8.841), 'Scale': VBase3(1.0, 1.0, 1.0), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_decoSwag_winter08'}}, '1257900660.44caoconno': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(-91.076, -4.533, 50.319), 'Pos': Point3(-19.848, -14.291, 9.879), 'Scale': VBase3(2.155, 2.155, 2.155), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_candycane_winter09'}}, '1257900660.53caoconno': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(-91.076, -4.533, 50.319), 'Pos': Point3(-19.848, -14.291, 9.879), 'Scale': VBase3(2.155, 2.155, 2.155), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_candycane_winter09'}}, '1257900660.56caoconno': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(89.945, 2.885, -0.055), 'Pos': Point3(-19.342, -12.642, 8.717), 'Scale': VBase3(1.0, 1.0, 1.0), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_decoSwag_winter08'}}, '1257900660.5caoconno': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(89.945, 2.885, 44.278), 'Pos': Point3(-19.785, -11.318, 9.971), 'Scale': VBase3(2.155, 2.155, 2.155), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_candycane_winter09'}}, '1257900741.05caoconno': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(-89.72, -2.885, 44.404), 'Pos': Point3(19.986, 9.661, 10.218), 'Scale': VBase3(2.155, 2.155, 2.155), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_candycane_winter09'}}, 
'1257900741.08caoconno': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(89.258, 1.238, 50.295), 'Pos': Point3(20.023, 12.634, 10.127), 'Scale': VBase3(2.155, 2.155, 2.155), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_candycane_winter09'}}, '1257900741.28caoconno': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(-89.72, -2.885, 0.072), 'Pos': Point3(19.412, 10.985, 9.017), 'Scale': VBase3(1.0, 1.0, 1.0), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_decoSwag_winter08'}}, '1257900799.36caoconno': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(88.858, 2.886, 0.0), 'Pos': Point3(-19.385, 11.323, 8.852), 'Scale': VBase3(1.0, 1.0, 1.0), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_decoSwag_winter08'}}, '1257900799.39caoconno': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(88.858, 2.886, 44.333), 'Pos': Point3(-19.803, 12.656, 10.105), 'Scale': VBase3(2.155, 2.155, 2.155), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_candycane_winter09'}}, '1257900799.45caoconno': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(-92.165, -4.532, 50.264), 'Pos': Point3(-19.922, 9.685, 10.016), 'Scale': VBase3(2.155, 2.155, 2.155), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_candycane_winter09'}}, '1257900799.48caoconno': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(88.858, 2.886, 0.0), 'Pos': Point3(-19.385, 11.323, 8.852), 'Scale': VBase3(1.0, 1.0, 1.0), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_decoSwag_winter08'}}}, 'Visual': {'Model': 'models/buildings/interior_spanish_store'}}}, 'Node Links': [], 'Layers': {}, 'ObjectIds': {'1156268617.43dzlu0n': '["Objects"]["1156268617.43dzlu0n"]', '1172033810.72kmuller': '["Objects"]["1156268617.43dzlu0n"]["Objects"]["1172033810.72kmuller"]', '1172099147.57kmuller': '["Objects"]["1156268617.43dzlu0n"]["Objects"]["1172099147.57kmuller"]', '1172099221.96kmuller': '["Objects"]["1156268617.43dzlu0n"]["Objects"]["1172099221.96kmuller"]', '1172099260.19kmuller': '["Objects"]["1156268617.43dzlu0n"]["Objects"]["1172099260.19kmuller"]', '1172099285.99kmuller': '["Objects"]["1156268617.43dzlu0n"]["Objects"]["1172099285.99kmuller"]', '1172099292.96kmuller': '["Objects"]["1156268617.43dzlu0n"]["Objects"]["1172099292.96kmuller"]', '1172099299.27kmuller': '["Objects"]["1156268617.43dzlu0n"]["Objects"]["1172099299.27kmuller"]', '1172099440.27kmuller': '["Objects"]["1156268617.43dzlu0n"]["Objects"]["1172099440.27kmuller"]', '1172099473.02kmuller': '["Objects"]["1156268617.43dzlu0n"]["Objects"]["1172099473.02kmuller"]', '1172099512.5kmuller': '["Objects"]["1156268617.43dzlu0n"]["Objects"]["1172099512.5kmuller"]', '1179356026.57dzlu': '["Objects"]["1156268617.43dzlu0n"]["Objects"]["1179356026.57dzlu"]', '1179356058.49dzlu': '["Objects"]["1156268617.43dzlu0n"]["Objects"]["1179356058.49dzlu"]', '1179356208.68dzlu': '["Objects"]["1156268617.43dzlu0n"]["Objects"]["1179356208.68dzlu"]', '1192814721.02akelts': '["Objects"]["1156268617.43dzlu0n"]["Objects"]["1192814721.02akelts"]', '1257900091.39caoconno': '["Objects"]["1156268617.43dzlu0n"]["Objects"]["1257900091.39caoconno"]', '1257900175.75caoconno': '["Objects"]["1156268617.43dzlu0n"]["Objects"]["1257900175.75caoconno"]', '1257900216.19caoconno': 
'["Objects"]["1156268617.43dzlu0n"]["Objects"]["1257900216.19caoconno"]', '1257900247.3caoconno': '["Objects"]["1156268617.43dzlu0n"]["Objects"]["1257900247.3caoconno"]', '1257900309.84caoconno': '["Objects"]["1156268617.43dzlu0n"]["Objects"]["1257900309.84caoconno"]', '1257900441.91caoconno': '["Objects"]["1156268617.43dzlu0n"]["Objects"]["1257900441.91caoconno"]', '1257900470.23caoconno': '["Objects"]["1156268617.43dzlu0n"]["Objects"]["1257900470.23caoconno"]', '1257900516.25caoconno': '["Objects"]["1156268617.43dzlu0n"]["Objects"]["1257900516.25caoconno"]', '1257900545.48caoconno': '["Objects"]["1156268617.43dzlu0n"]["Objects"]["1257900545.48caoconno"]', '1257900584.36caoconno': '["Objects"]["1156268617.43dzlu0n"]["Objects"]["1257900584.36caoconno"]', '1257900615.98caoconno': '["Objects"]["1156268617.43dzlu0n"]["Objects"]["1257900615.98caoconno"]', '1257900616.06caoconno': '["Objects"]["1156268617.43dzlu0n"]["Objects"]["1257900616.06caoconno"]', '1257900616.09caoconno': '["Objects"]["1156268617.43dzlu0n"]["Objects"]["1257900616.09caoconno"]', '1257900616.13caoconno': '["Objects"]["1156268617.43dzlu0n"]["Objects"]["1257900616.13caoconno"]', '1257900631.56caoconno': '["Objects"]["1156268617.43dzlu0n"]["Objects"]["1257900631.56caoconno"]', '1257900631.61caoconno': '["Objects"]["1156268617.43dzlu0n"]["Objects"]["1257900631.61caoconno"]', '1257900631.69caoconno': '["Objects"]["1156268617.43dzlu0n"]["Objects"]["1257900631.69caoconno"]', '1257900631.7caoconno': '["Objects"]["1156268617.43dzlu0n"]["Objects"]["1257900631.7caoconno"]', '1257900660.44caoconno': '["Objects"]["1156268617.43dzlu0n"]["Objects"]["1257900660.44caoconno"]', '1257900660.53caoconno': '["Objects"]["1156268617.43dzlu0n"]["Objects"]["1257900660.53caoconno"]', '1257900660.56caoconno': '["Objects"]["1156268617.43dzlu0n"]["Objects"]["1257900660.56caoconno"]', '1257900660.5caoconno': '["Objects"]["1156268617.43dzlu0n"]["Objects"]["1257900660.5caoconno"]', '1257900741.05caoconno': '["Objects"]["1156268617.43dzlu0n"]["Objects"]["1257900741.05caoconno"]', '1257900741.08caoconno': '["Objects"]["1156268617.43dzlu0n"]["Objects"]["1257900741.08caoconno"]', '1257900741.28caoconno': '["Objects"]["1156268617.43dzlu0n"]["Objects"]["1257900741.28caoconno"]', '1257900799.36caoconno': '["Objects"]["1156268617.43dzlu0n"]["Objects"]["1257900799.36caoconno"]', '1257900799.39caoconno': '["Objects"]["1156268617.43dzlu0n"]["Objects"]["1257900799.39caoconno"]', '1257900799.45caoconno': '["Objects"]["1156268617.43dzlu0n"]["Objects"]["1257900799.45caoconno"]', '1257900799.48caoconno': '["Objects"]["1156268617.43dzlu0n"]["Objects"]["1257900799.48caoconno"]'}}
extraInfo = {'camPos': Point3(-0.272955, -10.6425, 0.0867991), 'camHpr': VBase3(0, 0, 0), 'focalLength': 0.7921423316, 'skyState': -1, 'fog': 0} | [
"[email protected]"
]
| |
64128404526fc7098153d4f5fada2b52e72e6af3 | 2b9397e9e26f7d97ce6983d36c9842ac773b70c6 | /operation/migrations/0071_auto_20190724_1239.py | 45003f13e5b832519139fd299b437ec39213d210 | []
| no_license | eakDev/aip-1 | 288ed7d7b8cf65c74b510f4f4e45292e3342796d | 3db2520e3c246e25e2cfa62e395a3ba6ebe37252 | refs/heads/main | 2023-05-02T08:57:42.449727 | 2021-05-23T10:16:59 | 2021-05-23T10:16:59 | 386,578,482 | 1 | 0 | null | 2021-07-16T09:15:22 | 2021-07-16T09:15:22 | null | UTF-8 | Python | false | false | 2,163 | py | # Generated by Django 2.1.1 on 2019-07-24 04:39
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('operation', '0070_projectitem_dependency'),
]
operations = [
migrations.RemoveField(
model_name='majorexpense',
name='budget',
),
migrations.RemoveField(
model_name='majorexpense',
name='spending',
),
migrations.AddField(
model_name='majorexpense',
name='admin',
field=models.DecimalField(decimal_places=2, default=0, max_digits=20, verbose_name='Admin'),
),
migrations.AddField(
model_name='majorexpense',
name='drawings',
field=models.DecimalField(decimal_places=2, default=0, max_digits=20, verbose_name='Drawings'),
),
migrations.AddField(
model_name='majorexpense',
name='equipment',
field=models.DecimalField(decimal_places=2, default=0, max_digits=20, verbose_name='Equipment'),
),
migrations.AddField(
model_name='majorexpense',
name='labor',
field=models.DecimalField(decimal_places=2, default=0, max_digits=20, verbose_name='Labor'),
),
migrations.AddField(
model_name='majorexpense',
name='materials',
field=models.DecimalField(decimal_places=2, default=0, max_digits=20, verbose_name='Materials'),
),
migrations.AddField(
model_name='majorexpense',
name='overhead',
field=models.DecimalField(decimal_places=2, default=0, max_digits=20, verbose_name='Overhead'),
),
migrations.AddField(
model_name='majorexpense',
name='total',
field=models.DecimalField(decimal_places=2, default=0, max_digits=20, verbose_name='Total'),
),
migrations.AddField(
model_name='majorexpense',
name='vat',
field=models.DecimalField(decimal_places=2, default=0, max_digits=20, verbose_name='VAT'),
),
]
| [
"[email protected]"
]
| |
9eace9f419b1b9ae0cad1b501f2f2357ec18e607 | 81e706b69c789aff05691c41fa79156942927f82 | /site-packages/tensorflow/python/framework/ops.py | 5affbf8b8f32469ada589bc76917838044bd011c | []
| no_license | yoncho/OpenCV-code | f5a1091ef32f3c8c3254ab93e083950b84c4fabd | bda2f793b11462e67c7ab644b342beffb871e3de | refs/heads/master | 2023-03-30T12:01:23.521511 | 2021-04-01T13:45:44 | 2021-04-01T13:45:44 | 291,398,453 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 252,207 | py | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Classes and functions used to construct graphs."""
# pylint: disable=g-bad-name
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import copy
import re
import sys
import threading
import numpy as np
import six
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.core.framework import attr_value_pb2
from tensorflow.core.framework import function_pb2
from tensorflow.core.framework import graph_pb2
from tensorflow.core.framework import node_def_pb2
from tensorflow.core.framework import op_def_pb2
from tensorflow.core.framework import versions_pb2
from tensorflow.core.protobuf import config_pb2
from tensorflow.python import pywrap_tensorflow as c_api
from tensorflow.python import tf2
from tensorflow.python.eager import context
from tensorflow.python.eager import core
from tensorflow.python.eager import tape
from tensorflow.python.framework import c_api_util
from tensorflow.python.framework import composite_tensor
from tensorflow.python.framework import device as pydev
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import op_def_registry
from tensorflow.python.framework import registry
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import traceable_stack
from tensorflow.python.framework import versions
from tensorflow.python.ops import control_flow_util
from tensorflow.python.platform import app
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import compat
from tensorflow.python.util import decorator_utils
from tensorflow.python.util import deprecation
from tensorflow.python.util import function_utils
from tensorflow.python.util import lock_util
from tensorflow.python.util import memory
from tensorflow.python.util import tf_contextlib
from tensorflow.python.util import tf_stack
from tensorflow.python.util.deprecation import deprecated_args
from tensorflow.python.util.lazy_loader import LazyLoader
from tensorflow.python.util.tf_export import tf_export
# This is to avoid a circular dependency: ops -> tensor_spec -> ops
tensor_spec = LazyLoader(
"tensor_spec", globals(),
"tensorflow.python.framework.tensor_spec")
# Temporary global switches determining if we should enable the work-in-progress
# calls to the C API. These will be removed once all functionality is supported.
_USE_C_API = True
_USE_C_SHAPES = True
def tensor_id(tensor):
"""Returns a unique identifier for this Tensor."""
return tensor._id # pylint: disable=protected-access
class _UserDeviceSpec(object):
"""Store user-specified device and provide computation of merged device."""
def __init__(self, device_name_or_function):
self._device_name_or_function = device_name_or_function
self.display_name = str(self._device_name_or_function)
self.function = device_name_or_function
self.raw_string = None
if isinstance(device_name_or_function, pydev.MergeDevice):
self.is_null_merge = device_name_or_function.is_null_merge
elif callable(device_name_or_function):
self.is_null_merge = False
dev_func = self._device_name_or_function
func_name = function_utils.get_func_name(dev_func)
func_code = function_utils.get_func_code(dev_func)
if func_code:
fname = func_code.co_filename
lineno = func_code.co_firstlineno
else:
fname = "unknown"
lineno = -1
self.display_name = "%s<%s, %d>" % (func_name, fname, lineno)
elif device_name_or_function is None:
# NOTE(taylorrobie): This MUST be False. None signals a break in the
# device stack, so `is_null_merge` must be False for such a case to
# allow callers to safely skip over null merges without missing a None.
self.is_null_merge = False
else:
self.raw_string = device_name_or_function
self.function = pydev.merge_device(device_name_or_function)
self.is_null_merge = self.function.is_null_merge
# We perform this check in __init__ because it is of non-trivial cost,
# and self.string_merge is typically called many times.
self.fast_string_merge = isinstance(self.function, pydev.MergeDevice)
def string_merge(self, node_def):
if self.fast_string_merge:
return self.function.shortcut_string_merge(node_def)
return compat.as_str(_device_string(self.function(node_def)))
class NullContextmanager(object):
def __init__(self, *args, **kwargs):
pass
def __enter__(self):
pass
def __exit__(self, type_arg, value_arg, traceback_arg):
return False # False values do not suppress exceptions
def _override_helper(clazz_object, operator, func):
"""Overrides (string) operator on Tensors to call func.
Args:
clazz_object: the class to override for; either Tensor or SparseTensor.
operator: the string name of the operator to override.
func: the function that replaces the overridden operator.
Raises:
ValueError: If operator has already been overwritten,
or if operator is not allowed to be overwritten.
"""
existing = getattr(clazz_object, operator, None)
if existing is not None:
# Check to see if this is a default method-wrapper or slot wrapper which
# will be true for the comparison operators.
if not isinstance(existing, type(object.__lt__)):
raise ValueError("operator %s cannot be overwritten again on class %s." %
(operator, clazz_object))
if operator not in Tensor.OVERLOADABLE_OPERATORS:
raise ValueError("Overriding %s is disallowed" % operator)
setattr(clazz_object, operator, func)
def _as_graph_element(obj):
"""Convert `obj` to a graph element if possible, otherwise return `None`.
Args:
obj: Object to convert.
Returns:
The result of `obj._as_graph_element()` if that method is available;
otherwise `None`.
"""
conv_fn = getattr(obj, "_as_graph_element", None)
if conv_fn and callable(conv_fn):
return conv_fn()
return None
_TENSOR_LIKE_TYPES = tuple()
def is_dense_tensor_like(t):
"""EXPERIMENTAL: Returns true if `t` implements the tensor interface.
See `register_dense_tensor_like_type()` for the current definition of a
"tensor-like type".
Args:
t: An object.
Returns:
True iff `t` is an instance of one of the registered "tensor-like" types.
"""
return isinstance(t, _TENSOR_LIKE_TYPES)
def register_dense_tensor_like_type(tensor_type):
"""EXPERIMENTAL: Registers `tensor_type` as implementing the tensor interface.
A "tensor-like type" can represent a single dense tensor, and implements
the `name` and `dtype` properties.
Args:
tensor_type: A type implementing the tensor interface.
Raises:
TypeError: If `tensor_type` does not implement the tensor interface.
"""
try:
if not isinstance(tensor_type.name, property):
raise TypeError("Type %s does not define a `name` property" %
tensor_type.__name__)
except AttributeError:
raise TypeError("Type %s does not define a `name` property" %
tensor_type.__name__)
try:
if not isinstance(tensor_type.dtype, property):
raise TypeError("Type %s does not define a `dtype` property" %
tensor_type.__name__)
except AttributeError:
raise TypeError("Type %s does not define a `dtype` property" %
tensor_type.__name__)
# We expect this list to be small, so choose quadratic complexity
# for registration, so that we have a tuple that can be used for
# more efficient `isinstance` checks later.
global _TENSOR_LIKE_TYPES
_TENSOR_LIKE_TYPES = tuple(list(_TENSOR_LIKE_TYPES) + [tensor_type])
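# --- Editor's illustrative sketch (not part of the original module): a
# minimal class satisfying the "tensor-like" interface checked above --
# `name` and `dtype` must both be properties. The class and helper names
# are hypothetical; calling this mutates the module-level registry, so it
# is shown as an uninvoked function.
def _example_register_tensor_like():  # hypothetical helper
  class _FakeDenseTensor(object):  # hypothetical tensor-like type
    @property
    def name(self):
      return "fake:0"
    @property
    def dtype(self):
      return dtypes.float32
  register_dense_tensor_like_type(_FakeDenseTensor)
  # Instances now pass the registered-type check.
  assert is_dense_tensor_like(_FakeDenseTensor())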
def uid():
"""A unique (within this program execution) integer."""
return c_api.TFE_Py_UID()
def numpy_text(tensor, is_repr=False):
"""Human readable representation of a tensor's numpy value."""
if tensor.dtype.is_numpy_compatible:
text = repr(tensor.numpy()) if is_repr else str(tensor.numpy())
else:
text = "<unprintable>"
if "\n" in text:
text = "\n" + text
return text
# NOTE(ebrevdo): Do not subclass this. If you do, I will break you on purpose.
class _TensorLike(object):
"""Internal cls for grouping Tensor, SparseTensor, ..., for is_instance."""
pass
@tf_export("Tensor")
class Tensor(_TensorLike):
"""Represents one of the outputs of an `Operation`.
A `Tensor` is a symbolic handle to one of the outputs of an
`Operation`. It does not hold the values of that operation's output,
but instead provides a means of computing those values in a
TensorFlow `tf.compat.v1.Session`.
This class has two primary purposes:
1. A `Tensor` can be passed as an input to another `Operation`.
This builds a dataflow connection between operations, which
enables TensorFlow to execute an entire `Graph` that represents a
large, multi-step computation.
2. After the graph has been launched in a session, the value of the
`Tensor` can be computed by passing it to
`tf.Session.run`.
`t.eval()` is a shortcut for calling
`tf.compat.v1.get_default_session().run(t)`.
In the following example, `c`, `d`, and `e` are symbolic `Tensor`
objects, whereas `result` is a numpy array that stores a concrete
value:
```python
# Build a dataflow graph.
c = tf.constant([[1.0, 2.0], [3.0, 4.0]])
d = tf.constant([[1.0, 1.0], [0.0, 1.0]])
e = tf.matmul(c, d)
# Construct a `Session` to execute the graph.
sess = tf.compat.v1.Session()
# Execute the graph and store the value that `e` represents in `result`.
result = sess.run(e)
```
"""
# List of Python operators that we allow to override.
OVERLOADABLE_OPERATORS = {
# Binary.
"__add__",
"__radd__",
"__sub__",
"__rsub__",
"__mul__",
"__rmul__",
"__div__",
"__rdiv__",
"__truediv__",
"__rtruediv__",
"__floordiv__",
"__rfloordiv__",
"__mod__",
"__rmod__",
"__lt__",
"__le__",
"__gt__",
"__ge__",
"__and__",
"__rand__",
"__or__",
"__ror__",
"__xor__",
"__rxor__",
"__getitem__",
"__pow__",
"__rpow__",
# Unary.
"__invert__",
"__neg__",
"__abs__",
"__matmul__",
"__rmatmul__"
}
def __init__(self, op, value_index, dtype):
"""Creates a new `Tensor`.
Args:
op: An `Operation`. `Operation` that computes this tensor.
value_index: An `int`. Index of the operation's endpoint that produces
this tensor.
dtype: A `DType`. Type of elements stored in this tensor.
Raises:
TypeError: If the op is not an `Operation`.
"""
if not isinstance(op, Operation):
raise TypeError("op needs to be an Operation: %s" % op)
self._op = op
self._value_index = value_index
self._dtype = dtypes.as_dtype(dtype)
# This will be set by self._as_tf_output().
self._tf_output = None
# This will be set by self.shape().
self._shape_val = None
# List of operations that use this Tensor as input. We maintain this list
# to easily navigate a computation graph.
self._consumers = []
self._id = uid()
self._name = None
@property
def op(self):
"""The `Operation` that produces this tensor as an output."""
return self._op
@property
def dtype(self):
"""The `DType` of elements in this tensor."""
return self._dtype
@property
def graph(self):
"""The `Graph` that contains this tensor."""
return self._op.graph
@property
def name(self):
"""The string name of this tensor."""
if self._name is None:
if not self._op.name:
raise ValueError("Operation was not named: %s" % self._op)
self._name = "%s:%d" % (self._op.name, self._value_index)
return self._name
@property
def device(self):
"""The name of the device on which this tensor will be produced, or None."""
return self._op.device
@property
def shape(self):
"""Returns the `TensorShape` that represents the shape of this tensor.
The shape is computed using shape inference functions that are
registered in the Op for each `Operation`. See
`tf.TensorShape`
for more details of what a shape represents.
The inferred shape of a tensor is used to provide shape
information without having to launch the graph in a session. This
can be used for debugging, and providing early error messages. For
example:
```python
c = tf.constant([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]])
print(c.shape)
==> TensorShape([Dimension(2), Dimension(3)])
d = tf.constant([[1.0, 0.0], [0.0, 1.0], [1.0, 0.0], [0.0, 1.0]])
print(d.shape)
==> TensorShape([Dimension(4), Dimension(2)])
# Raises a ValueError, because `c` and `d` do not have compatible
# inner dimensions.
e = tf.matmul(c, d)
f = tf.matmul(c, d, transpose_a=True, transpose_b=True)
print(f.shape)
==> TensorShape([Dimension(3), Dimension(4)])
```
In some cases, the inferred shape may have unknown dimensions. If
the caller has additional information about the values of these
dimensions, `Tensor.set_shape()` can be used to augment the
inferred shape.
Returns:
A `TensorShape` representing the shape of this tensor.
"""
if self._shape_val is None:
self._shape_val = self._c_api_shape()
return self._shape_val
def _get_input_ops_without_shapes(self, target_op):
"""Returns ops needing shape inference to compute target_op's shape."""
result = []
stack = [self._op]
visited = set()
while stack:
op = stack.pop()
if op in visited:
continue
result.append(op)
stack.extend(t.op for t in op.inputs if t._shape_val is None)
visited.add(op)
return result
def _c_api_shape(self):
"""Returns the TensorShape of this tensor according to the C API."""
c_graph = self._op._graph._c_graph # pylint: disable=protected-access
shape_vector, unknown_shape = c_api.TF_GraphGetTensorShapeHelper(
c_graph, self._as_tf_output())
if unknown_shape:
return tensor_shape.unknown_shape()
else:
shape_vector = [None if d == -1 else d for d in shape_vector]
return tensor_shape.TensorShape(shape_vector)
@property
def _shape(self):
logging.warning("Tensor._shape is private, use Tensor.shape "
"instead. Tensor._shape will eventually be removed.")
return self.shape
@_shape.setter
def _shape(self, value):
raise ValueError(
"Tensor._shape cannot be assigned, use Tensor.set_shape instead.")
def __iter__(self):
if not context.executing_eagerly():
raise TypeError(
"Tensor objects are only iterable when eager execution is "
"enabled. To iterate over this tensor use tf.map_fn.")
shape = self._shape_tuple()
if shape is None:
raise TypeError("Cannot iterate over a tensor with unknown shape.")
if not shape:
raise TypeError("Cannot iterate over a scalar tensor.")
if shape[0] is None:
raise TypeError(
"Cannot iterate over a tensor with unknown first dimension.")
for i in xrange(shape[0]):
yield self[i]
def _shape_as_list(self):
if self.shape.ndims is not None:
return [dim.value for dim in self.shape.dims]
else:
return None
def _shape_tuple(self):
shape = self._shape_as_list()
if shape is None:
return None
return tuple(shape)
def _rank(self):
"""Integer rank of this Tensor, if known, else None.
Returns:
Integer rank or None
"""
return self.shape.ndims
def get_shape(self):
"""Alias of Tensor.shape."""
return self.shape
def set_shape(self, shape):
"""Updates the shape of this tensor.
This method can be called multiple times, and will merge the given
`shape` with the current shape of this tensor. It can be used to
provide additional information about the shape of this tensor that
cannot be inferred from the graph alone. For example, this can be used
to provide additional information about the shapes of images:
```python
_, image_data = tf.compat.v1.TFRecordReader(...).read(...)
image = tf.image.decode_png(image_data, channels=3)
# The height and width dimensions of `image` are data dependent, and
# cannot be computed without executing the op.
print(image.shape)
==> TensorShape([Dimension(None), Dimension(None), Dimension(3)])
# We know that each image in this dataset is 28 x 28 pixels.
image.set_shape([28, 28, 3])
print(image.shape)
==> TensorShape([Dimension(28), Dimension(28), Dimension(3)])
```
NOTE: This shape is not enforced at runtime. Setting incorrect shapes can
result in inconsistencies between the statically-known graph and the runtime
value of tensors. For runtime validation of the shape, use `tf.ensure_shape`
instead.
Args:
shape: A `TensorShape` representing the shape of this tensor, a
`TensorShapeProto`, a list, a tuple, or None.
Raises:
ValueError: If `shape` is not compatible with the current shape of
this tensor.
"""
# Reset cached shape.
self._shape_val = None
# We want set_shape to be reflected in the C API graph for when we run it.
if not isinstance(shape, tensor_shape.TensorShape):
shape = tensor_shape.TensorShape(shape)
dim_list = []
if shape.dims is None:
unknown_shape = True
else:
unknown_shape = False
for dim in shape.dims:
if dim.value is None:
dim_list.append(-1)
else:
dim_list.append(dim.value)
try:
c_api.TF_GraphSetTensorShape_wrapper(
self._op._graph._c_graph, # pylint: disable=protected-access
self._as_tf_output(),
dim_list,
unknown_shape)
except errors.InvalidArgumentError as e:
# Convert to ValueError for backwards compatibility.
raise ValueError(str(e))
@property
def value_index(self):
"""The index of this tensor in the outputs of its `Operation`."""
return self._value_index
def consumers(self):
"""Returns a list of `Operation`s that consume this tensor.
Returns:
A list of `Operation`s.
"""
consumer_names = c_api.TF_OperationOutputConsumers_wrapper(
self._as_tf_output())
# pylint: disable=protected-access
return [
self.graph._get_operation_by_name_unsafe(name)
for name in consumer_names
]
# pylint: enable=protected-access
def _as_node_def_input(self):
"""Return a value to use for the NodeDef "input" attribute.
The returned string can be used in a NodeDef "input" attribute
to indicate that the NodeDef uses this Tensor as input.
Raises:
ValueError: if this Tensor's Operation does not have a name.
Returns:
a string.
"""
if not self._op.name:
raise ValueError("Operation was not named: %s" % self._op)
if self._value_index == 0:
return self._op.name
else:
return "%s:%d" % (self._op.name, self._value_index)
def _as_tf_output(self):
# pylint: disable=protected-access
# NOTE: Beyond preventing unnecessary (re-)allocation, the cached object
# also guarantees that a dictionary of tf_output objects will retain a
# deterministic (yet unsorted) order which prevents memory blowup in the
# cache of executor(s) stored for every session.
if self._tf_output is None:
self._tf_output = c_api_util.tf_output(self.op._c_op, self.value_index)
return self._tf_output
# pylint: enable=protected-access
def __str__(self):
return "Tensor(\"%s\"%s%s%s)" % (
self.name,
(", shape=%s" %
self.get_shape()) if self.get_shape().ndims is not None else "",
(", dtype=%s" % self._dtype.name) if self._dtype else "",
(", device=%s" % self.device) if self.device else "")
def __repr__(self):
return "<tf.Tensor '%s' shape=%s dtype=%s>" % (self.name, self.get_shape(),
self._dtype.name)
def __hash__(self):
# Necessary to support Python's collection membership operators
return id(self)
def __eq__(self, other):
# Necessary to support Python's collection membership operators
# NOTE(taylorrobie): equivalent to: id(self) == id(other)
return self is other
def __copy__(self):
# TODO(b/77597810): get rid of Tensor copies.
cls = self.__class__
result = cls.__new__(cls)
result.__dict__.update(self.__dict__)
return result
# NOTE(mrry): This enables the Tensor's overloaded "right" binary
# operators to run when the left operand is an ndarray, because it
# accords the Tensor class higher priority than an ndarray, or a
# numpy matrix.
# TODO(mrry): Convert this to using numpy's __numpy_ufunc__
# mechanism, which allows more control over how Tensors interact
# with ndarrays.
__array_priority__ = 100
@staticmethod
def _override_operator(operator, func):
_override_helper(Tensor, operator, func)
def __bool__(self):
"""Dummy method to prevent a tensor from being used as a Python `bool`.
This overload raises a `TypeError` when the user inadvertently
treats a `Tensor` as a boolean (e.g. in an `if` statement). For
example:
```python
if tf.constant(True): # Will raise.
# ...
if tf.constant(5) < tf.constant(7): # Will raise.
# ...
```
This disallows ambiguities between testing the Python value vs testing the
dynamic condition of the `Tensor`.
Raises:
`TypeError`.
"""
raise TypeError("Using a `tf.Tensor` as a Python `bool` is not allowed. "
"Use `if t is not None:` instead of `if t:` to test if a "
"tensor is defined, and use TensorFlow ops such as "
"tf.cond to execute subgraphs conditioned on the value of "
"a tensor.")
def __nonzero__(self):
"""Dummy method to prevent a tensor from being used as a Python `bool`.
This is the Python 2.x counterpart to `__bool__()` above.
Raises:
`TypeError`.
"""
raise TypeError("Using a `tf.Tensor` as a Python `bool` is not allowed. "
"Use `if t is not None:` instead of `if t:` to test if a "
"tensor is defined, and use TensorFlow ops such as "
"tf.cond to execute subgraphs conditioned on the value of "
"a tensor.")
def eval(self, feed_dict=None, session=None):
"""Evaluates this tensor in a `Session`.
Calling this method will execute all preceding operations that
produce the inputs needed for the operation that produces this
tensor.
*N.B.* Before invoking `Tensor.eval()`, its graph must have been
launched in a session, and either a default session must be
available, or `session` must be specified explicitly.
Args:
feed_dict: A dictionary that maps `Tensor` objects to feed values. See
`tf.Session.run` for a description of the valid feed values.
session: (Optional.) The `Session` to be used to evaluate this tensor. If
none, the default session will be used.
Returns:
A numpy array corresponding to the value of this tensor.
"""
return _eval_using_default_session(self, feed_dict, self.graph, session)
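# --- Editor's illustrative sketch (not part of the original module): the
# symbolic-Tensor life cycle from the class docstring -- build a graph,
# inspect statically inferred metadata, then evaluate in a session.
# Assumes TF 1.x graph mode; the helper name is hypothetical.
def _example_symbolic_tensor():  # hypothetical helper
  import tensorflow as tf  # local import to keep the sketch self-contained
  c = tf.constant([[1.0, 2.0], [3.0, 4.0]])
  d = tf.matmul(c, c)
  assert d.shape.as_list() == [2, 2]  # shape known without running the graph
  assert d.op.type == "MatMul"        # the Operation that produces `d`
  with tf.compat.v1.Session() as sess:
    return sess.run(d)                # concrete numpy values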
# TODO(agarwal): consider getting rid of this.
class _EagerTensorBase(Tensor):
"""Base class for EagerTensor."""
@property
def dtype(self):
# Note: using the intern table directly here as this is
# performance-sensitive in some models.
return dtypes._INTERN_TABLE[self._datatype_enum()] # pylint: disable=protected-access
def numpy(self):
"""Returns a numpy array or a scalar with the same contents as the Tensor.
TODO(ashankar,agarwal): Perhaps this should NOT reference the underlying
buffer but instead always explicitly copy? Note that currently it may or may
not copy based on whether the numpy data is properly aligned or not.
Returns:
A numpy array or a scalar. Numpy array may share memory with the
Tensor object. Any changes to one may be reflected in the other. A scalar
value is returned when self has rank 0.
Raises:
ValueError: if the type of this Tensor is not representable in numpy.
"""
if self.dtype == dtypes.resource:
raise ValueError("Resource handles are not convertible to numpy.")
maybe_arr = self._cpu_nograd()._numpy() # pylint: disable=protected-access
return maybe_arr.copy() if isinstance(maybe_arr, np.ndarray) else maybe_arr
# __int__, __float__ and __index__ may copy the tensor to CPU and
# only work for scalars; values are cast as per numpy.
def __int__(self):
return int(self.numpy())
def __float__(self):
return float(self.numpy())
def __index__(self):
return int(self.numpy())
def __array__(self, dtype=None):
return np.asarray(self.numpy(), dtype=dtype)
def __format__(self, format_spec):
return self.numpy().__format__(format_spec)
def __reduce__(self):
return (convert_to_tensor, (self.numpy(),))
def _numpy(self):
raise NotImplementedError()
@property
def backing_device(self):
"""Returns the name of the device holding this tensor's memory.
`.backing_device` is usually the same as `.device`, which returns
the device on which the kernel of the operation that produced this tensor
ran. However, some operations can produce tensors on a different device
(e.g., an operation that executes on the GPU but produces output tensors
in host memory).
"""
raise NotImplementedError()
def __copy__(self):
# Eager Tensors are immutable so it's safe to return themselves as a copy.
return self
def __deepcopy__(self, memo):
# Eager Tensors are immutable so it's safe to return themselves as a copy.
del memo
return self
def _datatype_enum(self):
raise NotImplementedError()
def _shape_tuple(self):
"""The shape of this Tensor, as a tuple.
This is more performant than tuple(shape().as_list()) as it avoids
two list and one object creation. Marked private for now as from an API
perspective, it would be better to have a single performant way of
getting a shape rather than exposing shape() and shape_tuple()
(and heaven forbid, shape_list() etc. as well!). Punting on that for now,
but ideally one would work things out and remove the need for this method.
Returns:
tuple with the shape.
"""
raise NotImplementedError()
def _rank(self):
"""Integer rank of this Tensor.
Unlike regular Tensors, the rank is always known for EagerTensors.
This is more performant than len(self._shape_tuple())
Returns:
Integer rank
"""
raise NotImplementedError()
def _num_elements(self):
"""Number of elements of this Tensor.
Unlike regular Tensors, the number of elements is always known for
EagerTensors.
This is more performant than tensor.shape.num_elements
Returns:
Long - num elements in the tensor
"""
raise NotImplementedError()
def _copy_to_device(self, context, device): # pylint: disable=redefined-outer-name
raise NotImplementedError()
def __str__(self):
return "tf.Tensor(%s, shape=%s, dtype=%s)" % (numpy_text(self), self.shape,
self.dtype.name)
def __repr__(self):
return "<tf.Tensor: id=%s, shape=%s, dtype=%s, numpy=%s>" % (
self._id, self.shape, self.dtype.name, numpy_text(self, is_repr=True))
@staticmethod
def _override_operator(name, func):
setattr(_EagerTensorBase, name, func)
def _copy_nograd(self, ctx=None, device_name=None):
"""Copies tensor to dest device, but doesn't record the operation."""
# pylint: disable=protected-access
# Creates a new tensor on the dest device.
if ctx is None:
ctx = context.context()
if device_name is None:
device_name = ctx.device_name
# pylint: disable=protected-access
try:
ctx.ensure_initialized()
new_tensor = self._copy_to_device(context=ctx._handle, device=device_name)
except core._NotOkStatusException as e:
six.raise_from(core._status_to_exception(e.code, e.message), None)
return new_tensor
def _copy(self, ctx=None, device_name=None):
"""Copies tensor to dest device."""
new_tensor = self._copy_nograd(ctx, device_name)
# Record the copy on tape and define backprop copy as well.
if context.executing_eagerly():
self_device = self.device
def grad_fun(dresult):
return [
dresult._copy(device_name=self_device)
if hasattr(dresult, "_copy") else dresult
]
tape.record_operation("_copy", [new_tensor], [self], grad_fun)
return new_tensor
# pylint: enable=protected-access
@property
def shape(self):
if self._tensor_shape is None: # pylint: disable=access-member-before-definition
# `_tensor_shape` is declared and defined in the definition of
# `EagerTensor`, in C.
self._tensor_shape = tensor_shape.TensorShape(self._shape_tuple())
return self._tensor_shape
def get_shape(self):
"""Alias of Tensor.shape."""
return self.shape
def _shape_as_list(self):
"""The shape of the tensor as a list."""
return list(self._shape_tuple())
@property
def ndim(self):
"""Returns the number of Tensor dimensions."""
return self.shape.ndims
def __len__(self):
"""Returns the length of the first dimension in the Tensor."""
if not self.shape.ndims:
raise TypeError("Scalar tensor has no `len()`")
return self._shape_tuple()[0]
def _cpu_nograd(self):
"""A copy of this Tensor with contents backed by host memory.
The copy cannot be differentiated through.
Returns:
A CPU-memory backed Tensor object with the same contents as this Tensor.
"""
return self._copy_nograd(context.context(), "CPU:0")
def cpu(self):
"""A copy of this Tensor with contents backed by host memory."""
return self._copy(context.context(), "CPU:0")
def gpu(self, gpu_index=0):
"""A copy of this Tensor with contents backed by memory on the GPU.
    Args:
gpu_index: Identifies which GPU to place the contents on the returned
Tensor in.
Returns:
A GPU-memory backed Tensor object initialized with the same contents
as this Tensor.
"""
return self._copy(context.context(), "GPU:" + str(gpu_index))
def __bool__(self):
return bool(self.numpy())
def __nonzero__(self):
return self.__bool__()
def set_shape(self, shape):
if not self.shape.is_compatible_with(shape):
raise ValueError(
"Tensor's shape %s is not compatible with supplied shape %s" %
(self.shape, shape))
# Methods not supported / implemented for Eager Tensors.
@property
def op(self):
raise AttributeError(
"Tensor.op is meaningless when eager execution is enabled.")
@property
def graph(self):
raise AttributeError(
"Tensor.graph is meaningless when eager execution is enabled.")
@property
def name(self):
raise AttributeError(
"Tensor.name is meaningless when eager execution is enabled.")
@property
def value_index(self):
raise AttributeError(
"Tensor.value_index is meaningless when eager execution is enabled.")
def consumers(self):
raise NotImplementedError(
"Tensor.consumers is meaningless when eager execution is enabled.")
def _add_consumer(self, consumer):
raise NotImplementedError(
"_add_consumer not supported when eager execution is enabled.")
def _as_node_def_input(self):
raise NotImplementedError(
"_as_node_def_input not supported when eager execution is enabled.")
def _as_tf_output(self):
raise NotImplementedError(
"_as_tf_output not supported when eager execution is enabled.")
def eval(self, feed_dict=None, session=None):
raise NotImplementedError(
"eval is not supported when eager execution is enabled, "
"is .numpy() what you're looking for?")
# This call creates an EagerTensor class, as a subclass of _EagerTensorBase, and
# registers it with the current module.
EagerTensor = c_api.TFE_Py_InitEagerTensor(_EagerTensorBase)
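# --- Editor's illustrative sketch (not part of the original module): with
# eager execution enabled, EagerTensor values are concrete, so .numpy(),
# len() and Python bool() all work -- unlike the graph-mode Tensor above,
# whose __bool__ raises. Helper name is hypothetical.
def _example_eager_tensor():  # hypothetical helper
  import tensorflow as tf  # assumes eager execution is enabled
  t = tf.constant([1.0, 2.0, 3.0])
  assert len(t) == 3                   # first-dimension length is known
  assert bool(tf.reduce_all(t > 0.0))  # __bool__ reads the concrete value
  return t.numpy().sum()               # -> 6.0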
def _TensorTensorConversionFunction(t, dtype=None, name=None, as_ref=False):
_ = name, as_ref
if dtype and not dtype.is_compatible_with(t.dtype):
raise ValueError(
"Tensor conversion requested dtype %s for Tensor with dtype %s: %r" %
(dtype.name, t.dtype.name, str(t)))
return t
_tensor_conversion_func_registry = {
0: [(Tensor, _TensorTensorConversionFunction)]
}
_tensor_conversion_func_cache = {}
_tensor_conversion_func_lock = threading.Lock()
register_dense_tensor_like_type(Tensor)
@tf_export(v1=["convert_to_tensor"])
def convert_to_tensor(value,
dtype=None,
name=None,
preferred_dtype=None,
dtype_hint=None):
"""Converts the given `value` to a `Tensor`.
This function converts Python objects of various types to `Tensor`
objects. It accepts `Tensor` objects, numpy arrays, Python lists,
and Python scalars. For example:
```python
import numpy as np
def my_func(arg):
arg = tf.convert_to_tensor(arg, dtype=tf.float32)
return tf.matmul(arg, arg) + arg
# The following calls are equivalent.
value_1 = my_func(tf.constant([[1.0, 2.0], [3.0, 4.0]]))
value_2 = my_func([[1.0, 2.0], [3.0, 4.0]])
value_3 = my_func(np.array([[1.0, 2.0], [3.0, 4.0]], dtype=np.float32))
```
This function can be useful when composing a new operation in Python
(such as `my_func` in the example above). All standard Python op
constructors apply this function to each of their Tensor-valued
inputs, which allows those ops to accept numpy arrays, Python lists,
and scalars in addition to `Tensor` objects.
Note: This function diverges from default Numpy behavior for `float` and
`string` types when `None` is present in a Python list or scalar. Rather
than silently converting `None` values, an error will be thrown.
Args:
value: An object whose type has a registered `Tensor` conversion function.
dtype: Optional element type for the returned tensor. If missing, the type
is inferred from the type of `value`.
name: Optional name to use if a new `Tensor` is created.
preferred_dtype: Optional element type for the returned tensor, used when
dtype is None. In some cases, a caller may not have a dtype in mind when
converting to a tensor, so preferred_dtype can be used as a soft
preference. If the conversion to `preferred_dtype` is not possible, this
argument has no effect.
dtype_hint: same meaning as preferred_dtype, and overrides it.
Returns:
A `Tensor` based on `value`.
Raises:
TypeError: If no conversion function is registered for `value` to `dtype`.
RuntimeError: If a registered conversion function returns an invalid value.
ValueError: If the `value` is a tensor not of given `dtype` in graph mode.
"""
preferred_dtype = deprecation.deprecated_argument_lookup(
"dtype_hint", dtype_hint, "preferred_dtype", preferred_dtype)
return convert_to_tensor_v2(value, dtype, preferred_dtype, name)
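# --- Editor's illustrative sketch (not part of the original module): the
# "soft preference" semantics of `preferred_dtype` described above, as
# opposed to the hard requirement imposed by `dtype`. Helper name is
# hypothetical.
def _example_preferred_dtype():  # hypothetical helper
  t1 = convert_to_tensor(1, preferred_dtype=dtypes.float32)
  assert t1.dtype == dtypes.float32  # the int can be cast, hint honored
  t2 = convert_to_tensor(1.5, preferred_dtype=dtypes.int32)
  # 1.5 cannot be losslessly represented as int32, so the hint is silently
  # dropped and the inferred dtype wins -- no error is raised.
  assert t2.dtype == dtypes.float32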
@tf_export("convert_to_tensor", v1=[])
def convert_to_tensor_v2(value, dtype=None, dtype_hint=None, name=None):
"""Converts the given `value` to a `Tensor`.
This function converts Python objects of various types to `Tensor`
objects. It accepts `Tensor` objects, numpy arrays, Python lists,
and Python scalars. For example:
```python
import numpy as np
def my_func(arg):
arg = tf.convert_to_tensor(arg, dtype=tf.float32)
return tf.matmul(arg, arg) + arg
# The following calls are equivalent.
value_1 = my_func(tf.constant([[1.0, 2.0], [3.0, 4.0]]))
value_2 = my_func([[1.0, 2.0], [3.0, 4.0]])
value_3 = my_func(np.array([[1.0, 2.0], [3.0, 4.0]], dtype=np.float32))
```
This function can be useful when composing a new operation in Python
(such as `my_func` in the example above). All standard Python op
constructors apply this function to each of their Tensor-valued
inputs, which allows those ops to accept numpy arrays, Python lists,
and scalars in addition to `Tensor` objects.
Note: This function diverges from default Numpy behavior for `float` and
`string` types when `None` is present in a Python list or scalar. Rather
than silently converting `None` values, an error will be thrown.
Args:
value: An object whose type has a registered `Tensor` conversion function.
dtype: Optional element type for the returned tensor. If missing, the type
is inferred from the type of `value`.
dtype_hint: Optional element type for the returned tensor, used when dtype
is None. In some cases, a caller may not have a dtype in mind when
converting to a tensor, so dtype_hint can be used as a soft preference.
If the conversion to `dtype_hint` is not possible, this argument has no
effect.
name: Optional name to use if a new `Tensor` is created.
Returns:
A `Tensor` based on `value`.
Raises:
TypeError: If no conversion function is registered for `value` to `dtype`.
RuntimeError: If a registered conversion function returns an invalid value.
ValueError: If the `value` is a tensor not of given `dtype` in graph mode.
"""
return internal_convert_to_tensor(
value=value,
dtype=dtype,
name=name,
preferred_dtype=dtype_hint,
as_ref=False)
def _error_prefix(name):
return "" if name is None else "%s: " % name
def internal_convert_to_tensor(value,
dtype=None,
name=None,
as_ref=False,
preferred_dtype=None,
ctx=None,
accept_symbolic_tensors=True,
accept_composite_tensors=False):
"""Implementation of the public convert_to_tensor."""
if ctx is None:
ctx = context.context()
if isinstance(value, EagerTensor):
if ctx.executing_eagerly():
if dtype is not None:
dtype = dtypes.as_dtype(dtype)
value = _TensorTensorConversionFunction(value, dtype=dtype)
return value
else:
graph = get_default_graph()
if not graph.building_function:
raise RuntimeError("Attempting to capture an EagerTensor without "
"building a function.")
return graph.capture(value, name=name)
elif ((not accept_symbolic_tensors) and isinstance(value, Tensor) and
ctx.executing_eagerly()):
# Found a symbolic tensor in an eager context.
# This happens when we use the Keras functional API (i.e. calling layers
# on the output of `keras.Input()`, which is symbolic) while eager
# execution is enabled.
if _is_keras_symbolic_tensor(value):
# If the graph of the tensor isn't the Keras graph, we should still
# fail, for the time being. TODO(fchollet): consider allowing
# all symbolic tensors to raise this exception in this case.
raise core._SymbolicException( # pylint: disable=protected-access
"Using the symbolic output of a Keras layer during eager execution.")
if dtype is not None:
dtype = dtypes.as_dtype(dtype)
unwrapped_type = type(value)
conversion_func_list = _tensor_conversion_func_cache.get(unwrapped_type, None)
if conversion_func_list is None:
with _tensor_conversion_func_lock:
conversion_func_list = []
for _, funcs_at_priority in sorted(
_tensor_conversion_func_registry.items()):
for base_type, conversion_func in funcs_at_priority:
if isinstance(value, base_type):
conversion_func_list.append((base_type, conversion_func))
_tensor_conversion_func_cache[unwrapped_type] = conversion_func_list
for base_type, conversion_func in conversion_func_list:
# If dtype is None but preferred_dtype is not None, we try to
# cast to preferred_dtype first.
ret = None
if dtype is None and preferred_dtype is not None:
try:
ret = conversion_func(
value, dtype=preferred_dtype, name=name, as_ref=as_ref)
except (TypeError, ValueError, errors.UnimplementedError,
errors.InvalidArgumentError):
# Could not coerce the conversion to use the preferred dtype.
ret = None
if ret is not None and ret is not NotImplemented:
if (ret.dtype.base_dtype !=
dtypes.as_dtype(preferred_dtype).base_dtype):
raise TypeError("convert_to_tensor did not convert to "
"the preferred dtype: %s vs %s " %
(ret.dtype.base_dtype,
dtypes.as_dtype(preferred_dtype).base_dtype))
if ret is None:
ret = conversion_func(value, dtype=dtype, name=name, as_ref=as_ref)
if ret is NotImplemented:
continue
is_acceptable_type = (
isinstance(ret, Tensor) or
(accept_composite_tensors and
isinstance(ret, composite_tensor.CompositeTensor)))
if not is_acceptable_type:
raise RuntimeError(
"%sConversion function %r for type %s returned non-Tensor: %r" %
(_error_prefix(name), conversion_func, base_type, ret))
if dtype and not dtype.is_compatible_with(ret.dtype):
raise RuntimeError(
"%sConversion function %r for type %s returned incompatible "
"dtype: requested = %s, actual = %s" %
(_error_prefix(name), conversion_func, base_type, dtype.name,
ret.dtype.name))
return ret
raise TypeError("%sCannot convert %r with type %s to Tensor: "
"no conversion function registered." %
(_error_prefix(name), value, unwrapped_type))
def internal_convert_n_to_tensor(values,
dtype=None,
name=None,
as_ref=False,
preferred_dtype=None,
ctx=None):
"""Converts `values` to a list of `Tensor` objects.
Args:
values: A list of objects that can be consumed by `tf.convert_to_tensor()`.
dtype: (Optional.) The required `DType` of the returned `Tensor` objects.
    name: (Optional.) A name prefix to be used when a new `Tensor` is created, in
which case element `i` will be given the name `name + '_' + i`.
as_ref: True if the caller wants the results as ref tensors.
preferred_dtype: Optional element type for the returned tensors, used when
dtype is None. In some cases, a caller may not have a dtype in mind when
converting to a tensor, so preferred_dtype can be used as a soft
preference. If the conversion to `preferred_dtype` is not possible, this
argument has no effect.
ctx: The value of context.context().
Returns:
A list of `Tensor` and/or `IndexedSlices` objects.
Raises:
TypeError: If no conversion function is registered for an element in
`values`.
RuntimeError: If a registered conversion function returns an invalid
value.
"""
if not isinstance(values, collections.Sequence):
raise TypeError("values must be a sequence.")
ret = []
if ctx is None:
ctx = context.context()
for i, value in enumerate(values):
n = None if name is None else "%s_%d" % (name, i)
ret.append(
internal_convert_to_tensor(
value,
dtype=dtype,
name=n,
as_ref=as_ref,
preferred_dtype=preferred_dtype,
ctx=ctx))
return ret
def convert_n_to_tensor(values, dtype=None, name=None, preferred_dtype=None):
"""Converts `values` to a list of `Tensor` objects.
Args:
values: A list of objects that can be consumed by `tf.convert_to_tensor()`.
dtype: (Optional.) The required `DType` of the returned `Tensor` objects.
    name: (Optional.) A name prefix to be used when a new `Tensor` is created, in
which case element `i` will be given the name `name + '_' + i`.
preferred_dtype: Optional element type for the returned tensors, used when
dtype is None. In some cases, a caller may not have a dtype in mind when
converting to a tensor, so preferred_dtype can be used as a soft
preference. If the conversion to `preferred_dtype` is not possible, this
argument has no effect.
Returns:
A list of `Tensor` and/or `IndexedSlices` objects.
Raises:
TypeError: If no conversion function is registered for an element in
`values`.
RuntimeError: If a registered conversion function returns an invalid
value.
"""
return internal_convert_n_to_tensor(
values=values,
dtype=dtype,
name=name,
preferred_dtype=preferred_dtype,
as_ref=False)
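# --- Editor's illustrative sketch (not part of the original module): the
# per-element name suffixing described in the docstring above. Assumes
# graph mode; helper name is hypothetical.
def _example_convert_n():  # hypothetical helper
  ts = convert_n_to_tensor([1, 2], dtype=dtypes.int32, name="x")
  # Element i is created under the name "x_<i>" (modulo uniquification),
  # e.g. ["x_0", "x_1"].
  return [t.op.name for t in ts]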
@tf_export(v1=["convert_to_tensor_or_indexed_slices"])
def convert_to_tensor_or_indexed_slices(value, dtype=None, name=None):
"""Converts the given object to a `Tensor` or an `IndexedSlices`.
If `value` is an `IndexedSlices` or `SparseTensor` it is returned
unmodified. Otherwise, it is converted to a `Tensor` using
`convert_to_tensor()`.
Args:
value: An `IndexedSlices`, `SparseTensor`, or an object that can be consumed
by `convert_to_tensor()`.
dtype: (Optional.) The required `DType` of the returned `Tensor` or
`IndexedSlices`.
name: (Optional.) A name to use if a new `Tensor` is created.
Returns:
A `Tensor`, `IndexedSlices`, or `SparseTensor` based on `value`.
Raises:
ValueError: If `dtype` does not match the element type of `value`.
"""
return internal_convert_to_tensor_or_indexed_slices(
value=value, dtype=dtype, name=name, as_ref=False)
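# --- Editor's illustrative sketch (not part of the original module):
# IndexedSlices inputs pass through unmodified, while plain Python data is
# converted to a dense Tensor. Assumes graph mode; names are hypothetical.
def _example_to_tensor_or_slices():  # hypothetical helper
  sl = IndexedSlices(values=convert_to_tensor([[1.0, 2.0]]),
                     indices=convert_to_tensor([0]))
  assert convert_to_tensor_or_indexed_slices(sl) is sl  # returned as-is
  t = convert_to_tensor_or_indexed_slices([1.0, 2.0])
  assert isinstance(t, Tensor)  # plain data becomes a dense Tensor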
def internal_convert_to_tensor_or_indexed_slices(value,
dtype=None,
name=None,
as_ref=False):
"""Converts the given object to a `Tensor` or an `IndexedSlices`.
If `value` is an `IndexedSlices` or `SparseTensor` it is returned
unmodified. Otherwise, it is converted to a `Tensor` using
`convert_to_tensor()`.
Args:
value: An `IndexedSlices`, `SparseTensor`, or an object that can be consumed
by `convert_to_tensor()`.
dtype: (Optional.) The required `DType` of the returned `Tensor` or
`IndexedSlices`.
name: (Optional.) A name to use if a new `Tensor` is created.
as_ref: True if the caller wants the results as ref tensors.
Returns:
A `Tensor`, `IndexedSlices`, or `SparseTensor` based on `value`.
Raises:
ValueError: If `dtype` does not match the element type of `value`.
"""
if isinstance(value, EagerTensor) and not context.executing_eagerly():
return internal_convert_to_tensor(
value, dtype=dtype, name=name, as_ref=as_ref)
elif isinstance(value, _TensorLike):
if dtype and not dtypes.as_dtype(dtype).is_compatible_with(value.dtype):
raise ValueError(
"Tensor conversion requested dtype %s for Tensor with dtype %s: %r" %
(dtypes.as_dtype(dtype).name, value.dtype.name, str(value)))
return value
else:
return internal_convert_to_tensor(
value, dtype=dtype, name=name, as_ref=as_ref)
def internal_convert_n_to_tensor_or_indexed_slices(values,
dtype=None,
name=None,
as_ref=False):
"""Converts `values` to a list of `Tensor` or `IndexedSlices` objects.
Any `IndexedSlices` or `SparseTensor` objects in `values` are returned
unmodified.
Args:
values: A list of `None`, `IndexedSlices`, `SparseTensor`, or objects that
can be consumed by `convert_to_tensor()`.
dtype: (Optional.) The required `DType` of the returned `Tensor` or
`IndexedSlices`.
    name: (Optional.) A name prefix to be used when a new `Tensor` is created, in
which case element `i` will be given the name `name + '_' + i`.
as_ref: True if the caller wants the results as ref tensors.
Returns:
A list of `Tensor`, `IndexedSlices`, `SparseTensor` and/or `None` objects.
Raises:
TypeError: If no conversion function is registered for an element in
`values`.
RuntimeError: If a registered conversion function returns an invalid
value.
"""
if not isinstance(values, collections.Sequence):
raise TypeError("values must be a sequence.")
ret = []
for i, value in enumerate(values):
if value is None:
ret.append(value)
else:
n = None if name is None else "%s_%d" % (name, i)
ret.append(
internal_convert_to_tensor_or_indexed_slices(
value, dtype=dtype, name=n, as_ref=as_ref))
return ret
def convert_n_to_tensor_or_indexed_slices(values, dtype=None, name=None):
"""Converts `values` to a list of `Output` or `IndexedSlices` objects.
Any `IndexedSlices` or `SparseTensor` objects in `values` are returned
unmodified.
Args:
values: A list of `None`, `IndexedSlices`, `SparseTensor`, or objects that
can be consumed by `convert_to_tensor()`.
    dtype: (Optional.) The required `DType` of the returned `Tensor` or
      `IndexedSlices`.
    name: (Optional.) A name prefix to be used when a new `Tensor` is created, in
which case element `i` will be given the name `name + '_' + i`.
Returns:
A list of `Tensor`, `IndexedSlices`, and/or `SparseTensor` objects.
Raises:
TypeError: If no conversion function is registered for an element in
`values`.
RuntimeError: If a registered conversion function returns an invalid
value.
"""
return internal_convert_n_to_tensor_or_indexed_slices(
values=values, dtype=dtype, name=name, as_ref=False)
def convert_to_tensor_or_composite(value, dtype=None, name=None):
"""Converts the given object to a `Tensor` or `CompositeTensor`.
If `value` is a `CompositeTensor` it is returned unmodified. Otherwise, it
is converted to a `Tensor` using `convert_to_tensor()`.
Args:
value: A `CompositeTensor` or an object that can be consumed by
`convert_to_tensor()`.
dtype: (Optional.) The required `DType` of the returned `Tensor` or
`CompositeTensor`.
name: (Optional.) A name to use if a new `Tensor` is created.
Returns:
A `Tensor` or `CompositeTensor`, based on `value`.
Raises:
ValueError: If `dtype` does not match the element type of `value`.
"""
return internal_convert_to_tensor_or_composite(
value=value, dtype=dtype, name=name, as_ref=False)
def internal_convert_to_tensor_or_composite(value,
dtype=None,
name=None,
as_ref=False):
"""Converts the given object to a `Tensor` or `CompositeTensor`.
If `value` is a `CompositeTensor` it is returned unmodified. Otherwise, it
is converted to a `Tensor` using `convert_to_tensor()`.
Args:
value: A `CompositeTensor`, or an object that can be consumed by
`convert_to_tensor()`.
dtype: (Optional.) The required `DType` of the returned `Tensor` or
`CompositeTensor`.
name: (Optional.) A name to use if a new `Tensor` is created.
as_ref: True if the caller wants the results as ref tensors.
Returns:
A `Tensor` or `CompositeTensor`, based on `value`.
Raises:
ValueError: If `dtype` does not match the element type of `value`.
"""
if isinstance(value, composite_tensor.CompositeTensor):
value_dtype = getattr(value, "dtype", None)
if dtype and not dtypes.as_dtype(dtype).is_compatible_with(value_dtype):
raise ValueError(
"Tensor conversion requested dtype %s for Tensor with dtype %s: %r" %
(dtypes.as_dtype(dtype).name, value.dtype.name, str(value)))
return value
else:
return internal_convert_to_tensor(
value,
dtype=dtype,
name=name,
as_ref=as_ref,
accept_composite_tensors=True)
def internal_convert_n_to_tensor_or_composite(values,
dtype=None,
name=None,
as_ref=False):
"""Converts `values` to a list of `Tensor` or `CompositeTensor` objects.
Any `CompositeTensor` objects in `values` are returned unmodified.
Args:
values: A list of `None`, `CompositeTensor`, or objects that can be consumed
by `convert_to_tensor()`.
dtype: (Optional.) The required `DType` of the returned `Tensor`s or
`CompositeTensor`s.
    name: (Optional.) A name prefix to be used when a new `Tensor` is created, in
which case element `i` will be given the name `name + '_' + i`.
as_ref: True if the caller wants the results as ref tensors.
Returns:
A list of `Tensor`, `CompositeTensor`, and/or `None` objects.
Raises:
TypeError: If no conversion function is registered for an element in
`values`.
RuntimeError: If a registered conversion function returns an invalid
value.
"""
if not isinstance(values, collections.Sequence):
raise TypeError("values must be a sequence.")
ret = []
for i, value in enumerate(values):
if value is None:
ret.append(value)
else:
n = None if name is None else "%s_%d" % (name, i)
ret.append(
internal_convert_to_tensor_or_composite(
value, dtype=dtype, name=n, as_ref=as_ref))
return ret
def convert_n_to_tensor_or_composite(values, dtype=None, name=None):
"""Converts `values` to a list of `Output` or `CompositeTensor` objects.
Any `CompositeTensor` objects in `values` are returned unmodified.
Args:
    values: A list of `None`, `CompositeTensor`, or objects that can be
consumed by `convert_to_tensor()`.
dtype: (Optional.) The required `DType` of the returned `Tensor`s or
`CompositeTensor`s.
    name: (Optional.) A name prefix to be used when a new `Tensor` is created, in
which case element `i` will be given the name `name + '_' + i`.
Returns:
A list of `Tensor` and/or `CompositeTensor` objects.
Raises:
TypeError: If no conversion function is registered for an element in
`values`.
RuntimeError: If a registered conversion function returns an invalid
value.
"""
return internal_convert_n_to_tensor_or_composite(
values=values, dtype=dtype, name=name, as_ref=False)
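# --- Editor's illustrative sketch (not part of the original module):
# CompositeTensor values such as SparseTensor pass through the composite
# conversion helpers unmodified. Assumes graph mode and a TF version in
# which SparseTensor is a CompositeTensor; helper name is hypothetical.
def _example_to_tensor_or_composite():  # hypothetical helper
  from tensorflow.python.framework import sparse_tensor
  sp = sparse_tensor.SparseTensor(
      indices=[[0, 0]], values=[1.0], dense_shape=[2, 2])
  assert convert_to_tensor_or_composite(sp) is sp  # returned as-is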
# TODO(josh11b): Add ctx argument to conversion_func() signature.
@tf_export("register_tensor_conversion_function")
def register_tensor_conversion_function(base_type,
conversion_func,
priority=100):
"""Registers a function for converting objects of `base_type` to `Tensor`.
The conversion function must have the following signature:
```python
def conversion_func(value, dtype=None, name=None, as_ref=False):
# ...
```
It must return a `Tensor` with the given `dtype` if specified. If the
conversion function creates a new `Tensor`, it should use the given
`name` if specified. All exceptions will be propagated to the caller.
The conversion function may return `NotImplemented` for some
inputs. In this case, the conversion process will continue to try
subsequent conversion functions.
If `as_ref` is true, the function must return a `Tensor` reference,
such as a `Variable`.
NOTE: The conversion functions will execute in order of priority,
followed by order of registration. To ensure that a conversion function
`F` runs before another conversion function `G`, ensure that `F` is
registered with a smaller priority than `G`.
Args:
base_type: The base type or tuple of base types for all objects that
`conversion_func` accepts.
conversion_func: A function that converts instances of `base_type` to
`Tensor`.
priority: Optional integer that indicates the priority for applying this
conversion function. Conversion functions with smaller priority values run
earlier than conversion functions with larger priority values. Defaults to
100.
Raises:
TypeError: If the arguments do not have the appropriate type.
"""
global _tensor_conversion_func_cache
with _tensor_conversion_func_lock:
if not (isinstance(base_type, type) or
(isinstance(base_type, tuple) and
all(isinstance(x, type) for x in base_type))):
raise TypeError("base_type must be a type or a tuple of types.")
if not callable(conversion_func):
raise TypeError("conversion_func must be callable.")
# context._context is checked so that we don't inadvertently create it.
# This is because enable_eager_execution will fail when called from the main
# function if the context._context is already created, and the
# register_tensor_conversion_function calls happen when the module is
# imported.
if context._context is not None and context.executing_eagerly(
) and isinstance(base_type, six.integer_types + (
float,
np.ndarray,
)):
# TODO(nareshmodi): consider setting a context variable which disables the
# fastpath instead.
raise TypeError(
"Cannot register conversions for numpy arrays, python number types "
"when executing eagerly.")
try:
funcs_at_priority = _tensor_conversion_func_registry[priority]
except KeyError:
funcs_at_priority = []
_tensor_conversion_func_registry[priority] = funcs_at_priority
funcs_at_priority.append((base_type, conversion_func))
_tensor_conversion_func_cache = {}
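# --- Editor's illustrative sketch (not part of the original module):
# registering a conversion function for a custom Python type so that it is
# accepted anywhere a Tensor is expected. All names are hypothetical, and
# calling this mutates the global registry, so it is shown uninvoked.
def _example_register_conversion():  # hypothetical helper
  class Celsius(object):  # hypothetical wrapper type
    def __init__(self, degrees):
      self.degrees = degrees
  def _celsius_to_tensor(value, dtype=None, name=None, as_ref=False):
    del as_ref  # Celsius values are never ref tensors
    return convert_to_tensor(value.degrees, dtype=dtype, name=name)
  register_tensor_conversion_function(Celsius, _celsius_to_tensor)
  # After registration, convert_to_tensor(Celsius(20.0)) yields a float32
  # scalar Tensor, and ops convert Celsius operands automatically.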
@tf_export("IndexedSlices")
class IndexedSlices(_TensorLike, composite_tensor.CompositeTensor):
"""A sparse representation of a set of tensor slices at given indices.
This class is a simple wrapper for a pair of `Tensor` objects:
* `values`: A `Tensor` of any dtype with shape `[D0, D1, ..., Dn]`.
* `indices`: A 1-D integer `Tensor` with shape `[D0]`.
An `IndexedSlices` is typically used to represent a subset of a larger
tensor `dense` of shape `[LARGE0, D1, .. , DN]` where `LARGE0 >> D0`.
The values in `indices` are the indices in the first dimension of
the slices that have been extracted from the larger tensor.
The dense tensor `dense` represented by an `IndexedSlices` `slices` has
```python
dense[slices.indices[i], :, :, :, ...] = slices.values[i, :, :, :, ...]
```
The `IndexedSlices` class is used principally in the definition of
gradients for operations that have sparse gradients
(e.g. `tf.gather`).
Contrast this representation with
`tf.SparseTensor`,
which uses multi-dimensional indices and scalar values.
"""
def __init__(self, values, indices, dense_shape=None):
"""Creates an `IndexedSlices`."""
if not isinstance(values, tensor_spec.TensorSpec):
_get_graph_from_inputs([values, indices, dense_shape])
self._values = values
self._indices = indices
self._dense_shape = dense_shape
@property
def values(self):
"""A `Tensor` containing the values of the slices."""
return self._values
@property
def indices(self):
"""A 1-D `Tensor` containing the indices of the slices."""
return self._indices
@property
def dense_shape(self):
"""A 1-D `Tensor` containing the shape of the corresponding dense tensor."""
return self._dense_shape
@property
def name(self):
"""The name of this `IndexedSlices`."""
return self.values.name
@property
def device(self):
"""The name of the device on which `values` will be produced, or `None`."""
return self.values.device
@property
def op(self):
"""The `Operation` that produces `values` as an output."""
return self.values.op
@property
def dtype(self):
"""The `DType` of elements in this tensor."""
return self.values.dtype
@property
def graph(self):
"""The `Graph` that contains the values, indices, and shape tensors."""
return self._values.graph
def __str__(self):
return "IndexedSlices(indices=%s, values=%s%s)" % (
self._indices, self._values,
(", dense_shape=%s" %
self._dense_shape) if self._dense_shape is not None else "")
def __neg__(self):
return IndexedSlices(-self.values, self.indices, self.dense_shape)
def _to_components(self):
if self._dense_shape is None:
return (self._values, self._indices)
else:
return (self._values, self._indices, self._dense_shape)
@classmethod
def _from_components(cls, components, metadata):
return cls(*components)
def _shape_invariant_to_components(self, shape=None):
if shape is None:
shape = self._values.shape
if self._dense_shape is None:
return (shape, shape[:1]) # values, indices
else:
# values, indices, dense_shape
return (shape, shape[:1], tensor_shape.TensorShape([shape.ndims]))
@property
def _is_graph_tensor(self):
return hasattr(self._values, "graph")
def consumers(self):
return self._consumers()
IndexedSlicesValue = collections.namedtuple(
"IndexedSlicesValue", ["values", "indices", "dense_shape"])
def _device_string(dev_spec):
if pydev.is_device_spec(dev_spec):
return dev_spec.to_string()
else:
return dev_spec
def _NodeDef(op_type, name, device=None, attrs=None): # pylint: disable=redefined-outer-name
"""Create a NodeDef proto.
Args:
op_type: Value for the "op" attribute of the NodeDef proto.
name: Value for the "name" attribute of the NodeDef proto.
device: string, device, or function from NodeDef to string. Value for the
"device" attribute of the NodeDef proto.
attrs: Optional dictionary where the key is the attribute name (a string)
and the value is the respective "attr" attribute of the NodeDef proto (an
AttrValue).
Returns:
A node_def_pb2.NodeDef protocol buffer.
"""
node_def = node_def_pb2.NodeDef()
node_def.op = compat.as_bytes(op_type)
node_def.name = compat.as_bytes(name)
if attrs is not None:
for k, v in six.iteritems(attrs):
node_def.attr[k].CopyFrom(v)
if device is not None:
if callable(device):
node_def.device = device(node_def)
else:
node_def.device = _device_string(device)
return node_def
# Copied from core/framework/node_def_util.cc
# TODO(mrry,josh11b): Consolidate this validation in C++ code.
_VALID_OP_NAME_REGEX = re.compile("^[A-Za-z0-9.][A-Za-z0-9_.\\-/]*$")
_VALID_SCOPE_NAME_REGEX = re.compile("^[A-Za-z0-9_.\\-/]*$")
def _create_c_op(graph, node_def, inputs, control_inputs):
"""Creates a TF_Operation.
Args:
graph: a `Graph`.
node_def: `node_def_pb2.NodeDef` for the operation to create.
inputs: A list of `Tensor`s (corresponding to scalar inputs) and lists of
`Tensor`s (corresponding to sequence inputs, e.g. "int64 * N",
"list(int64)"). The length of the list should be equal to the number of
inputs specified by this operation's op def.
control_inputs: A list of `Operation`s to set as control dependencies.
Returns:
A wrapped TF_Operation*.
"""
# pylint: disable=protected-access
op_desc = c_api.TF_NewOperation(graph._c_graph, compat.as_str(node_def.op),
compat.as_str(node_def.name))
if node_def.device:
c_api.TF_SetDevice(op_desc, compat.as_str(node_def.device))
# Add inputs
for op_input in inputs:
if isinstance(op_input, (list, tuple)):
c_api.TF_AddInputList(op_desc, [t._as_tf_output() for t in op_input])
else:
c_api.TF_AddInput(op_desc, op_input._as_tf_output())
# Add control inputs
for control_input in control_inputs:
c_api.TF_AddControlInput(op_desc, control_input._c_op)
# pylint: enable=protected-access
# Add attrs
for name, attr_value in node_def.attr.items():
serialized = attr_value.SerializeToString()
# TODO(skyewm): this creates and deletes a new TF_Status for every attr.
# It might be worth creating a convenient way to re-use the same status.
c_api.TF_SetAttrValueProto(op_desc, compat.as_str(name), serialized)
try:
c_op = c_api.TF_FinishOperation(op_desc)
except errors.InvalidArgumentError as e:
# Convert to ValueError for backwards compatibility.
raise ValueError(str(e))
return c_op
@tf_export("Operation")
class Operation(object):
"""Represents a graph node that performs computation on tensors.
An `Operation` is a node in a TensorFlow `Graph` that takes zero or
more `Tensor` objects as input, and produces zero or more `Tensor`
objects as output. Objects of type `Operation` are created by
calling a Python op constructor (such as
`tf.matmul`)
or `tf.Graph.create_op`.
For example `c = tf.matmul(a, b)` creates an `Operation` of type
"MatMul" that takes tensors `a` and `b` as input, and produces `c`
as output.
After the graph has been launched in a session, an `Operation` can
be executed by passing it to
`tf.Session.run`.
`op.run()` is a shortcut for calling
`tf.compat.v1.get_default_session().run(op)`.
"""
def __init__(self,
node_def,
g,
inputs=None,
output_types=None,
control_inputs=None,
input_types=None,
original_op=None,
op_def=None):
r"""Creates an `Operation`.
NOTE: This constructor validates the name of the `Operation` (passed
as `node_def.name`). Valid `Operation` names match the following
regular expression:
[A-Za-z0-9.][A-Za-z0-9_.\\-/]*
Args:
node_def: `node_def_pb2.NodeDef`. `NodeDef` for the `Operation`. Used for
attributes of `node_def_pb2.NodeDef`, typically `name`, `op`, and
`device`. The `input` attribute is irrelevant here, as it will be
computed from the `inputs` argument when the operation is created.
g: `Graph`. The parent graph.
inputs: list of `Tensor` objects. The inputs to this `Operation`.
output_types: list of `DType` objects. List of the types of the `Tensors`
computed by this operation. The length of this list indicates the
number of output endpoints of the `Operation`.
control_inputs: list of operations or tensors on which to have a control
dependency.
input_types: List of `DType` objects representing the types of the tensors
accepted by the `Operation`. By default uses `[x.dtype.base_dtype for x
in inputs]`. Operations that expect reference-typed inputs must specify
these explicitly.
original_op: Optional. Used to associate the new `Operation` with an
existing `Operation` (for example, a replica with the op that was
replicated).
op_def: Optional. The `op_def_pb2.OpDef` proto that describes the op type
that this `Operation` represents.
Raises:
TypeError: if control inputs are not Operations or Tensors,
or if `node_def` is not a `NodeDef`,
or if `g` is not a `Graph`,
or if `inputs` are not tensors,
or if `inputs` and `input_types` are incompatible.
ValueError: if the `node_def` name is not valid.
"""
# For internal use only: `node_def` can be set to a TF_Operation to create
# an Operation for that op. This is useful for creating Operations for ops
# indirectly created by C API methods, e.g. the ops created by
# TF_ImportGraphDef. When `node_def` is a TF_Operation, all optional fields
# should be None.
if isinstance(node_def, node_def_pb2.NodeDef):
if node_def.ByteSize() >= (1 << 31) or node_def.ByteSize() < 0:
raise ValueError(
    "Cannot create an Operation whose NodeDef is larger than 2GB.")
if not _VALID_OP_NAME_REGEX.match(node_def.name):
raise ValueError("'%s' is not a valid node name" % node_def.name)
c_op = None
elif type(node_def).__name__ == "SwigPyObject":
assert inputs is None
assert output_types is None
assert control_inputs is None
assert input_types is None
assert original_op is None
assert op_def is None
c_op = node_def
else:
raise TypeError("node_def needs to be a NodeDef: %s" % node_def)
if not isinstance(g, Graph):
raise TypeError("g needs to be a Graph: %s" % g)
self._graph = g
if inputs is None:
inputs = []
elif not isinstance(inputs, list):
raise TypeError("inputs needs to be a list of Tensors: %s" % inputs)
for a in inputs:
if not isinstance(a, Tensor):
raise TypeError("input needs to be a Tensor: %s" % a)
if input_types is None:
input_types = [i.dtype.base_dtype for i in inputs]
else:
if not all(
x.is_compatible_with(i.dtype) for i, x in zip(inputs, input_types)):
raise TypeError("In op '%s', input types (%s) are not compatible "
"with expected types (%s)" %
(node_def.name, [i.dtype for i in inputs], input_types))
# Build the list of control inputs.
control_input_ops = []
if control_inputs:
for c in control_inputs:
control_op = None
if isinstance(c, Operation):
control_op = c
elif isinstance(c, (Tensor, IndexedSlices)):
control_op = c.op
else:
raise TypeError("Control input must be an Operation, "
"a Tensor, or IndexedSlices: %s" % c)
control_input_ops.append(control_op)
# This will be set by self.inputs.
self._inputs_val = None
# pylint: disable=protected-access
self._id_value = self._graph._next_id()
self._original_op = original_op
self._traceback = tf_stack.extract_stack()
# List of _UserDevSpecs holding code location of device context manager
# invocations and the users original argument to them.
self._device_code_locations = None
# Dict mapping op name to file and line information for op colocation
# context managers.
self._colocation_code_locations = None
self._control_flow_context = self.graph._get_control_flow_context()
# pylint: enable=protected-access
# Initialize self._c_op.
if c_op:
self._c_op = c_op
else:
if op_def is None:
op_def = self._graph._get_op_def(node_def.op)
# TODO(skyewm): op_def_library.apply_op() flattens the incoming inputs.
# Refactor so we don't have to do this here.
grouped_inputs = self._reconstruct_sequence_inputs(
op_def, inputs, node_def.attr)
self._c_op = _create_c_op(self._graph, node_def, grouped_inputs,
control_input_ops)
# Initialize self._outputs.
num_outputs = c_api.TF_OperationNumOutputs(self._c_op)
output_types = [
c_api.TF_OperationOutputType(c_api_util.tf_output(self._c_op, i))
for i in range(num_outputs)
]
self._outputs = [
Tensor(self, i, output_type)
for i, output_type in enumerate(output_types)
]
self._graph._add_op(self) # pylint: disable=protected-access
if not c_op:
self._control_flow_post_processing()
def _control_flow_post_processing(self):
"""Add this op to its control flow context.
This may add new ops and change this op's inputs. self.inputs must be
available before calling this method.
"""
for input_tensor in self.inputs:
control_flow_util.CheckInputFromValidContext(self, input_tensor.op)
if self._control_flow_context is not None:
self._control_flow_context.AddOp(self)
def _reconstruct_sequence_inputs(self, op_def, inputs, attrs):
"""Regroups a flat list of input tensors into scalar and sequence inputs.
Args:
op_def: The `op_def_pb2.OpDef` (for knowing the input types)
inputs: a list of input `Tensor`s to the op.
attrs: mapping from attr name to `attr_value_pb2.AttrValue` (these define
how long each sequence is)
Returns:
A list of `Tensor`s (corresponding to scalar inputs) and lists of
`Tensor`s (corresponding to sequence inputs).
"""
grouped_inputs = []
i = 0
for input_arg in op_def.input_arg:
if input_arg.number_attr:
input_len = attrs[input_arg.number_attr].i
is_sequence = True
elif input_arg.type_list_attr:
input_len = len(attrs[input_arg.type_list_attr].list.type)
is_sequence = True
else:
input_len = 1
is_sequence = False
if is_sequence:
grouped_inputs.append(inputs[i:i + input_len])
else:
grouped_inputs.append(inputs[i])
i += input_len
assert i == len(inputs)
return grouped_inputs
def colocation_groups(self):
"""Returns the list of colocation groups of the op."""
default_colocation_group = [compat.as_bytes("loc:@%s" % self.name)]
try:
class_attr = self.get_attr("_class")
except ValueError:
# This op has no explicit colocation group, so it is itself its
# own root of a colocation group.
return default_colocation_group
attr_groups = [
class_name for class_name in class_attr
if class_name.startswith(b"loc:@")
]
# If there are no colocation groups in the explicit _class field,
# return the default colocation group.
return attr_groups if attr_groups else default_colocation_group
def values(self):
"""DEPRECATED: Use outputs."""
return tuple(self.outputs)
def _get_control_flow_context(self):
"""Returns the control flow context of this op.
Returns:
A context object.
"""
return self._control_flow_context
def _set_control_flow_context(self, ctx):
"""Sets the current control flow context of this op.
Args:
ctx: a context object.
"""
self._control_flow_context = ctx
@property
def name(self):
"""The full name of this operation."""
return c_api.TF_OperationName(self._c_op)
@property
def _id(self):
"""The unique integer id of this operation."""
return self._id_value
@property
def device(self):
"""The name of the device to which this op has been assigned, if any.
Returns:
The string name of the device to which this op has been
assigned, or an empty string if it has not been assigned to a
device.
"""
return c_api.TF_OperationDevice(self._c_op)
@property
def _device_assignments(self):
"""Code locations for device context managers active at op creation.
This property will return a list of traceable_stack.TraceableObject
instances where .obj is a string representing the assigned device
(or information about the function that would be applied to this op
to compute the desired device) and the filename and lineno members
record the location of the relevant device context manager.
For example, suppose file_a contained these lines:
file_a.py:
15: with tf.device('/gpu:0'):
16: node_b = tf.constant(4, name='NODE_B')
Then a TraceableObject t_obj representing the device context manager
would have these member values:
t_obj.obj -> '/gpu:0'
t_obj.filename = 'file_a.py'
t_obj.lineno = 15
and node_b.op._device_assignments would return the list [t_obj].
Returns:
[str: traceable_stack.TraceableObject, ...] as per this method's
description, above.
"""
return self._device_code_locations or []
@property
def _colocation_dict(self):
"""Code locations for colocation context managers active at op creation.
This property will return a dictionary for which the keys are nodes with
which this Operation is colocated, and for which the values are
traceable_stack.TraceableObject instances. The TraceableObject instances
record the location of the relevant colocation context manager but have the
"obj" field set to None to prevent leaking private data.
For example, suppose file_a contained these lines:
file_a.py:
14: node_a = tf.constant(3, name='NODE_A')
15: with tf.compat.v1.colocate_with(node_a):
16: node_b = tf.constant(4, name='NODE_B')
Then a TraceableObject t_obj representing the colocation context manager
would have these member values:
t_obj.obj -> None
t_obj.filename = 'file_a.py'
t_obj.lineno = 15
and node_b.op._colocation_dict would return the dictionary
{ 'NODE_A': t_obj }
Returns:
{str: traceable_stack.TraceableObject} as per this method's description,
above.
"""
locations_dict = self._colocation_code_locations or {}
return locations_dict.copy()
@property
def _output_types(self):
"""List this operation's output types.
Returns:
List of the types of the Tensors computed by this operation.
Each element in the list is an integer whose value is one of
the TF_DataType enums defined in c_api.h
The length of this list indicates the number of output endpoints
of the operation.
"""
num_outputs = c_api.TF_OperationNumOutputs(self._c_op)
output_types = [
c_api.TF_OperationOutputType(self._tf_output(i))
for i in xrange(num_outputs)
]
# In all the tests we have, the output_types passed into
# Operation.__init__ are lists of ints (which is illegal according
# to the docstring), but the input_types are instances of DType.
# This extra assert is to catch if we ever use DType for output_types.
if output_types:
assert isinstance(output_types[0], int)
return output_types
def _tf_output(self, output_idx):
"""Create and return a new TF_Output for output_idx'th output of this op."""
tf_output = c_api.TF_Output()
tf_output.oper = self._c_op
tf_output.index = output_idx
return tf_output
def _tf_input(self, input_idx):
"""Create and return a new TF_Input for input_idx'th input of this op."""
tf_input = c_api.TF_Input()
tf_input.oper = self._c_op
tf_input.index = input_idx
return tf_input
def _set_device(self, device): # pylint: disable=redefined-outer-name
"""Set the device of this operation.
Args:
device: string or device. The device to set.
"""
self._set_device_from_string(compat.as_str(_device_string(device)))
def _set_device_from_string(self, device_str):
"""Fast path to set device if the type is known to be a string.
This function is called frequently enough during graph construction that
there are non-trivial performance gains if the caller can guarantee that
the specified device is already a string.
Args:
device_str: A string specifying where to place this op.
"""
c_api.SetRequestedDevice(
self._graph._c_graph, # pylint: disable=protected-access
self._c_op, # pylint: disable=protected-access
device_str)
def _update_input(self, index, tensor):
"""Update the input to this operation at the given index.
NOTE: This is for TF internal use only. Please don't use it.
Args:
index: the index of the input to update.
tensor: the Tensor to be used as the input at the given index.
Raises:
TypeError: if tensor is not a Tensor,
or if input tensor type is not convertible to dtype.
ValueError: if the Tensor is from a different graph.
"""
if not isinstance(tensor, Tensor):
raise TypeError("tensor must be a Tensor: %s" % tensor)
_assert_same_graph(self, tensor)
# Reset cached inputs.
self._inputs_val = None
c_api.UpdateEdge(
self._graph._c_graph, # pylint: disable=protected-access
tensor._as_tf_output(), # pylint: disable=protected-access
self._tf_input(index))
def _add_while_inputs(self, tensors):
"""See AddWhileInputHack in python_api.h.
NOTE: This is for TF internal use only. Please don't use it.
Args:
tensors: list of Tensors
Raises:
TypeError: if tensor is not a Tensor,
or if input tensor type is not convertible to dtype.
ValueError: if the Tensor is from a different graph.
"""
for tensor in tensors:
if not isinstance(tensor, Tensor):
raise TypeError("tensor must be a Tensor: %s" % tensor)
_assert_same_graph(self, tensor)
# Reset cached inputs.
self._inputs_val = None
c_api.AddWhileInputHack(
self._graph._c_graph, # pylint: disable=protected-access
tensor._as_tf_output(), # pylint: disable=protected-access
self._c_op)
def _add_control_inputs(self, ops):
"""Add a list of new control inputs to this operation.
Args:
ops: the list of Operations to add as control input.
Raises:
TypeError: if ops is not a list of Operations.
ValueError: if any op in ops is from a different graph.
"""
for op in ops:
if not isinstance(op, Operation):
raise TypeError("op must be an Operation: %s" % op)
c_api.AddControlInput(self._graph._c_graph, self._c_op, op._c_op) # pylint: disable=protected-access
def _add_control_input(self, op):
"""Add a new control input to this operation.
Args:
op: the Operation to add as control input.
Raises:
TypeError: if op is not an Operation.
ValueError: if op is from a different graph.
"""
if not isinstance(op, Operation):
raise TypeError("op must be an Operation: %s" % op)
c_api.AddControlInput(self._graph._c_graph, self._c_op, op._c_op) # pylint: disable=protected-access
def _remove_all_control_inputs(self):
"""Removes any control inputs to this operation."""
c_api.RemoveAllControlInputs(self._graph._c_graph, self._c_op) # pylint: disable=protected-access
def _add_outputs(self, types, shapes):
"""Adds new Tensors to self.outputs.
Note: this is generally unsafe to use. This is used in certain situations in
conjunction with _set_type_list_attr.
Args:
  types: list of DTypes.
  shapes: list of TensorShapes.
"""
assert len(types) == len(shapes)
orig_num_outputs = len(self.outputs)
for i in range(len(types)):
t = Tensor(self, orig_num_outputs + i, types[i])
self._outputs.append(t)
t.set_shape(shapes[i])
def __str__(self):
return str(self.node_def)
def __repr__(self):
return "<tf.Operation '%s' type=%s>" % (self.name, self.type)
@property
def outputs(self):
"""The list of `Tensor` objects representing the outputs of this op."""
return self._outputs
# pylint: disable=protected-access
class _InputList(object):
"""Immutable input list wrapper."""
def __init__(self, inputs):
self._inputs = inputs
def __iter__(self):
return iter(self._inputs)
def __len__(self):
return len(self._inputs)
def __bool__(self):
return bool(self._inputs)
# Python 3 wants __bool__, Python 2.7 wants __nonzero__
__nonzero__ = __bool__
def __getitem__(self, i):
return self._inputs[i]
# pylint: enable=protected-access
@property
def inputs(self):
"""The list of `Tensor` objects representing the data inputs of this op."""
if self._inputs_val is None:
tf_outputs = c_api.GetOperationInputs(self._c_op)
# pylint: disable=protected-access
retval = [
self.graph._get_tensor_by_tf_output(tf_output)
for tf_output in tf_outputs
]
# pylint: enable=protected-access
self._inputs_val = Operation._InputList(retval)
return self._inputs_val
@property
def _inputs(self):
logging.warning("Operation._inputs is private, use Operation.inputs "
"instead. Operation._inputs will eventually be removed.")
return self.inputs
@_inputs.setter
def _inputs(self, value):
raise ValueError("Cannot assign _inputs")
@property
def _input_types(self):
num_inputs = c_api.TF_OperationNumInputs(self._c_op)
input_types = [
dtypes.as_dtype(c_api.TF_OperationInputType(self._tf_input(i)))
for i in xrange(num_inputs)
]
return input_types
@_input_types.setter
def _input_types(self, value):
raise ValueError("Cannot assign _input_types")
@property
def control_inputs(self):
"""The `Operation` objects on which this op has a control dependency.
Before this op is executed, TensorFlow will ensure that the
operations in `self.control_inputs` have finished executing. This
mechanism can be used to run ops sequentially for performance
reasons, or to ensure that the side effects of an op are observed
in the correct order.
Returns:
A list of `Operation` objects.
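For example, a minimal graph-mode sketch:

```python
with tf.Graph().as_default():
  a = tf.constant(1.0).op
  with tf.control_dependencies([a]):
    b = tf.constant(2.0).op
  assert a in b.control_inputs
```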
"""
control_c_ops = c_api.TF_OperationGetControlInputs_wrapper(self._c_op)
# pylint: disable=protected-access
return [
self.graph._get_operation_by_name_unsafe(c_api.TF_OperationName(c_op))
for c_op in control_c_ops
]
# pylint: enable=protected-access
@property
def _control_outputs(self):
"""The `Operation` objects which have a control dependency on this op.
Before any of the ops in `self._control_outputs` can execute, TensorFlow
will ensure that `self` has finished executing.
Returns:
A list of `Operation` objects.
"""
control_c_ops = c_api.TF_OperationGetControlOutputs_wrapper(self._c_op)
# pylint: disable=protected-access
return [
self.graph._get_operation_by_name_unsafe(c_api.TF_OperationName(c_op))
for c_op in control_c_ops
]
# pylint: enable=protected-access
@property
def _control_inputs(self):
logging.warning("Operation._control_inputs is private, use "
"Operation.control_inputs instead. "
"Operation._control_inputs will eventually be removed.")
return self.control_inputs
@_control_inputs.setter
def _control_inputs(self, value):
logging.warning("Operation._control_inputs is private, use "
"Operation.control_inputs instead. "
"Operation._control_inputs will eventually be removed.")
# Copy value because it may be self._control_inputs_val (in particular if
# this is called from self._control_inputs += ...), and we don't want to
# clear value below.
value = copy.copy(value)
self._remove_all_control_inputs()
self._add_control_inputs(value)
@property
def type(self):
"""The type of the op (e.g. `"MatMul"`)."""
return c_api.TF_OperationOpType(self._c_op)
@property
def graph(self):
"""The `Graph` that contains this operation."""
return self._graph
@property
def node_def(self):
# pylint: disable=line-too-long
"""Returns the `NodeDef` representation of this operation.
Returns:
A
[`NodeDef`](https://www.tensorflow.org/code/tensorflow/core/framework/node_def.proto)
protocol buffer.
"""
# pylint: enable=line-too-long
with c_api_util.tf_buffer() as buf:
c_api.TF_OperationToNodeDef(self._c_op, buf)
data = c_api.TF_GetBuffer(buf)
node_def = node_def_pb2.NodeDef()
node_def.ParseFromString(compat.as_bytes(data))
return node_def
@property
def _node_def(self):
logging.warning("Operation._node_def is private, use Operation.node_def "
"instead. Operation._node_def will eventually be removed.")
return self.node_def
@property
def op_def(self):
# pylint: disable=line-too-long
"""Returns the `OpDef` proto that represents the type of this op.
Returns:
An
[`OpDef`](https://www.tensorflow.org/code/tensorflow/core/framework/op_def.proto)
protocol buffer.
"""
# pylint: enable=line-too-long
return self._graph._get_op_def(self.type)
@property
def _op_def(self):
logging.warning("Operation._op_def is private, use Operation.op_def "
"instead. Operation._op_def will eventually be removed.")
return self.op_def
@property
def traceback(self):
"""Returns the call stack from when this operation was constructed."""
return tf_stack.convert_stack(self._traceback)
@property
def traceback_with_start_lines(self):
"""Same as traceback but includes start line of function definition.
Returns:
A list of 5-tuples (filename, lineno, name, code, func_start_lineno).
"""
return tf_stack.convert_stack(
self._traceback, include_func_start_lineno=True)
def _set_attr(self, attr_name, attr_value):
"""Private method used to set an attribute in the node_def."""
buf = c_api.TF_NewBufferFromString(
compat.as_bytes(attr_value.SerializeToString()))
try:
# pylint: disable=protected-access
c_api.SetAttr(self._graph._c_graph, self._c_op, attr_name, buf)
# pylint: enable=protected-access
finally:
c_api.TF_DeleteBuffer(buf)
def _set_func_attr(self, attr_name, func_name):
"""Private method used to set a function attribute in the node_def."""
func = attr_value_pb2.NameAttrList(name=func_name)
self._set_attr(attr_name, attr_value_pb2.AttrValue(func=func))
def _set_func_list_attr(self, attr_name, func_names):
"""Private method used to set a list(function) attribute in the node_def."""
funcs = [attr_value_pb2.NameAttrList(name=func_name)
for func_name in func_names]
funcs_list = attr_value_pb2.AttrValue.ListValue(func=funcs)
self._set_attr(attr_name, attr_value_pb2.AttrValue(list=funcs_list))
def _set_type_list_attr(self, attr_name, types):
"""Private method used to set a list(type) attribute in the node_def."""
if not types:
return
if isinstance(types[0], dtypes.DType):
types = [dt.as_datatype_enum for dt in types]
types_list = attr_value_pb2.AttrValue.ListValue(type=types)
self._set_attr(attr_name, attr_value_pb2.AttrValue(list=types_list))
def _set_shape_list_attr(self, attr_name, shapes):
"""Private method used to set a list(shape) attribute in the node_def."""
shapes = [s.as_proto() for s in shapes]
shapes_list = attr_value_pb2.AttrValue.ListValue(shape=shapes)
self._set_attr(attr_name, attr_value_pb2.AttrValue(list=shapes_list))
def _clear_attr(self, attr_name):
"""Private method used to clear an attribute in the node_def."""
# pylint: disable=protected-access
c_api.ClearAttr(self._graph._c_graph, self._c_op, attr_name)
# pylint: enable=protected-access
def get_attr(self, name):
"""Returns the value of the attr of this op with the given `name`.
Args:
name: The name of the attr to fetch.
Returns:
The value of the attr, as a Python object.
Raises:
ValueError: If this op does not have an attr with the given `name`.
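For example, a minimal graph-mode sketch:

```python
with tf.Graph().as_default():
  op = tf.constant(1.0, name="one").op
  assert op.get_attr("dtype") == tf.float32
```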
"""
fields = ("s", "i", "f", "b", "type", "shape", "tensor", "func")
try:
with c_api_util.tf_buffer() as buf:
c_api.TF_OperationGetAttrValueProto(self._c_op, name, buf)
data = c_api.TF_GetBuffer(buf)
except errors.InvalidArgumentError as e:
# Convert to ValueError for backwards compatibility.
raise ValueError(str(e))
x = attr_value_pb2.AttrValue()
x.ParseFromString(data)
oneof_value = x.WhichOneof("value")
if oneof_value is None:
return []
if oneof_value == "list":
for f in fields:
if getattr(x.list, f):
if f == "type":
return [dtypes.as_dtype(t) for t in x.list.type]
else:
return list(getattr(x.list, f))
return []
if oneof_value == "type":
return dtypes.as_dtype(x.type)
assert oneof_value in fields, "Unsupported field type in " + str(x)
return getattr(x, oneof_value)
def run(self, feed_dict=None, session=None):
"""Runs this operation in a `Session`.
Calling this method will execute all preceding operations that
produce the inputs needed for this operation.
*N.B.* Before invoking `Operation.run()`, its graph must have been
launched in a session, and either a default session must be
available, or `session` must be specified explicitly.
Args:
feed_dict: A dictionary that maps `Tensor` objects to feed values. See
`tf.Session.run` for a description of the valid feed values.
session: (Optional.) The `Session` to be used to run this operation. If
none, the default session will be used.
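For example, a minimal sketch using a TF1-style session:

```python
with tf.compat.v1.Session():
  init = tf.compat.v1.global_variables_initializer()
  init.run()  # equivalent to tf.compat.v1.get_default_session().run(init)
```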
"""
_run_using_default_session(self, feed_dict, self.graph, session)
_gradient_registry = registry.Registry("gradient")
@tf_export("RegisterGradient")
class RegisterGradient(object):
"""A decorator for registering the gradient function for an op type.
This decorator is only used when defining a new op type. For an op
with `m` inputs and `n` outputs, the gradient function is a function
that takes the original `Operation` and `n` `Tensor` objects
(representing the gradients with respect to each output of the op),
and returns `m` `Tensor` objects (representing the partial gradients
with respect to each input of the op).
For example, assuming that operations of type `"Sub"` take two
inputs `x` and `y`, and return a single output `x - y`, the
following gradient function would be registered:
```python
@tf.RegisterGradient("Sub")
def _sub_grad(unused_op, grad):
return grad, tf.negative(grad)
```
The decorator argument `op_type` is the string type of an
operation. This corresponds to the `OpDef.name` field for the proto
that defines the operation.
"""
def __init__(self, op_type):
"""Creates a new decorator with `op_type` as the Operation type.
Args:
op_type: The string type of an operation. This corresponds to the
`OpDef.name` field for the proto that defines the operation.
"""
if not isinstance(op_type, six.string_types):
raise TypeError("op_type must be a string")
self._op_type = op_type
def __call__(self, f):
"""Registers the function `f` as gradient function for `op_type`."""
_gradient_registry.register(f, self._op_type)
return f
@deprecation.deprecated_endpoints("NotDifferentiable", "NoGradient")
@tf_export("no_gradient", v1=["no_gradient", "NotDifferentiable", "NoGradient"])
def no_gradient(op_type):
"""Specifies that ops of type `op_type` is not differentiable.
This function should *not* be used for operations that have a
well-defined gradient that is not yet implemented.
This function is only used when defining a new op type. It may be
used for ops such as `tf.size()` that are not differentiable. For
example:
```python
tf.no_gradient("Size")
```
The gradient computed for 'op_type' will then propagate zeros.
For ops whose gradient is well-defined but not yet implemented,
no declaration should be made, and an error *must* be thrown if
an attempt to request the gradient is made.
Args:
op_type: The string type of an operation. This corresponds to the
`OpDef.name` field for the proto that defines the operation.
Raises:
TypeError: If `op_type` is not a string.
"""
if not isinstance(op_type, six.string_types):
raise TypeError("op_type must be a string")
_gradient_registry.register(None, op_type)
# Aliases for the old names, will be eventually removed.
NoGradient = no_gradient
NotDifferentiable = no_gradient
def get_gradient_function(op):
"""Returns the function that computes gradients for "op"."""
if not op.inputs:
return None
try:
op_type = op.get_attr("_gradient_op_type")
except ValueError:
op_type = op.type
return _gradient_registry.lookup(op_type)
_shape_registry = registry.Registry("shape functions")
_default_shape_function_registry = registry.Registry("default shape functions")
# These are set to common_shapes.call_cpp_shape_fn by op generated code
# (generated by python_op_gen.cc).
# It is set outside ops.py to avoid a circular dependency.
_call_cpp_shape_fn = None
_call_cpp_shape_fn_and_require_op = None
def _set_call_cpp_shape_fn(call_cpp_shape_fn):
"""Sets default shape fns from passed common_shapes.call_cpp_shape_fn."""
global _call_cpp_shape_fn, _call_cpp_shape_fn_and_require_op
if _call_cpp_shape_fn:
return # already registered
def call_without_requiring(op):
return call_cpp_shape_fn(op, require_shape_fn=False)
_call_cpp_shape_fn = call_without_requiring
def call_with_requiring(op):
return call_cpp_shape_fn(op, require_shape_fn=True)
_call_cpp_shape_fn_and_require_op = call_with_requiring
class RegisterShape(object):
"""No longer used.
Was: A decorator for registering a shape function.
Shape functions must now be registered via the SetShapeFn on the
original Op specification in C++.
"""
def __init__(self, op_type):
"""Saves the `op_type` as the `Operation` type."""
if not isinstance(op_type, six.string_types):
raise TypeError("op_type must be a string")
self._op_type = op_type
def __call__(self, f):
"""Registers "f" as the shape function for "op_type"."""
if f is None:
assert _call_cpp_shape_fn
# None is a special "weak" value that provides a default shape function,
# and can be overridden by a non-None registration.
try:
_default_shape_function_registry.register(_call_cpp_shape_fn,
self._op_type)
except KeyError:
# Ignore duplicate registrations of the weak value. This can
# occur if the op library input to wrapper generation
# inadvertently links in one or more of the standard op
# libraries.
pass
else:
_shape_registry.register(f, self._op_type)
return f
def set_shape_and_handle_data_for_outputs(_):
"""No op. TODO(b/74620627): Remove this."""
pass
class OpStats(object):
"""A holder for statistics about an operator.
This class holds information about the resource requirements for an op,
including the size of its weight parameters on disk and how many FLOPs it
requires to execute forward inference.
If you define a new operation, you can create a function that will return a
set of information about its usage of the CPU and disk space when serialized.
The function itself takes a Graph object that's been set up so you can call
methods like get_tensor_by_name to help calculate the results, and a NodeDef
argument.
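For example, a minimal sketch of accumulating statistics:

```python
stats = OpStats("flops", 1024)
stats += OpStats("flops", 512)  # OpStats supports in-place addition
assert stats.value == 1536
```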
"""
def __init__(self, statistic_type, value=None):
"""Sets up the initial placeholders for the statistics."""
self.statistic_type = statistic_type
self.value = value
@property
def statistic_type(self):
return self._statistic_type
@statistic_type.setter
def statistic_type(self, statistic_type):
self._statistic_type = statistic_type
@property
def value(self):
return self._value
@value.setter
def value(self, value):
self._value = value
def __iadd__(self, other):
if other.statistic_type != self.statistic_type:
raise ValueError("Can't add an OpStat of type %s to one of %s." %
(self.statistic_type, other.statistic_type))
if self.value is None:
self.value = other.value
elif other.value is not None:
self._value += other.value
return self
_stats_registry = registry.Registry("statistical functions")
class RegisterStatistics(object):
"""A decorator for registering the statistics function for an op type.
This decorator registers a function for an op type that reports the
resources used by an instance of that operator, in the form of an
OpStats object.
Well-known types of statistics include these so far:
- flops: When running a graph, the bulk of the computation happens doing
numerical calculations like matrix multiplications. This type allows a node
to return how many floating-point operations it takes to complete. The
total number of FLOPs for a graph is a good guide to its expected latency.
You can add your own statistics just by picking a new type string, registering
functions for the ops you care about, and then calling get_stats_for_node_def.
If a statistic for an op is registered multiple times, a KeyError will be
raised.
Since statistics are counted on a per-op basis, this mechanism is not
suitable for model parameters (capacity), which should be counted only
once even when shared by multiple ops (e.g. the weights of an RNN).
For example, you can define a new metric called doohickey for a Foo operation
by placing this in your code:
```python
@ops.RegisterStatistics("Foo", "doohickey")
def _calc_foo_bojangles(unused_graph, unused_node_def):
return ops.OpStats("doohickey", 20)
```
Then in client code you can retrieve the value by making this call:
```python
doohickey = ops.get_stats_for_node_def(graph, node_def, "doohickey")
```
If the NodeDef is for an op with a registered doohickey function, you'll get
back the calculated amount in doohickey.value, or None if it's not defined.
"""
def __init__(self, op_type, statistic_type):
"""Saves the `op_type` as the `Operation` type."""
if not isinstance(op_type, six.string_types):
raise TypeError("op_type must be a string.")
if "," in op_type:
raise TypeError("op_type must not contain a comma.")
self._op_type = op_type
if not isinstance(statistic_type, six.string_types):
raise TypeError("statistic_type must be a string.")
if "," in statistic_type:
raise TypeError("statistic_type must not contain a comma.")
self._statistic_type = statistic_type
def __call__(self, f):
"""Registers "f" as the statistics function for "op_type"."""
_stats_registry.register(f, self._op_type + "," + self._statistic_type)
return f
def get_stats_for_node_def(graph, node, statistic_type):
"""Looks up the node's statistics function in the registry and calls it.
This function takes a Graph object and a NodeDef from a GraphDef, and if
there's an associated statistics method, calls it and returns a result. If no
function has been registered for the particular node type, it returns an empty
statistics object.
Args:
graph: A Graph object that's been set up with the node's graph.
node: A NodeDef describing the operator.
statistic_type: A string identifying the statistic we're interested in.
Returns:
An OpStats object containing information about resource usage.
"""
try:
stats_func = _stats_registry.lookup(node.op + "," + statistic_type)
result = stats_func(graph, node)
except LookupError:
result = OpStats(statistic_type)
return result
def name_from_scope_name(name):
"""Returns the name of an op given the name of its scope.
Args:
name: the name of the scope.
Returns:
the name of the op (equal to scope name minus any trailing slash).
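For example:

```python
name_from_scope_name("foo/bar/")  # -> "foo/bar"
name_from_scope_name("foo/bar")   # -> "foo/bar"
```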
"""
return name[:-1] if (name and name[-1] == "/") else name
_MUTATION_LOCK_GROUP = 0
_SESSION_RUN_LOCK_GROUP = 1
@tf_export("Graph")
class Graph(object):
"""A TensorFlow computation, represented as a dataflow graph.
A `Graph` contains a set of
`tf.Operation` objects,
which represent units of computation; and
`tf.Tensor` objects, which represent
the units of data that flow between operations.
A default `Graph` is always registered, and accessible by calling
`tf.compat.v1.get_default_graph`.
To add an operation to the default graph, simply call one of the functions
that defines a new `Operation`:
```python
c = tf.constant(4.0)
assert c.graph is tf.compat.v1.get_default_graph()
```
Another typical usage involves the
`tf.Graph.as_default`
context manager, which overrides the current default graph for the
lifetime of the context:
```python
g = tf.Graph()
with g.as_default():
# Define operations and tensors in `g`.
c = tf.constant(30.0)
assert c.graph is g
```
Important note: This class *is not* thread-safe for graph construction. All
operations should be created from a single thread, or external
synchronization must be provided. Unless otherwise specified, all methods
are not thread-safe.
A `Graph` instance supports an arbitrary number of "collections"
that are identified by name. For convenience when building a large
graph, collections can store groups of related objects: for
example, `tf.Variable` uses a collection (named
`tf.GraphKeys.GLOBAL_VARIABLES`) for
all variables that are created during the construction of a graph. The caller
may define additional collections by specifying a new name.
"""
def __init__(self):
"""Creates a new, empty Graph."""
# Protects core state that can be returned via public accessors.
# Thread-safety is provided on a best-effort basis to support buggy
# programs, and is not guaranteed by the public `tf.Graph` API.
#
# NOTE(mrry): This does not protect the various stacks. A warning will
# be reported if these are used from multiple threads
self._lock = threading.RLock()
# The group lock synchronizes Session.run calls with methods that create
# and mutate ops (e.g. Graph.create_op()). This synchronization is
# necessary because it's illegal to modify an operation after it's been run.
# The group lock allows any number of threads to mutate ops at the same time
# but if any modification is going on, all Session.run calls have to wait.
# Similarly, if one or more Session.run calls are going on, all mutate ops
# have to wait until all Session.run calls have finished.
self._group_lock = lock_util.GroupLock(num_groups=2)
self._nodes_by_id = {} # GUARDED_BY(self._lock)
self._next_id_counter = 0 # GUARDED_BY(self._lock)
self._nodes_by_name = {} # GUARDED_BY(self._lock)
self._version = 0 # GUARDED_BY(self._lock)
# Maps a name used in the graph to the next id to use for that name.
self._names_in_use = {}
self._stack_state_is_thread_local = False
self._thread_local = threading.local()
# Functions that will be applied to choose a device if none is specified.
# In TF2.x or after switch_to_thread_local(),
# self._thread_local._device_function_stack is used instead.
self._graph_device_function_stack = traceable_stack.TraceableStack()
# Default original_op applied to new ops.
self._default_original_op = None
# Current control flow context. It could be either CondContext or
# WhileContext defined in ops/control_flow_ops.py
self._control_flow_context = None
# A new node will depend on the union of all of the nodes in the stack.
# In TF2.x or after switch_to_thread_local(),
# self._thread_local._control_dependencies_stack is used instead.
self._graph_control_dependencies_stack = []
# Arbitrary collections of objects.
self._collections = {}
# The graph-level random seed
self._seed = None
# A dictionary of attributes that should be applied to all ops.
self._attr_scope_map = {}
# A map from op type to the kernel label that should be used.
self._op_to_kernel_label_map = {}
# A map from op type to an alternative op type that should be used when
# computing gradients.
self._gradient_override_map = {}
# True if the graph is considered "finalized". In that case no
# new operations can be added.
self._finalized = False
# Functions defined in the graph
self._functions = collections.OrderedDict()
# Default GraphDef versions
self._graph_def_versions = versions_pb2.VersionDef(
producer=versions.GRAPH_DEF_VERSION,
min_consumer=versions.GRAPH_DEF_VERSION_MIN_CONSUMER)
self._building_function = False
# Stack of colocate_with ops. In TF2.x or after switch_to_thread_local(),
# self._thread_local._colocation_stack is used instead.
self._graph_colocation_stack = traceable_stack.TraceableStack()
# Set of tensors that are dangerous to feed!
self._unfeedable_tensors = set()
# Set of operations that are dangerous to fetch!
self._unfetchable_ops = set()
# A map of tensor handle placeholder to tensor dtype.
self._handle_feeders = {}
# A map from tensor handle to its read op.
self._handle_readers = {}
# A map from tensor handle to its move op.
self._handle_movers = {}
# A map from tensor handle to its delete op.
self._handle_deleters = {}
# Allow optimizers and other objects to pseudo-uniquely key graphs (this key
# will be shared when defining function graphs, for example, so optimizers
# being called inside function definitions behave as if they were seeing the
# actual outside graph).
self._graph_key = "grap-key-%d/" % (uid(),)
# A string with the last reduction method passed to
# losses.compute_weighted_loss(), or None. This is required only for
# backward compatibility with Estimator and optimizer V1 use cases.
self._last_loss_reduction = None
# Flag that is used to indicate whether loss has been scaled by optimizer.
# If this flag has been set, then estimator uses it to scale loss back
# before reporting. This is required only for backward compatibility with
# Estimator and optimizer V1 use cases.
self._is_loss_scaled_by_optimizer = False
self._container = ""
self._registered_ops = op_def_registry.get_registered_ops()
# Set to True if this graph is being built in an
# AutomaticControlDependencies context.
self._add_control_dependencies = False
# TODO(skyewm): fold as much of the above as possible into the C
# implementation
self._scoped_c_graph = c_api_util.ScopedTFGraph()
# The C API requires all ops to have shape functions. Disable this
# requirement (many custom ops do not have shape functions, and we don't
# want to break these existing cases).
c_api.SetRequireShapeInferenceFns(self._c_graph, False)
if tf2.enabled():
self.switch_to_thread_local()
# Note: this method is private because the API of tf.Graph() is public and
# frozen, and this functionality is still not ready for public visibility.
@tf_contextlib.contextmanager
def _variable_creator_scope(self, creator, priority=100):
"""Scope which defines a variable creation function.
Args:
creator: A callable taking `next_creator` and `kwargs`. See the
`tf.variable_creator_scope` docstring.
priority: Creators with a higher `priority` are called first. Within the
same priority, creators are called inner-to-outer.
Yields:
`_variable_creator_scope` is a context manager with a side effect, but
doesn't return a value.
Raises:
RuntimeError: If variable creator scopes are not properly nested.
"""
# This step keeps a reference to the existing stack, and it also initializes
# self._thread_local._variable_creator_stack if it doesn't exist yet.
old = self._variable_creator_stack
new = list(old)
new.append((priority, creator))
# Sorting is stable, so we'll put higher-priority creators later in the list
# but otherwise maintain registration order.
new.sort(key=lambda item: item[0])
self._thread_local._variable_creator_stack = new # pylint: disable=protected-access
try:
yield
finally:
if self._thread_local._variable_creator_stack is not new: # pylint: disable=protected-access
raise RuntimeError(
"Exiting variable_creator_scope without proper nesting.")
self._thread_local._variable_creator_stack = old # pylint: disable=protected-access
# Note: this method is private because the API of tf.Graph() is public and
# frozen, and this functionality is still not ready for public visibility.
@property
def _variable_creator_stack(self):
if not hasattr(self._thread_local, "_variable_creator_stack"):
self._thread_local._variable_creator_stack = [] # pylint: disable=protected-access
# This previously returned a copy of the stack instead of the stack itself,
# to guard against accidental mutation. Consider, however, code that wants
# to save and restore the variable creator stack:
# def f():
# original_stack = graph._variable_creator_stack
# graph._variable_creator_stack = new_stack
# ... # Some code
# graph._variable_creator_stack = original_stack
#
# And let's say you have some code that calls this function with some
# variable_creator:
# def g():
# with variable_scope.variable_creator_scope(creator):
# f()
# When exiting the variable creator scope, it would see a different stack
# object than it expected, leading to an "Exiting variable_creator_scope
# without proper nesting" error.
return self._thread_local._variable_creator_stack # pylint: disable=protected-access
@_variable_creator_stack.setter
def _variable_creator_stack(self, variable_creator_stack):
self._thread_local._variable_creator_stack = variable_creator_stack # pylint: disable=protected-access
def _check_not_finalized(self):
"""Check if the graph is finalized.
Raises:
RuntimeError: If the graph is finalized.
"""
if self._finalized:
raise RuntimeError("Graph is finalized and cannot be modified.")
def _add_op(self, op):
"""Adds 'op' to the graph.
Args:
op: the Operation or Tensor to add.
Raises:
TypeError: if op is not an Operation or Tensor.
ValueError: if the op.name or op._id are already used.
"""
self._check_not_finalized()
if not isinstance(op, (Tensor, Operation)):
raise TypeError("op must be a Tensor or Operation: %s" % op)
with self._lock:
# pylint: disable=protected-access
if op._id in self._nodes_by_id:
raise ValueError("cannot add an op with id %d as it already "
"exists in the graph" % op._id)
if op.name in self._nodes_by_name:
raise ValueError("cannot add op with name %s as that name "
"is already used" % op.name)
self._nodes_by_id[op._id] = op
self._nodes_by_name[op.name] = op
self._version = max(self._version, op._id)
# pylint: enable=protected-access
@property
def _c_graph(self):
if self._scoped_c_graph:
return self._scoped_c_graph.graph
return None
@property
def version(self):
"""Returns a version number that increases as ops are added to the graph.
Note that this is unrelated to the
`tf.Graph.graph_def_versions`.
Returns:
An integer version that increases as ops are added to the graph.
"""
if self._finalized:
return self._version
with self._lock:
return self._version
@property
def graph_def_versions(self):
# pylint: disable=line-too-long
"""The GraphDef version information of this graph.
For details on the meaning of each version, see
[`GraphDef`](https://www.tensorflow.org/code/tensorflow/core/framework/graph.proto).
Returns:
A `VersionDef`.
"""
# pylint: enable=line-too-long
with c_api_util.tf_buffer() as buf:
c_api.TF_GraphVersions(self._c_graph, buf)
data = c_api.TF_GetBuffer(buf)
version_def = versions_pb2.VersionDef()
version_def.ParseFromString(compat.as_bytes(data))
return version_def
@property
def seed(self):
"""The graph-level random seed of this graph."""
return self._seed
@seed.setter
def seed(self, seed):
self._seed = seed
@property
def finalized(self):
"""True if this graph has been finalized."""
return self._finalized
def finalize(self):
"""Finalizes this graph, making it read-only.
After calling `g.finalize()`, no new operations can be added to
`g`. This method is used to ensure that no operations are added
to a graph when it is shared between multiple threads, for example
when using a `tf.compat.v1.train.QueueRunner`.
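For example, a minimal sketch:

```python
g = tf.Graph()
g.finalize()
with g.as_default():
  tf.constant(1.0)  # raises RuntimeError: the graph is finalized
```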
"""
self._finalized = True
def _unsafe_unfinalize(self):
"""Opposite of `finalize`.
Internal interface.
NOTE: Unfinalizing a graph could have negative impact on performance,
especially in a multi-threaded environment. Unfinalizing a graph
when it is in use by a Session may lead to undefined behavior. Ensure
that all sessions using a graph are closed before calling this method.
"""
self._finalized = False
def _get_control_flow_context(self):
"""Returns the current control flow context.
Returns:
A context object.
"""
return self._control_flow_context
def _set_control_flow_context(self, ctx):
"""Sets the current control flow context.
Args:
ctx: a context object.
"""
self._control_flow_context = ctx
def _copy_functions_to_graph_def(self, graph_def, starting_bytesize):
"""If this graph contains functions, copy them to `graph_def`."""
bytesize = starting_bytesize
for f in self._functions.values():
bytesize += f.definition.ByteSize()
if bytesize >= (1 << 31) or bytesize < 0:
raise ValueError("GraphDef cannot be larger than 2GB.")
graph_def.library.function.extend([f.definition])
if f.grad_func_name:
grad_def = function_pb2.GradientDef()
grad_def.function_name = f.name
grad_def.gradient_func = f.grad_func_name
graph_def.library.gradient.extend([grad_def])
def _as_graph_def(self, from_version=None, add_shapes=False):
# pylint: disable=line-too-long
"""Returns a serialized `GraphDef` representation of this graph.
The serialized `GraphDef` can be imported into another `Graph`
(using `tf.import_graph_def`) or used with the
[C++ Session API](../../../../api_docs/cc/index.md).
This method is thread-safe.
Args:
from_version: Optional. If this is set, returns a `GraphDef` containing
only the nodes that were added to this graph since its `version`
property had the given value.
add_shapes: If true, adds an "_output_shapes" list attr to each node with
the inferred shapes of each of its outputs.
Returns:
A tuple containing a
[`GraphDef`](https://www.tensorflow.org/code/tensorflow/core/framework/graph.proto)
protocol buffer, and the version of the graph to which that
`GraphDef` corresponds.
Raises:
ValueError: If the `graph_def` would be too large.
"""
# pylint: enable=line-too-long
with self._lock:
with c_api_util.tf_buffer() as buf:
c_api.TF_GraphToGraphDef(self._c_graph, buf)
data = c_api.TF_GetBuffer(buf)
graph = graph_pb2.GraphDef()
graph.ParseFromString(compat.as_bytes(data))
# Strip the experimental library field iff it's empty.
if not graph.library.function:
graph.ClearField("library")
if add_shapes:
for node in graph.node:
op = self._nodes_by_name[node.name]
if op.outputs:
node.attr["_output_shapes"].list.shape.extend(
[output.get_shape().as_proto() for output in op.outputs])
for function_def in graph.library.function:
defined_function = self._functions[function_def.signature.name]
try:
func_graph = defined_function.graph
except AttributeError:
# _DefinedFunction doesn't have a graph, _EagerDefinedFunction
# does. Both rely on ops.py, so we can't really isinstance check
# them.
continue
input_shapes = function_def.attr["_input_shapes"]
try:
func_graph_inputs = func_graph.inputs
except AttributeError:
continue
for input_tensor in func_graph_inputs:
if input_tensor.dtype == dtypes.resource:
# TODO(allenl): Save and restore handle data, then save the
# resource placeholder's shape. Right now some shape functions get
# confused if we set the shape of the resource placeholder (to a
# scalar of course) and there isn't any handle data.
input_shapes.list.shape.add().CopyFrom(
tensor_shape.TensorShape(None).as_proto())
else:
input_shapes.list.shape.add().CopyFrom(
input_tensor.get_shape().as_proto())
for node in function_def.node_def:
try:
op = func_graph.get_operation_by_name(node.name)
except KeyError:
continue
node.attr["_output_shapes"].list.shape.extend(
[output.get_shape().as_proto() for output in op.outputs])
return graph, self._version
def as_graph_def(self, from_version=None, add_shapes=False):
# pylint: disable=line-too-long
"""Returns a serialized `GraphDef` representation of this graph.
The serialized `GraphDef` can be imported into another `Graph`
(using `tf.import_graph_def`) or used with the
[C++ Session API](../../api_docs/cc/index.md).
This method is thread-safe.
Args:
from_version: Optional. If this is set, returns a `GraphDef` containing
only the nodes that were added to this graph since its `version`
property had the given value.
add_shapes: If true, adds an "_output_shapes" list attr to each node with
the inferred shapes of each of its outputs.
Returns:
A
[`GraphDef`](https://www.tensorflow.org/code/tensorflow/core/framework/graph.proto)
protocol buffer.
Raises:
ValueError: If the `graph_def` would be too large.
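For example, a minimal sketch:

```python
with tf.Graph().as_default() as g:
  tf.constant(37.0, name="c")
print(g.as_graph_def())  # a GraphDef with a single "Const" node named "c"
```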
"""
# pylint: enable=line-too-long
result, _ = self._as_graph_def(from_version, add_shapes)
return result
def _is_function(self, name):
"""Tests whether 'name' is registered in this graph's function library.
Args:
name: string op name.
Returns:
bool indicating whether or not 'name' is registered in function library.
"""
return compat.as_str(name) in self._functions
def _get_function(self, name):
"""Returns the function definition for 'name'.
Args:
name: string function name.
Returns:
The function def proto.
"""
return self._functions.get(compat.as_str(name), None)
def _add_function(self, function):
"""Adds a function to the graph.
After the function has been added, you can call to the function by
passing the function name in place of an op name to
`Graph.create_op()`.
Args:
function: A `_DefinedFunction` object.
Raises:
ValueError: if another function is defined with the same name.
"""
name = function.name
# Sanity checks on gradient definition.
if (function.grad_func_name is not None) and (function.python_grad_func is
not None):
raise ValueError("Gradient defined twice for function %s" % name)
# Add function to graph
# pylint: disable=protected-access
# Handle functions created without using the C API. TODO(apassos,skyewm)
# remove this when all functions are generated using the C API by default
# as this will be unnecessary.
if not function._c_func:
serialized = function.definition.SerializeToString()
c_func = c_api.TF_FunctionImportFunctionDef(serialized)
function._c_func = c_api_util.ScopedTFFunction(c_func)
gradient = (
function._grad_func._c_func.func if function._grad_func else None)
c_api.TF_GraphCopyFunction(self._c_graph, function._c_func.func, gradient)
# pylint: enable=protected-access
self._functions[compat.as_str(name)] = function
# Need a new-enough consumer to support the functions we add to the graph.
if self._graph_def_versions.min_consumer < 12:
self._graph_def_versions.min_consumer = 12
@property
def building_function(self):
"""Returns True iff this graph represents a function."""
return self._building_function
# Helper functions to create operations.
@deprecated_args(None,
"Shapes are always computed; don't use the compute_shapes "
"as it has no effect.", "compute_shapes")
def create_op(
self,
op_type,
inputs,
dtypes=None, # pylint: disable=redefined-outer-name
input_types=None,
name=None,
attrs=None,
op_def=None,
compute_shapes=True,
compute_device=True):
"""Creates an `Operation` in this graph.
This is a low-level interface for creating an `Operation`. Most
programs will not call this method directly, and instead use the
Python op constructors, such as `tf.constant()`, which add ops to
the default graph.
Args:
op_type: The `Operation` type to create. This corresponds to the
`OpDef.name` field for the proto that defines the operation.
inputs: A list of `Tensor` objects that will be inputs to the `Operation`.
dtypes: (Optional) A list of `DType` objects that will be the types of the
tensors that the operation produces.
input_types: (Optional.) A list of `DType`s that will be the types of the
tensors that the operation consumes. By default, uses the base `DType`
of each input in `inputs`. Operations that expect reference-typed inputs
must specify `input_types` explicitly.
name: (Optional.) A string name for the operation. If not specified, a
name is generated based on `op_type`.
attrs: (Optional.) A dictionary where the key is the attribute name (a
string) and the value is the respective `attr` attribute of the
`NodeDef` proto that will represent the operation (an `AttrValue`
proto).
op_def: (Optional.) The `OpDef` proto that describes the `op_type` that
the operation will have.
compute_shapes: (Optional.) Deprecated. Has no effect (shapes are always
computed).
compute_device: (Optional.) If True, device functions will be executed to
compute the device property of the Operation.
Raises:
TypeError: if any of the inputs is not a `Tensor`.
ValueError: if colocation conflicts with existing device assignment.
Returns:
An `Operation` object.
"""
del compute_shapes
self._check_not_finalized()
for idx, a in enumerate(inputs):
if not isinstance(a, Tensor):
raise TypeError("Input #%d is not a tensor: %s" % (idx, a))
if name is None:
name = op_type
# If a name ends with a '/' it is a "name scope" and we use it as-is,
# after removing the trailing '/'.
if name and name[-1] == "/":
name = name_from_scope_name(name)
else:
name = self.unique_name(name)
node_def = _NodeDef(op_type, name, device=None, attrs=attrs)
input_ops = set([t.op for t in inputs])
control_inputs = self._control_dependencies_for_inputs(input_ops)
# _create_op_helper mutates the new Operation. `_mutation_lock` ensures a
# Session.run call cannot occur between creating and mutating the op.
with self._mutation_lock():
ret = Operation(
node_def,
self,
inputs=inputs,
output_types=dtypes,
control_inputs=control_inputs,
input_types=input_types,
original_op=self._default_original_op,
op_def=op_def)
self._create_op_helper(ret, compute_device=compute_device)
return ret
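# Hedged low-level sketch (illustrative; most programs should use the
# Python op constructors rather than calling create_op directly):
#
#   g = tf.get_default_graph()
#   x = tf.constant(1.0)
#   ident = g.create_op("Identity", [x], dtypes=[x.dtype], name="ident")
#   assert ident.outputs[0].dtype == x.dtype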
def _create_op_from_tf_operation(self, c_op, compute_device=True):
"""Creates an `Operation` in this graph from the supplied TF_Operation.
This method is like create_op() except the new Operation is constructed
using `c_op`. The returned Operation will have `c_op` as its _c_op
field. This is used to create Operation objects around TF_Operations created
indirectly by the C API (e.g. by TF_ImportGraphDef, TF_FinishWhile).
This function does not call Operation._control_flow_post_processing or
Graph._control_dependencies_for_inputs (since the inputs may not be
available yet). The caller is responsible for calling these methods.
Args:
c_op: a wrapped TF_Operation
compute_device: (Optional.) If True, device functions will be executed to
compute the device property of the Operation.
Returns:
An `Operation` object.
"""
self._check_not_finalized()
ret = Operation(c_op, self)
# If a name_scope was created with ret.name but no nodes were created in it,
# the name will still appear in _names_in_use even though the name hasn't
# been used. This is ok, just leave _names_in_use as-is in this case.
# TODO(skyewm): make the C API guarantee no name conflicts.
name_key = ret.name.lower()
if name_key not in self._names_in_use:
self._names_in_use[name_key] = 1
self._create_op_helper(ret, compute_device=compute_device)
return ret
def _create_op_helper(self, op, compute_device=True):
"""Common logic for creating an op in this graph."""
# Apply any additional attributes requested. Do not overwrite any existing
# attributes.
for key, value in self._attr_scope_map.items():
try:
op.get_attr(key)
except ValueError:
if callable(value):
value = value(op.node_def)
if not isinstance(value, (type(None), attr_value_pb2.AttrValue)):
raise TypeError(
"Callable for scope map key '%s' must return either None or "
"an AttrValue protocol buffer; but it returned: %s" %
(key, value))
if value:
op._set_attr(key, value) # pylint: disable=protected-access
# Apply a kernel label if one has been specified for this op type.
try:
kernel_label = self._op_to_kernel_label_map[op.type]
op._set_attr("_kernel", # pylint: disable=protected-access
attr_value_pb2.AttrValue(s=compat.as_bytes(kernel_label)))
except KeyError:
pass
# Apply the overriding op type for gradients if one has been specified for
# this op type.
try:
mapped_op_type = self._gradient_override_map[op.type]
op._set_attr("_gradient_op_type", # pylint: disable=protected-access
attr_value_pb2.AttrValue(s=compat.as_bytes(mapped_op_type)))
except KeyError:
pass
self._record_op_seen_by_control_dependencies(op)
if compute_device:
self._apply_device_functions(op)
# Snapshot the colocation stack metadata before we might generate error
# messages using it. Note that this snapshot depends on the actual stack
# and is independent of the op's _class attribute.
# pylint: disable=protected-access
op._colocation_code_locations = self._snapshot_colocation_stack_metadata()
# pylint: enable=protected-access
if self._colocation_stack:
all_colocation_groups = []
for colocation_op in self._colocation_stack.peek_objs():
all_colocation_groups.extend(colocation_op.colocation_groups())
if colocation_op.device:
# pylint: disable=protected-access
op._set_device(colocation_op.device)
# pylint: enable=protected-access
all_colocation_groups = sorted(set(all_colocation_groups))
# pylint: disable=protected-access
op._set_attr(
"_class",
attr_value_pb2.AttrValue(
list=attr_value_pb2.AttrValue.ListValue(s=all_colocation_groups)))
# pylint: enable=protected-access
# Sets "container" attribute if
# (1) self._container is not None
# (2) "is_stateful" is set in OpDef
# (3) "container" attribute is in OpDef
# (4) "container" attribute is None
if self._container and op.op_def.is_stateful:
try:
container_attr = op.get_attr("container")
except ValueError:
# "container" attribute is not in OpDef
pass
else:
if not container_attr:
op._set_attr("container", attr_value_pb2.AttrValue( # pylint: disable=protected-access
s=compat.as_bytes(self._container)))
def _add_new_tf_operations(self, compute_devices=True):
"""Creates `Operations` in this graph for any new TF_Operations.
This is useful for when TF_Operations are indirectly created by the C API
outside of the Operation constructor (e.g. by TF_ImportGraphDef,
TF_FinishWhile). This ensures there are corresponding Operations for all
TF_Operations in the underlying TF_Graph.
Args:
compute_devices: (Optional.) If True, device functions will be executed to
compute the device properties of each new Operation.
Returns:
A list of the new `Operation` objects.
"""
# Create all Operation objects before accessing their inputs since an op may
# be created before its inputs.
new_ops = [
self._create_op_from_tf_operation(c_op, compute_device=compute_devices)
for c_op in c_api_util.new_tf_operations(self)
]
# pylint: disable=protected-access
for op in new_ops:
new_control_inputs = self._control_dependencies_for_inputs(op.inputs)
op._add_control_inputs(new_control_inputs)
op._control_flow_post_processing()
# pylint: enable=protected-access
return new_ops
def as_graph_element(self, obj, allow_tensor=True, allow_operation=True):
"""Returns the object referred to by `obj`, as an `Operation` or `Tensor`.
This function validates that `obj` represents an element of this
graph, and gives an informative error message if it is not.
This function is the canonical way to get/validate an object of
one of the allowed types from an external argument reference in the
Session API.
This method may be called concurrently from multiple threads.
Args:
obj: A `Tensor`, an `Operation`, or the name of a tensor or operation. Can
also be any object with an `_as_graph_element()` method that returns a
value of one of these types.
allow_tensor: If true, `obj` may refer to a `Tensor`.
allow_operation: If true, `obj` may refer to an `Operation`.
Returns:
The `Tensor` or `Operation` in the Graph corresponding to `obj`.
Raises:
TypeError: If `obj` is not a type we support attempting to convert
to types.
ValueError: If `obj` is of an appropriate type but invalid. For
example, an invalid string.
KeyError: If `obj` is not an object in the graph.
"""
if self._finalized:
return self._as_graph_element_locked(obj, allow_tensor, allow_operation)
with self._lock:
return self._as_graph_element_locked(obj, allow_tensor, allow_operation)
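# Sketch of the reference forms accepted above (illustrative):
#
#   g = tf.Graph()
#   with g.as_default():
#     c = tf.constant(1.0, name="c")
#   assert g.as_graph_element("c:0") is c    # tensor name
#   assert g.as_graph_element("c") is c.op   # operation name
#   assert g.as_graph_element(c) is c        # Tensor object passes through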
def _as_graph_element_locked(self, obj, allow_tensor, allow_operation):
"""See `Graph.as_graph_element()` for details."""
# The vast majority of this function is figuring
# out what an API user might be doing wrong, so
# that we can give helpful error messages.
#
# Ideally, it would be nice to split it up, but we
# need context to generate nice error messages.
if allow_tensor and allow_operation:
types_str = "Tensor or Operation"
elif allow_tensor:
types_str = "Tensor"
elif allow_operation:
types_str = "Operation"
else:
raise ValueError("allow_tensor and allow_operation can't both be False.")
temp_obj = _as_graph_element(obj)
if temp_obj is not None:
obj = temp_obj
# If obj appears to be a name...
if isinstance(obj, compat.bytes_or_text_types):
name = compat.as_str(obj)
if ":" in name and allow_tensor:
# Looks like a Tensor name and can be a Tensor.
try:
op_name, out_n = name.split(":")
out_n = int(out_n)
except:
raise ValueError("The name %s looks a like a Tensor name, but is "
"not a valid one. Tensor names must be of the "
"form \"<op_name>:<output_index>\"." % repr(name))
if op_name in self._nodes_by_name:
op = self._nodes_by_name[op_name]
else:
raise KeyError("The name %s refers to a Tensor which does not "
"exist. The operation, %s, does not exist in the "
"graph." % (repr(name), repr(op_name)))
try:
return op.outputs[out_n]
except:
raise KeyError("The name %s refers to a Tensor which does not "
"exist. The operation, %s, exists but only has "
"%s outputs." %
(repr(name), repr(op_name), len(op.outputs)))
elif ":" in name and not allow_tensor:
# Looks like a Tensor name but can't be a Tensor.
raise ValueError("Name %s appears to refer to a Tensor, not a %s." %
(repr(name), types_str))
elif ":" not in name and allow_operation:
# Looks like an Operation name and can be an Operation.
if name not in self._nodes_by_name:
raise KeyError("The name %s refers to an Operation not in the "
"graph." % repr(name))
return self._nodes_by_name[name]
elif ":" not in name and not allow_operation:
# Looks like an Operation name but can't be an Operation.
if name in self._nodes_by_name:
# Yep, it's an Operation name
err_msg = ("The name %s refers to an Operation, not a %s." %
(repr(name), types_str))
else:
err_msg = ("The name %s looks like an (invalid) Operation name, "
"not a %s." % (repr(name), types_str))
err_msg += (" Tensor names must be of the form "
"\"<op_name>:<output_index>\".")
raise ValueError(err_msg)
elif isinstance(obj, Tensor) and allow_tensor:
# Actually obj is just the object it's referring to.
if obj.graph is not self:
raise ValueError("Tensor %s is not an element of this graph." % obj)
return obj
elif isinstance(obj, Operation) and allow_operation:
# Actually obj is just the object it's referring to.
if obj.graph is not self:
raise ValueError("Operation %s is not an element of this graph." % obj)
return obj
else:
# We give up!
raise TypeError("Can not convert a %s into a %s." %
(type(obj).__name__, types_str))
def get_operations(self):
"""Return the list of operations in the graph.
You can modify the operations in place, but modifications
to the returned list itself, such as inserts or deletes, have no effect on
the set of operations known to the graph.
This method may be called concurrently from multiple threads.
Returns:
A list of Operations.
"""
if self._finalized:
return list(self._nodes_by_id.values())
with self._lock:
return list(self._nodes_by_id.values())
def get_operation_by_name(self, name):
"""Returns the `Operation` with the given `name`.
This method may be called concurrently from multiple threads.
Args:
name: The name of the `Operation` to return.
Returns:
The `Operation` with the given `name`.
Raises:
TypeError: If `name` is not a string.
KeyError: If `name` does not correspond to an operation in this graph.
"""
if not isinstance(name, six.string_types):
raise TypeError("Operation names are strings (or similar), not %s." %
type(name).__name__)
return self.as_graph_element(name, allow_tensor=False, allow_operation=True)
def _get_operation_by_name_unsafe(self, name):
"""Returns the `Operation` with the given `name`.
This is an internal, unsafe version of `get_operation_by_name`. It skips
many checks and does not produce user-friendly error messages, but runs
considerably faster. This method may be called concurrently from multiple
threads.
Args:
name: The name of the `Operation` to return.
Returns:
The `Operation` with the given `name`.
Raises:
KeyError: If `name` does not correspond to an operation in this graph.
"""
if self._finalized:
return self._nodes_by_name[name]
with self._lock:
return self._nodes_by_name[name]
def _get_operation_by_tf_operation(self, tf_oper):
op_name = c_api.TF_OperationName(tf_oper)
return self._get_operation_by_name_unsafe(op_name)
def get_tensor_by_name(self, name):
"""Returns the `Tensor` with the given `name`.
This method may be called concurrently from multiple threads.
Args:
name: The name of the `Tensor` to return.
Returns:
The `Tensor` with the given `name`.
Raises:
TypeError: If `name` is not a string.
KeyError: If `name` does not correspond to a tensor in this graph.
"""
# Names should be strings.
if not isinstance(name, six.string_types):
raise TypeError("Tensor names are strings (or similar), not %s." %
type(name).__name__)
return self.as_graph_element(name, allow_tensor=True, allow_operation=False)
def _get_tensor_by_tf_output(self, tf_output):
"""Returns the `Tensor` representing `tf_output`.
Note that there is only one such `Tensor`, i.e. multiple calls to this
function with the same TF_Output value will always return the same `Tensor`
object.
Args:
tf_output: A wrapped `TF_Output` (the C API equivalent of `Tensor`).
Returns:
The `Tensor` that represents `tf_output`.
"""
op = self._get_operation_by_tf_operation(tf_output.oper)
return op.outputs[tf_output.index]
def _next_id(self):
"""Id for next Operation instance. Also increments the internal id."""
self._check_not_finalized()
with self._lock:
self._next_id_counter += 1
return self._next_id_counter
@property
def _last_id(self):
return self._next_id_counter
def _get_op_def(self, type): # pylint: disable=redefined-builtin
"""Returns the `OpDef` proto for `type`. `type` is a string."""
with c_api_util.tf_buffer() as buf:
# pylint: disable=protected-access
c_api.TF_GraphGetOpDef(self._c_graph, compat.as_bytes(type), buf)
# pylint: enable=protected-access
data = c_api.TF_GetBuffer(buf)
op_def = op_def_pb2.OpDef()
op_def.ParseFromString(compat.as_bytes(data))
return op_def
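# Hedged sketch of this internal helper (illustrative):
#
#   g = tf.Graph()
#   op_def = g._get_op_def("Const")  # pylint: disable=protected-access
#   assert op_def.name == "Const"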
def as_default(self):
"""Returns a context manager that makes this `Graph` the default graph.
This method should be used if you want to create multiple graphs
in the same process. For convenience, a global default graph is
provided, and all ops will be added to this graph if you do not
create a new graph explicitly.
Use this method with the `with` keyword to specify that ops created within
the scope of a block should be added to this graph. In this case, once
the scope of the `with` is exited, the previous default graph is set again
as default. There is a stack, so it's ok to have multiple nested levels
of `as_default` calls.
The default graph is a property of the current thread. If you
create a new thread, and wish to use the default graph in that
thread, you must explicitly add a `with g.as_default():` in that
thread's function.
The following code examples are equivalent:
```python
# 1. Using Graph.as_default():
g = tf.Graph()
with g.as_default():
c = tf.constant(5.0)
assert c.graph is g
# 2. Constructing and making default:
with tf.Graph().as_default() as g:
c = tf.constant(5.0)
assert c.graph is g
```
If eager execution is enabled ops created under this context manager will be
added to the graph instead of executed eagerly.
Returns:
A context manager for using this graph as the default graph.
"""
return _default_graph_stack.get_controller(self)
@property
def collections(self):
"""Returns the names of the collections known to this graph."""
return list(self._collections)
def add_to_collection(self, name, value):
"""Stores `value` in the collection with the given `name`.
Note that collections are not sets, so it is possible to add a value to
a collection several times.
Args:
name: The key for the collection. The `GraphKeys` class contains many
standard names for collections.
value: The value to add to the collection.
""" # pylint: disable=g-doc-exception
self._check_not_finalized()
with self._lock:
if name not in self._collections:
self._collections[name] = [value]
else:
self._collections[name].append(value)
def add_to_collections(self, names, value):
"""Stores `value` in the collections given by `names`.
Note that collections are not sets, so it is possible to add a value to
a collection several times. This function makes sure that duplicates in
`names` are ignored, but it will not check for pre-existing membership of
`value` in any of the collections in `names`.
`names` can be any iterable, but if `names` is a string, it is treated as a
single collection name.
Args:
names: The keys for the collections to add to. The `GraphKeys` class
contains many standard names for collections.
value: The value to add to the collections.
"""
# Make sure names are unique, but treat strings as a single collection name
names = (names,) if isinstance(names, six.string_types) else set(names)
for name in names:
self.add_to_collection(name, value)
def get_collection_ref(self, name):
"""Returns a list of values in the collection with the given `name`.
If the collection exists, this returns the list itself, which can
be modified in place to change the collection. If the collection does
not exist, it is created as an empty list and the list is returned.
This is different from `get_collection()` which always returns a copy of
the collection list if it exists and never creates an empty collection.
Args:
name: The key for the collection. For example, the `GraphKeys` class
contains many standard names for collections.
Returns:
The list of values in the collection with the given `name`, or an empty
list if no value has been added to that collection.
""" # pylint: disable=g-doc-exception
with self._lock:
coll_list = self._collections.get(name, None)
if coll_list is None:
coll_list = []
self._collections[name] = coll_list
return coll_list
def get_collection(self, name, scope=None):
"""Returns a list of values in the collection with the given `name`.
This is different from `get_collection_ref()`, which always returns the
actual collection list if it exists: this method returns a fresh copy of
the list each time it is called.
Args:
name: The key for the collection. For example, the `GraphKeys` class
contains many standard names for collections.
scope: (Optional.) A string. If supplied, the resulting list is filtered
to include only items whose `name` attribute matches `scope` using
`re.match`. Items without a `name` attribute are never returned if a
scope is supplied. The choice of `re.match` means that a `scope` without
special tokens filters by prefix.
Returns:
The list of values in the collection with the given `name`, or
an empty list if no value has been added to that collection. The
list contains the values in the order under which they were
collected.
""" # pylint: disable=g-doc-exception
with self._lock:
collection = self._collections.get(name, None)
if collection is None:
return []
if scope is None:
return list(collection)
else:
c = []
regex = re.compile(scope)
for item in collection:
if hasattr(item, "name") and regex.match(item.name):
c.append(item)
return c
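# Collections sketch (illustrative): get_collection returns a copy,
# get_collection_ref returns the live list.
#
#   g = tf.Graph()
#   g.add_to_collection("my_vals", 1)
#   g.add_to_collections(["my_vals", "other"], 2)
#   assert g.get_collection("my_vals") == [1, 2]
#   g.get_collection_ref("my_vals").append(3)   # mutates the collection
#   assert g.get_collection("my_vals") == [1, 2, 3]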
def get_all_collection_keys(self):
"""Returns a list of collections used in this graph."""
with self._lock:
return [x for x in self._collections if isinstance(x, six.string_types)]
def clear_collection(self, name):
"""Clears all values in a collection.
Args:
name: The key for the collection. The `GraphKeys` class contains many
standard names for collections.
"""
self._check_not_finalized()
with self._lock:
if name in self._collections:
del self._collections[name]
@tf_contextlib.contextmanager
def _original_op(self, op):
"""Python 'with' handler to help annotate ops with their originator.
An op may have an 'original_op' property that indicates the op on which
it was based. For example a replica op is based on the op that was
replicated and a gradient op is based on the op that was differentiated.
All ops created in the scope of this 'with' handler will have
the given 'op' as their original op.
Args:
op: The Operation that all ops created in this scope will have as their
original op.
Yields:
Nothing.
"""
old_original_op = self._default_original_op
self._default_original_op = op
try:
yield
finally:
self._default_original_op = old_original_op
@property
def _name_stack(self):
# This may be called from a thread where name_stack doesn't yet exist.
if not hasattr(self._thread_local, "_name_stack"):
self._thread_local._name_stack = ""
return self._thread_local._name_stack
@_name_stack.setter
def _name_stack(self, name_stack):
self._thread_local._name_stack = name_stack
# pylint: disable=g-doc-return-or-yield,line-too-long
@tf_contextlib.contextmanager
def name_scope(self, name):
"""Returns a context manager that creates hierarchical names for operations.
A graph maintains a stack of name scopes. A `with name_scope(...):`
statement pushes a new name onto the stack for the lifetime of the context.
The `name` argument will be interpreted as follows:
* A string (not ending with '/') will create a new name scope, in which
`name` is appended to the prefix of all operations created in the
context. If `name` has been used before, it will be made unique by
calling `self.unique_name(name)`.
* A scope previously captured from a `with g.name_scope(...) as
scope:` statement will be treated as an "absolute" name scope, which
makes it possible to re-enter existing scopes.
* A value of `None` or the empty string will reset the current name scope
to the top-level (empty) name scope.
For example:
```python
with tf.Graph().as_default() as g:
c = tf.constant(5.0, name="c")
assert c.op.name == "c"
c_1 = tf.constant(6.0, name="c")
assert c_1.op.name == "c_1"
# Creates a scope called "nested"
with g.name_scope("nested") as scope:
nested_c = tf.constant(10.0, name="c")
assert nested_c.op.name == "nested/c"
# Creates a nested scope called "inner".
with g.name_scope("inner"):
nested_inner_c = tf.constant(20.0, name="c")
assert nested_inner_c.op.name == "nested/inner/c"
# Create a nested scope called "inner_1".
with g.name_scope("inner"):
nested_inner_1_c = tf.constant(30.0, name="c")
assert nested_inner_1_c.op.name == "nested/inner_1/c"
# Treats `scope` as an absolute name scope, and
# switches to the "nested/" scope.
with g.name_scope(scope):
nested_d = tf.constant(40.0, name="d")
assert nested_d.op.name == "nested/d"
with g.name_scope(""):
e = tf.constant(50.0, name="e")
assert e.op.name == "e"
```
The name of the scope itself can be captured by `with
g.name_scope(...) as scope:`, which stores the name of the scope
in the variable `scope`. This value can be used to name an
operation that represents the overall result of executing the ops
in a scope. For example:
```python
inputs = tf.constant(...)
with g.name_scope('my_layer') as scope:
weights = tf.Variable(..., name="weights")
biases = tf.Variable(..., name="biases")
affine = tf.matmul(inputs, weights) + biases
output = tf.nn.relu(affine, name=scope)
```
NOTE: This constructor validates the given `name`. Valid scope
names match one of the following regular expressions:
[A-Za-z0-9.][A-Za-z0-9_.\\-/]* (for scopes at the root)
[A-Za-z0-9_.\\-/]* (for other scopes)
Args:
name: A name for the scope.
Returns:
A context manager that installs `name` as a new name scope.
Raises:
ValueError: If `name` is not a valid scope name, according to the rules
above.
"""
if name:
if isinstance(name, compat.bytes_or_text_types):
name = compat.as_str(name)
if self._name_stack:
# Scopes created in a nested scope may have initial characters
# that are illegal as the initial character of an op name
# (viz. '-', '\', '/', and '_').
if not _VALID_SCOPE_NAME_REGEX.match(name):
raise ValueError("'%s' is not a valid scope name" % name)
else:
# Scopes created in the root must match the more restrictive
# op name regex, which constrains the initial character.
if not _VALID_OP_NAME_REGEX.match(name):
raise ValueError("'%s' is not a valid scope name" % name)
old_stack = self._name_stack
if not name: # Both for name=None and name="" we re-set to empty scope.
new_stack = None
elif name[-1] == "/":
new_stack = name_from_scope_name(name)
else:
new_stack = self.unique_name(name)
self._name_stack = new_stack
try:
yield "" if new_stack is None else new_stack + "/"
finally:
self._name_stack = old_stack
# pylint: enable=g-doc-return-or-yield,line-too-long
def unique_name(self, name, mark_as_used=True):
"""Return a unique operation name for `name`.
Note: You rarely need to call `unique_name()` directly. Most of
the time you just need to create `with g.name_scope()` blocks to
generate structured names.
`unique_name` is used to generate structured names, separated by
`"/"`, to help identify operations when debugging a graph.
Operation names are displayed in error messages reported by the
TensorFlow runtime, and in various visualization tools such as
TensorBoard.
If `mark_as_used` is set to `True`, which is the default, a new
unique name is created and marked as in use. If it's set to `False`,
the unique name is returned without actually being marked as used.
This is useful when the caller simply wants to know what the name
to be created will be.
Args:
name: The name for an operation.
mark_as_used: Whether to mark this name as being used.
Returns:
A string to be passed to `create_op()` that will be used
to name the operation being created.
"""
if self._name_stack:
name = self._name_stack + "/" + name
# For the sake of checking for names in use, we treat names as case
# insensitive (e.g. foo = Foo).
name_key = name.lower()
i = self._names_in_use.get(name_key, 0)
# Increment the number for "name_key".
if mark_as_used:
self._names_in_use[name_key] = i + 1
if i > 0:
base_name_key = name_key
# Make sure the composed name key is not already used.
while name_key in self._names_in_use:
name_key = "%s_%d" % (base_name_key, i)
i += 1
# Mark the composed name_key as used in case someone wants
# to call unique_name("name_1").
if mark_as_used:
self._names_in_use[name_key] = 1
# Return the new name with the original capitalization of the given name.
name = "%s_%d" % (name, i - 1)
return name
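# Uniquification sketch (illustrative):
#
#   g = tf.Graph()
#   assert g.unique_name("foo") == "foo"
#   assert g.unique_name("foo") == "foo_1"
#   assert g.unique_name("foo", mark_as_used=False) == "foo_2"
#   assert g.unique_name("foo") == "foo_2"  # "foo_2" was never marked used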
def get_name_scope(self):
"""Returns the current name scope.
For example:
```python
with tf.name_scope('scope1'):
with tf.name_scope('scope2'):
print(tf.compat.v1.get_default_graph().get_name_scope())
```
would print the string `scope1/scope2`.
Returns:
A string representing the current name scope.
"""
return self._name_stack
@tf_contextlib.contextmanager
def _colocate_with_for_gradient(self, op, gradient_uid,
ignore_existing=False):
with self.colocate_with(op, ignore_existing):
if gradient_uid is not None and self._control_flow_context is not None:
self._control_flow_context.EnterGradientColocation(op, gradient_uid)
try:
yield
finally:
self._control_flow_context.ExitGradientColocation(op, gradient_uid)
else:
yield
@tf_contextlib.contextmanager
def colocate_with(self, op, ignore_existing=False):
"""Returns a context manager that specifies an op to colocate with.
Note: this function is not for public use, only for internal libraries.
For example:
```python
a = tf.Variable([1.0])
with g.colocate_with(a):
b = tf.constant(1.0)
c = tf.add(a, b)
```
`b` and `c` will always be colocated with `a`, no matter where `a`
is eventually placed.
**NOTE** Using a colocation scope resets any existing device constraints.
If `op` is `None` then `ignore_existing` must be `True` and the new
scope resets all colocation and device constraints.
Args:
op: The op to colocate all created ops with, or `None`.
ignore_existing: If true, only applies colocation of this op within the
context, rather than applying all colocation properties on the stack.
If `op` is `None`, this value must be `True`.
Raises:
ValueError: if op is None but ignore_existing is False.
Yields:
A context manager that specifies the op with which to colocate
newly created ops.
"""
if op is None and not ignore_existing:
raise ValueError("Trying to reset colocation (op is None) but "
"ignore_existing is not True")
op = _op_to_colocate_with(op)
# By default, colocate_with resets the device function stack,
# since colocate_with is typically used in specific internal
# library functions where colocation is intended to be "stronger"
# than device functions.
#
# In the future, a caller may specify that device_functions win
# over colocation, in which case we can add support.
device_fn_tmp = self._device_function_stack
self._device_function_stack = traceable_stack.TraceableStack()
if ignore_existing:
current_stack = self._colocation_stack
self._colocation_stack = traceable_stack.TraceableStack()
if op is not None:
# offset refers to the stack frame used for storing code location.
# We use 4, the sum of 1 to use our caller's stack frame and 3
# to jump over layers of context managers above us.
self._colocation_stack.push_obj(op, offset=4)
try:
yield
finally:
# Restore device function stack
self._device_function_stack = device_fn_tmp
if op is not None:
self._colocation_stack.pop_obj()
# Reset the colocation stack if requested.
if ignore_existing:
self._colocation_stack = current_stack
def _add_device_to_stack(self, device_name_or_function, offset=0):
"""Add device to stack manually, separate from a context manager."""
total_offset = 1 + offset
spec = _UserDeviceSpec(device_name_or_function)
self._device_function_stack.push_obj(spec, offset=total_offset)
return spec
@tf_contextlib.contextmanager
def device(self, device_name_or_function):
# pylint: disable=line-too-long
"""Returns a context manager that specifies the default device to use.
The `device_name_or_function` argument may either be a device name
string, a device function, or None:
* If it is a device name string, all operations constructed in
this context will be assigned to the device with that name, unless
overridden by a nested `device()` context.
* If it is a function, it will be treated as a function from
Operation objects to device name strings, and invoked each time
a new Operation is created. The Operation will be assigned to
the device with the returned name.
* If it is None, all `device()` invocations from the enclosing context
will be ignored.
For information about the valid syntax of device name strings, see
the documentation in
[`DeviceNameUtils`](https://www.tensorflow.org/code/tensorflow/core/util/device_name_utils.h).
For example:
```python
with g.device('/device:GPU:0'):
# All operations constructed in this context will be placed
# on GPU 0.
with g.device(None):
# All operations constructed in this context will have no
# assigned device.
# Defines a function from `Operation` to device string.
def matmul_on_gpu(n):
if n.type == "MatMul":
return "/device:GPU:0"
else:
return "/cpu:0"
with g.device(matmul_on_gpu):
# All operations of type "MatMul" constructed in this context
# will be placed on GPU 0; all other operations will be placed
# on CPU 0.
```
**N.B.** The device scope may be overridden by op wrappers or
other library code. For example, a variable assignment op
`v.assign()` must be colocated with the `tf.Variable` `v`, and
incompatible device scopes will be ignored.
Args:
device_name_or_function: The device name or function to use in the
context.
Yields:
A context manager that specifies the default device to use for newly
created ops.
Raises:
RuntimeError: If device scopes are not properly nested.
"""
self._add_device_to_stack(device_name_or_function, offset=2)
old_top_of_stack = self._device_function_stack.peek_top_obj()
try:
yield
finally:
new_top_of_stack = self._device_function_stack.peek_top_obj()
if old_top_of_stack is not new_top_of_stack:
raise RuntimeError("Exiting device scope without proper scope nesting.")
self._device_function_stack.pop_obj()
def _apply_device_functions(self, op):
"""Applies the current device function stack to the given operation."""
# Apply any device functions in LIFO order, so that the most recently
# pushed function has the first chance to apply a device to the op.
# We apply here because the result can depend on the Operation's
# signature, which is computed in the Operation constructor.
# pylint: disable=protected-access
prior_device_string = None
for device_spec in self._device_function_stack.peek_objs():
if device_spec.is_null_merge:
continue
if device_spec.function is None:
break
device_string = device_spec.string_merge(op)
# Take advantage of the fact that None is a singleton and Python interns
# strings, since identity checks are faster than equality checks.
if device_string is not prior_device_string:
op._set_device_from_string(device_string)
prior_device_string = device_string
op._device_code_locations = self._snapshot_device_function_stack_metadata()
# pylint: enable=protected-access
# pylint: disable=g-doc-return-or-yield
@tf_contextlib.contextmanager
def container(self, container_name):
"""Returns a context manager that specifies the resource container to use.
Stateful operations, such as variables and queues, can maintain their
states on devices so that they can be shared by multiple processes.
A resource container is a string name under which these stateful
operations are tracked. These resources can be released or cleared
with `tf.Session.reset()`.
For example:
```python
with g.container('experiment0'):
# All stateful Operations constructed in this context will be placed
# in resource container "experiment0".
v1 = tf.Variable([1.0])
v2 = tf.Variable([2.0])
with g.container("experiment1"):
# All stateful Operations constructed in this context will be
# placed in resource container "experiment1".
v3 = tf.Variable([3.0])
q1 = tf.queue.FIFOQueue(10, tf.float32)
# All stateful Operations constructed in this context will be
# created in resource container "experiment0".
v4 = tf.Variable([4.0])
q1 = tf.queue.FIFOQueue(20, tf.float32)
with g.container(""):
# All stateful Operations constructed in this context will be
# placed in the default resource container.
v5 = tf.Variable([5.0])
q3 = tf.queue.FIFOQueue(30, tf.float32)
# Resets container "experiment0", after which the state of v1, v2, v4, q1
# will become undefined (such as uninitialized).
tf.Session.reset(target, ["experiment0"])
```
Args:
container_name: container name string.
Returns:
A context manager for defining resource containers for stateful ops,
yields the container name.
"""
original_container = self._container
self._container = container_name
try:
yield self._container
finally:
self._container = original_container
# pylint: enable=g-doc-return-or-yield
class _ControlDependenciesController(object):
"""Context manager for `control_dependencies()`."""
def __init__(self, graph, control_inputs):
"""Create a new `_ControlDependenciesController`.
A `_ControlDependenciesController` is the context manager for
`with tf.control_dependencies()` blocks. These normally nest,
as described in the documentation for `control_dependencies()`.
The `control_inputs` argument lists the control dependencies that must be
added to the current set of control dependencies. Because of
uniquification the set can be empty even if the caller passed a list of
ops. The special value `None` indicates that we want to start a new
empty set of control dependencies instead of extending the current set.
In that case we also clear the current control flow context, which is an
additional mechanism to add control dependencies.
Args:
graph: The graph that this controller is managing.
control_inputs: List of ops to use as control inputs in addition to the
current control dependencies. None to indicate that the dependencies
should be cleared.
"""
self._graph = graph
if control_inputs is None:
self._control_inputs_val = []
self._new_stack = True
else:
self._control_inputs_val = control_inputs
self._new_stack = False
self._seen_nodes = set()
self._old_stack = None
self._old_control_flow_context = None
# pylint: disable=protected-access
def __enter__(self):
if self._new_stack:
# Clear the control_dependencies graph.
self._old_stack = self._graph._control_dependencies_stack
self._graph._control_dependencies_stack = []
# Clear the control_flow_context too.
self._old_control_flow_context = self._graph._get_control_flow_context()
self._graph._set_control_flow_context(None)
self._graph._push_control_dependencies_controller(self)
def __exit__(self, unused_type, unused_value, unused_traceback):
self._graph._pop_control_dependencies_controller(self)
if self._new_stack:
self._graph._control_dependencies_stack = self._old_stack
self._graph._set_control_flow_context(self._old_control_flow_context)
# pylint: enable=protected-access
@property
def control_inputs(self):
return self._control_inputs_val
def add_op(self, op):
self._seen_nodes.add(op)
def op_in_group(self, op):
return op in self._seen_nodes
def _push_control_dependencies_controller(self, controller):
self._control_dependencies_stack.append(controller)
def _pop_control_dependencies_controller(self, controller):
assert self._control_dependencies_stack[-1] is controller
self._control_dependencies_stack.pop()
def _current_control_dependencies(self):
ret = set()
for controller in self._control_dependencies_stack:
for op in controller.control_inputs:
ret.add(op)
return ret
def _control_dependencies_for_inputs(self, input_ops):
"""For an op that takes `input_ops` as inputs, compute control inputs.
The returned control dependencies should yield an execution that
is equivalent to adding all control inputs in
self._control_dependencies_stack to a newly created op. However,
this function attempts to prune the returned control dependencies
by observing that nodes created within the same `with
control_dependencies(...):` block may have data dependencies that make
the explicit approach redundant.
Args:
input_ops: The data input ops for an op to be created.
Returns:
A list of control inputs for the op to be created.
"""
ret = []
for controller in self._control_dependencies_stack:
# If any of the input_ops already depends on the inputs from controller,
# we say that the new op is dominated (by that input), and we therefore
# do not need to add control dependencies for this controller's inputs.
dominated = False
for op in input_ops:
if controller.op_in_group(op):
dominated = True
break
if not dominated:
# Don't add a control input if we already have a data dependency on it.
# NOTE(mrry): We do not currently track transitive data dependencies,
# so we may add redundant control inputs.
ret.extend([c for c in controller.control_inputs if c not in input_ops])
return ret
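# Pruning sketch (illustrative; `a` and `x` are assumed pre-existing ops):
# inside one control_dependencies block, an op that already consumes a
# tensor produced in the same block is "dominated" and receives no
# redundant control input.
#
#   with g.control_dependencies([a]):
#     b = tf.identity(x)   # control input: a
#     c = tf.identity(b)   # no extra control input; b already dominates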
def _record_op_seen_by_control_dependencies(self, op):
"""Record that the given op depends on all registered control dependencies.
Args:
op: An Operation.
"""
for controller in self._control_dependencies_stack:
controller.add_op(op)
def control_dependencies(self, control_inputs):
"""Returns a context manager that specifies control dependencies.
Use with the `with` keyword to specify that all operations constructed
within the context should have control dependencies on
`control_inputs`. For example:
```python
with g.control_dependencies([a, b, c]):
# `d` and `e` will only run after `a`, `b`, and `c` have executed.
d = ...
e = ...
```
Multiple calls to `control_dependencies()` can be nested, and in
that case a new `Operation` will have control dependencies on the union
of `control_inputs` from all active contexts.
```python
with g.control_dependencies([a, b]):
# Ops constructed here run after `a` and `b`.
with g.control_dependencies([c, d]):
# Ops constructed here run after `a`, `b`, `c`, and `d`.
```
You can pass None to clear the control dependencies:
```python
with g.control_dependencies([a, b]):
# Ops constructed here run after `a` and `b`.
with g.control_dependencies(None):
# Ops constructed here run normally, not waiting for either `a` or `b`.
with g.control_dependencies([c, d]):
# Ops constructed here run after `c` and `d`, also not waiting
# for either `a` or `b`.
```
*N.B.* The control dependencies context applies *only* to ops that
are constructed within the context. Merely using an op or tensor
in the context does not add a control dependency. The following
example illustrates this point:
```python
# WRONG
def my_func(pred, tensor):
t = tf.matmul(tensor, tensor)
with tf.control_dependencies([pred]):
# The matmul op is created outside the context, so no control
# dependency will be added.
return t
# RIGHT
def my_func(pred, tensor):
with tf.control_dependencies([pred]):
# The matmul op is created in the context, so a control dependency
# will be added.
return tf.matmul(tensor, tensor)
```
Also note that though execution of ops created under this scope will trigger
execution of the dependencies, the ops created under this scope might still
be pruned from a normal TensorFlow graph. For example, in the following
snippet of code the dependencies are never executed:
```python
loss = model.loss()
with tf.control_dependencies(dependencies):
loss = loss + tf.constant(1) # note: dependencies ignored in the
# backward pass
return tf.gradients(loss, model.variables)
```
This is because evaluating the gradient graph does not require evaluating
the constant(1) op created in the forward pass.
Args:
control_inputs: A list of `Operation` or `Tensor` objects which must be
executed or computed before running the operations defined in the
context. Can also be `None` to clear the control dependencies.
Returns:
A context manager that specifies control dependencies for all
operations constructed within the context.
Raises:
TypeError: If `control_inputs` is not a list of `Operation` or
`Tensor` objects.
"""
if control_inputs is None:
return self._ControlDependenciesController(self, None)
# First convert the inputs to ops, and deduplicate them.
# NOTE(mrry): Other than deduplication, we do not currently track direct
# or indirect dependencies between control_inputs, which may result in
# redundant control inputs.
control_ops = []
current = self._current_control_dependencies()
for c in control_inputs:
# The hasattr(handle) is designed to match ResourceVariables. This is so
# control dependencies on a variable or on an unread variable don't
# trigger reads.
if (isinstance(c, IndexedSlices) or
(hasattr(c, "_handle") and hasattr(c, "op"))):
c = c.op
c = self.as_graph_element(c)
if isinstance(c, Tensor):
c = c.op
elif not isinstance(c, Operation):
raise TypeError("Control input must be Operation or Tensor: %s" % c)
if c not in current:
control_ops.append(c)
current.add(c)
return self._ControlDependenciesController(self, control_ops)
# pylint: disable=g-doc-return-or-yield
@tf_contextlib.contextmanager
def _attr_scope(self, attr_map):
"""EXPERIMENTAL: A context manager for setting attributes on operators.
This context manager can be used to add additional
attributes to operators within the scope of the context.
For example:
with ops.Graph().as_default() as g:
f_1 = Foo() # No extra attributes
with g._attr_scope({"_a": tf.attr_value_pb2.AttrValue(b=False)}):
f_2 = Foo() # Additional attribute _a=False
with g._attr_scope({"_a": tf.attr_value_pb2.AttrValue(b=True)}):
f_3 = Foo() # Additional attribute _a=True
with g._attr_scope({"_a": None}):
f_4 = Foo() # No additional attributes.
Args:
attr_map: A dictionary mapping attr name strings to AttrValue protocol
buffers or None.
Returns:
A context manager that sets the kernel label to be used for one or more
ops created in that context.
Raises:
TypeError: If attr_map is not a dictionary mapping
strings to AttrValue protobufs.
"""
if not isinstance(attr_map, dict):
raise TypeError("attr_map must be a dictionary mapping "
"strings to AttrValue protocol buffers")
# The saved_attrs dictionary stores any currently-set labels that
# will be overridden by this context manager.
saved_attrs = {}
# Install the given attribute
for name, attr in attr_map.items():
if not (isinstance(name, six.string_types) and
(isinstance(attr, (type(None), attr_value_pb2.AttrValue)) or
callable(attr))):
raise TypeError("attr_map must be a dictionary mapping "
"strings to AttrValue protocol buffers or "
"callables that emit AttrValue protocol buffers")
try:
saved_attrs[name] = self._attr_scope_map[name]
except KeyError:
pass
if attr is None:
del self._attr_scope_map[name]
else:
self._attr_scope_map[name] = attr
try:
yield # The code within the context runs here.
finally:
# Remove the attributes set for this context, and restore any saved
# attributes.
for name, attr in attr_map.items():
try:
self._attr_scope_map[name] = saved_attrs[name]
except KeyError:
del self._attr_scope_map[name]
# pylint: enable=g-doc-return-or-yield
# pylint: disable=g-doc-return-or-yield
@tf_contextlib.contextmanager
def _kernel_label_map(self, op_to_kernel_label_map):
"""EXPERIMENTAL: A context manager for setting kernel labels.
This context manager can be used to select particular
implementations of kernels within the scope of the context.
For example:
with ops.Graph().as_default() as g:
f_1 = Foo() # Uses the default registered kernel for the Foo op.
with g.kernel_label_map({"Foo": "v_2"}):
f_2 = Foo() # Uses the registered kernel with label "v_2"
# for the Foo op.
with g.kernel_label_map({"Foo": "v_3"}):
f_3 = Foo() # Uses the registered kernel with label "v_3"
# for the Foo op.
with g.kernel_label_map({"Foo": ""}):
f_4 = Foo() # Uses the default registered kernel
# for the Foo op.
Args:
op_to_kernel_label_map: A dictionary mapping op type strings to kernel
label strings.
Returns:
A context manager that sets the kernel label to be used for one or more
ops created in that context.
Raises:
TypeError: If op_to_kernel_label_map is not a dictionary mapping
strings to strings.
"""
if not isinstance(op_to_kernel_label_map, dict):
raise TypeError("op_to_kernel_label_map must be a dictionary mapping "
"strings to strings")
# The saved_labels dictionary stores any currently-set labels that
# will be overridden by this context manager.
saved_labels = {}
# Install the given label
for op_type, label in op_to_kernel_label_map.items():
if not (isinstance(op_type, six.string_types) and
isinstance(label, six.string_types)):
raise TypeError("op_to_kernel_label_map must be a dictionary mapping "
"strings to strings")
try:
saved_labels[op_type] = self._op_to_kernel_label_map[op_type]
except KeyError:
pass
self._op_to_kernel_label_map[op_type] = label
try:
yield # The code within the context runs here.
finally:
# Remove the labels set for this context, and restore any saved labels.
for op_type, label in op_to_kernel_label_map.items():
try:
self._op_to_kernel_label_map[op_type] = saved_labels[op_type]
except KeyError:
del self._op_to_kernel_label_map[op_type]
# pylint: enable=g-doc-return-or-yield
# pylint: disable=g-doc-return-or-yield
@tf_contextlib.contextmanager
def gradient_override_map(self, op_type_map):
"""EXPERIMENTAL: A context manager for overriding gradient functions.
This context manager can be used to override the gradient function
that will be used for ops within the scope of the context.
For example:
```python
@tf.RegisterGradient("CustomSquare")
def _custom_square_grad(op, grad):
# ...
with tf.Graph().as_default() as g:
c = tf.constant(5.0)
s_1 = tf.square(c) # Uses the default gradient for tf.square.
with g.gradient_override_map({"Square": "CustomSquare"}):
s_2 = tf.square(c) # Uses _custom_square_grad to compute the
# gradient of s_2.
```
Args:
op_type_map: A dictionary mapping op type strings to alternative op type
strings.
Returns:
A context manager that sets the alternative op type to be used for one
or more ops created in that context.
Raises:
TypeError: If `op_type_map` is not a dictionary mapping strings to
strings.
"""
if not isinstance(op_type_map, dict):
raise TypeError("op_type_map must be a dictionary mapping "
"strings to strings")
# The saved_mappings dictionary stores any currently-set mappings that
# will be overridden by this context manager.
saved_mappings = {}
# Install the given label
for op_type, mapped_op_type in op_type_map.items():
if not (isinstance(op_type, six.string_types) and
isinstance(mapped_op_type, six.string_types)):
raise TypeError("op_type_map must be a dictionary mapping "
"strings to strings")
try:
saved_mappings[op_type] = self._gradient_override_map[op_type]
except KeyError:
pass
self._gradient_override_map[op_type] = mapped_op_type
try:
yield # The code within the context runs here.
finally:
# Remove the labels set for this context, and restore any saved labels.
for op_type, mapped_op_type in op_type_map.items():
try:
self._gradient_override_map[op_type] = saved_mappings[op_type]
except KeyError:
del self._gradient_override_map[op_type]
# pylint: enable=g-doc-return-or-yield
def prevent_feeding(self, tensor):
"""Marks the given `tensor` as unfeedable in this graph."""
self._unfeedable_tensors.add(tensor)
def is_feedable(self, tensor):
"""Returns `True` if and only if `tensor` is feedable."""
return tensor not in self._unfeedable_tensors
def prevent_fetching(self, op):
"""Marks the given `op` as unfetchable in this graph."""
self._unfetchable_ops.add(op)
def is_fetchable(self, tensor_or_op):
"""Returns `True` if and only if `tensor_or_op` is fetchable."""
if isinstance(tensor_or_op, Tensor):
return tensor_or_op.op not in self._unfetchable_ops
else:
return tensor_or_op not in self._unfetchable_ops
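# Feedability sketch (illustrative):
#
#   g = tf.get_default_graph()
#   t = tf.constant(1.0)
#   assert g.is_feedable(t)
#   g.prevent_feeding(t)
#   assert not g.is_feedable(t)  # Session.run will reject feeds of t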
def switch_to_thread_local(self):
"""Make device, colocation and dependencies stacks thread-local.
Device, colocation and dependency stacks are not thread-local by default.
If multiple threads access them, then the state is shared. This means that
one thread may affect the behavior of another thread.
After this method is called, the stacks become thread-local. If multiple
threads access them, then the state is not shared. Each thread uses its own
value; a thread doesn't affect other threads by mutating such a stack.
The initial value for every thread's stack is set to the current value
of the stack when `switch_to_thread_local()` was first called.
"""
if not self._stack_state_is_thread_local:
self._stack_state_is_thread_local = True
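# Thread-locality sketch (illustrative; assumes `import threading`):
# after the switch, device-scope changes on one thread do not leak into
# other threads.
#
#   g.switch_to_thread_local()
#   def worker():
#     with g.as_default(), g.device("/cpu:0"):
#       pass  # device stack changes stay confined to this thread
#   threading.Thread(target=worker).start()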
@property
def _device_function_stack(self):
if self._stack_state_is_thread_local:
# This may be called from a thread where device_function_stack doesn't yet
# exist.
# pylint: disable=protected-access
if not hasattr(self._thread_local, "_device_function_stack"):
stack_copy_for_this_thread = self._graph_device_function_stack.copy()
self._thread_local._device_function_stack = stack_copy_for_this_thread
return self._thread_local._device_function_stack
# pylint: enable=protected-access
else:
return self._graph_device_function_stack
@property
def _device_functions_outer_to_inner(self):
user_device_specs = self._device_function_stack.peek_objs()
device_functions = [spec.function for spec in user_device_specs]
device_functions_outer_to_inner = list(reversed(device_functions))
return device_functions_outer_to_inner
def _snapshot_device_function_stack_metadata(self):
"""Return device function stack as a list of TraceableObjects.
Returns:
[traceable_stack.TraceableObject, ...] where each TraceableObject's .obj
member is a displayable name for the user's argument to Graph.device, and
the filename and lineno members point to the code location where
Graph.device was called directly or indirectly by the user.
"""
snapshot = []
for obj in self._device_function_stack.peek_traceable_objs():
obj_copy = obj.copy_metadata()
obj_copy.obj = obj.obj.display_name
snapshot.append(obj_copy)
return snapshot
@_device_function_stack.setter
def _device_function_stack(self, device_function_stack):
if self._stack_state_is_thread_local:
# pylint: disable=protected-access
self._thread_local._device_function_stack = device_function_stack
# pylint: enable=protected-access
else:
self._graph_device_function_stack = device_function_stack
@property
def _colocation_stack(self):
"""Return thread-local copy of colocation stack."""
if self._stack_state_is_thread_local:
# This may be called from a thread where colocation_stack doesn't yet
# exist.
# pylint: disable=protected-access
if not hasattr(self._thread_local, "_colocation_stack"):
stack_copy_for_this_thread = self._graph_colocation_stack.copy()
self._thread_local._colocation_stack = stack_copy_for_this_thread
return self._thread_local._colocation_stack
# pylint: enable=protected-access
else:
return self._graph_colocation_stack
def _snapshot_colocation_stack_metadata(self):
"""Return colocation stack metadata as a dictionary."""
return {
traceable_obj.obj.name: traceable_obj.copy_metadata()
for traceable_obj in self._colocation_stack.peek_traceable_objs()
}
@_colocation_stack.setter
def _colocation_stack(self, colocation_stack):
if self._stack_state_is_thread_local:
# pylint: disable=protected-access
self._thread_local._colocation_stack = colocation_stack
# pylint: enable=protected-access
else:
self._graph_colocation_stack = colocation_stack
@property
def _control_dependencies_stack(self):
if self._stack_state_is_thread_local:
# This may be called from a thread where control_dependencies_stack
# doesn't yet exist.
if not hasattr(self._thread_local, "_control_dependencies_stack"):
self._thread_local._control_dependencies_stack = (
self._graph_control_dependencies_stack[:])
return self._thread_local._control_dependencies_stack
else:
return self._graph_control_dependencies_stack
@_control_dependencies_stack.setter
def _control_dependencies_stack(self, control_dependencies):
if self._stack_state_is_thread_local:
self._thread_local._control_dependencies_stack = control_dependencies
else:
self._graph_control_dependencies_stack = control_dependencies
@property
def _distribution_strategy_stack(self):
"""A stack to maintain distribution strategy context for each thread."""
if not hasattr(self._thread_local, "_distribution_strategy_stack"):
self._thread_local._distribution_strategy_stack = [] # pylint: disable=protected-access
return self._thread_local._distribution_strategy_stack # pylint: disable=protected-access
@_distribution_strategy_stack.setter
def _distribution_strategy_stack(self, _distribution_strategy_stack):
self._thread_local._distribution_strategy_stack = ( # pylint: disable=protected-access
_distribution_strategy_stack)
@property
def _global_distribute_strategy_scope(self):
"""For implementing `tf.distribute.set_strategy()`."""
if not hasattr(self._thread_local, "distribute_strategy_scope"):
self._thread_local.distribute_strategy_scope = None
return self._thread_local.distribute_strategy_scope
@_global_distribute_strategy_scope.setter
def _global_distribute_strategy_scope(self, distribute_strategy_scope):
self._thread_local.distribute_strategy_scope = (distribute_strategy_scope)
@property
def _auto_cast_variable_read_dtype(self):
"""The dtype that instances of `AutoCastVariable` will be casted to.
This is None if `AutoCastVariables` should not be casted.
See `AutoCastVariable` for more information.
Returns:
The dtype that instances of `AutoCastVariable` will be casted to.
"""
if not hasattr(self._thread_local, "_auto_cast_variable_read_dtype"):
self._thread_local._auto_cast_variable_read_dtype = None # pylint: disable=protected-access
return self._thread_local._auto_cast_variable_read_dtype # pylint: disable=protected-access
@_auto_cast_variable_read_dtype.setter
def _auto_cast_variable_read_dtype(self, _auto_cast_variable_read_dtype):
self._thread_local._auto_cast_variable_read_dtype = ( # pylint: disable=protected-access
_auto_cast_variable_read_dtype)
@tf_contextlib.contextmanager
def _enable_auto_casting_variables(self, dtype):
"""Context manager to automatically cast AutoCastVariables.
If an AutoCastVariable `var` is used under this context manager, it will be
casted to `dtype` before being used.
See `AutoCastVariable` for more information.
Args:
dtype: The dtype that AutoCastVariables should be casted to.
Yields:
Nothing.
"""
prev_read_dtype = self._auto_cast_variable_read_dtype
try:
self._auto_cast_variable_read_dtype = dtype
yield
finally:
self._auto_cast_variable_read_dtype = prev_read_dtype
def _mutation_lock(self):
"""Returns a lock to guard code that creates & mutates ops.
See the comment for self._group_lock for more info.
"""
return self._group_lock.group(_MUTATION_LOCK_GROUP)
def _session_run_lock(self):
"""Returns a lock to guard code for Session.run.
See the comment for self._group_lock for more info.
"""
return self._group_lock.group(_SESSION_RUN_LOCK_GROUP)
# TODO(agarwal): currently device directives in an outer eager scope will not
# apply to inner graph mode code. Fix that.
@tf_export(v1=["device"])
def device(device_name_or_function):
"""Wrapper for `Graph.device()` using the default graph.
See `tf.Graph.device` for more details.
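  For example (a minimal sketch; the device string below is illustrative):
  ```python
  with tf.device("/device:CPU:0"):
    a = tf.constant([1.0, 2.0])  # `a` is pinned to CPU 0
  ```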
Args:
device_name_or_function: The device name or function to use in the context.
Returns:
A context manager that specifies the default device to use for newly
created ops.
Raises:
RuntimeError: If eager execution is enabled and a function is passed in.
"""
if context.executing_eagerly():
# TODO(agarwal): support device functions in EAGER mode.
if callable(device_name_or_function):
raise RuntimeError(
"tf.device does not support functions when eager execution "
"is enabled.")
return context.device(device_name_or_function)
else:
return get_default_graph().device(device_name_or_function)
@tf_export("device", v1=[])
def device_v2(device_name):
"""Specifies the device for ops created/executed in this context.
`device_name` can be fully specified, as in "/job:worker/task:1/device:cpu:0",
or partially specified, containing only a subset of the "/"-separated
fields. Any fields which are specified override device annotations from outer
scopes. For example:
```python
with tf.device('/job:foo'):
# ops created here have devices with /job:foo
with tf.device('/job:bar/task:0/device:gpu:2'):
# ops created here have the fully specified device above
with tf.device('/device:gpu:1'):
# ops created here have the device '/job:foo/device:gpu:1'
```
Args:
device_name: The device name to use in the context.
Returns:
A context manager that specifies the default device to use for newly
created ops.
Raises:
RuntimeError: If a function is passed in.
"""
if callable(device_name):
raise RuntimeError("tf.device does not support functions.")
if context.executing_eagerly():
return context.device(device_name)
else:
return get_default_graph().device(device_name)
@tf_export(v1=["container"])
def container(container_name):
"""Wrapper for `Graph.container()` using the default graph.
Args:
container_name: The container string to use in the context.
Returns:
A context manager that specifies the default container to use for newly
created stateful ops.
"""
return get_default_graph().container(container_name)
def _colocate_with_for_gradient(op, gradient_uid, ignore_existing=False):
if context.executing_eagerly():
if op is not None:
if not hasattr(op, "device"):
op = internal_convert_to_tensor_or_indexed_slices(op)
return device(op.device)
else:
return NullContextmanager()
else:
default_graph = get_default_graph()
if isinstance(op, EagerTensor):
if default_graph.building_function:
return default_graph.device(op.device)
else:
raise ValueError("Encountered an Eager-defined Tensor during graph "
"construction, but a function was not being built.")
return default_graph._colocate_with_for_gradient(
op, gradient_uid=gradient_uid, ignore_existing=ignore_existing)
# Internal interface to colocate_with. colocate_with has been deprecated from
# public API. There are still a few internal uses of colocate_with. Add internal
# only API for those uses to avoid deprecation warning.
def colocate_with(op, ignore_existing=False):
return _colocate_with_for_gradient(op, None, ignore_existing=ignore_existing)
@deprecation.deprecated(
date=None, instructions="Colocations handled automatically by placer.")
@tf_export(v1=["colocate_with"])
def _colocate_with(op, ignore_existing=False):
return colocate_with(op, ignore_existing)
@tf_export("control_dependencies")
def control_dependencies(control_inputs):
"""Wrapper for `Graph.control_dependencies()` using the default graph.
See `tf.Graph.control_dependencies`
for more details.
When eager execution is enabled, any callable object in the `control_inputs`
list will be called.
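  For example (a minimal sketch; `a` and `b` are illustrative tensors):
  ```python
  a = tf.constant(1.0)
  b = tf.constant(2.0)
  with tf.control_dependencies([a, b]):
    c = tf.add(a, b)  # `c` runs only after `a` and `b` have executed
  ```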
Args:
control_inputs: A list of `Operation` or `Tensor` objects which must be
executed or computed before running the operations defined in the context.
Can also be `None` to clear the control dependencies. If eager execution
is enabled, any callable object in the `control_inputs` list will be
called.
Returns:
A context manager that specifies control dependencies for all
operations constructed within the context.
"""
if context.executing_eagerly():
if control_inputs:
      # Execute any pending callables.
for control in control_inputs:
if callable(control):
control()
return NullContextmanager()
else:
return get_default_graph().control_dependencies(control_inputs)
class _DefaultStack(threading.local):
"""A thread-local stack of objects for providing implicit defaults."""
def __init__(self):
super(_DefaultStack, self).__init__()
self._enforce_nesting = True
self.stack = []
def get_default(self):
return self.stack[-1] if len(self.stack) >= 1 else None
def reset(self):
self.stack = []
def is_cleared(self):
return not self.stack
@property
def enforce_nesting(self):
return self._enforce_nesting
@enforce_nesting.setter
def enforce_nesting(self, value):
self._enforce_nesting = value
@tf_contextlib.contextmanager
def get_controller(self, default):
"""A context manager for manipulating a default stack."""
self.stack.append(default)
try:
yield default
finally:
# stack may be empty if reset() was called
if self.stack:
if self._enforce_nesting:
if self.stack[-1] is not default:
raise AssertionError(
"Nesting violated for default stack of %s objects" %
type(default))
self.stack.pop()
else:
self.stack.remove(default)
_default_session_stack = _DefaultStack() # pylint: disable=protected-access
def default_session(session):
"""Python "with" handler for defining a default session.
This function provides a means of registering a session for handling
Tensor.eval() and Operation.run() calls. It is primarily intended for use
by session.Session, but can be used with any object that implements
the Session.run() interface.
Use with the "with" keyword to specify that Tensor.eval() and Operation.run()
invocations within the scope of a block should be executed by a particular
session.
The default session applies to the current thread only, so it is always
possible to inspect the call stack and determine the scope of a default
session. If you create a new thread, and wish to use the default session
in that thread, you must explicitly add a "with ops.default_session(sess):"
block in that thread's function.
Example:
The following code examples are equivalent:
# 1. Using the Session object directly:
sess = ...
c = tf.constant(5.0)
sess.run(c)
# 2. Using default_session():
sess = ...
with ops.default_session(sess):
c = tf.constant(5.0)
result = c.eval()
# 3. Overriding default_session():
sess = ...
with ops.default_session(sess):
c = tf.constant(5.0)
with ops.default_session(...):
c.eval(session=sess)
Args:
session: The session to be installed as the default session.
Returns:
A context manager for the default session.
"""
return _default_session_stack.get_controller(session)
@tf_export(v1=["get_default_session"])
def get_default_session():
"""Returns the default session for the current thread.
The returned `Session` will be the innermost session on which a
`Session` or `Session.as_default()` context has been entered.
NOTE: The default session is a property of the current thread. If you
create a new thread, and wish to use the default session in that
thread, you must explicitly add a `with sess.as_default():` in that
thread's function.
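  For example (a minimal sketch):
  ```python
  sess = tf.compat.v1.Session()
  with sess.as_default():
    assert tf.compat.v1.get_default_session() is sess
  ```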
Returns:
The default `Session` being used in the current thread.
"""
return _default_session_stack.get_default()
def _eval_using_default_session(tensors, feed_dict, graph, session=None):
"""Uses the default session to evaluate one or more tensors.
Args:
tensors: A single Tensor, or a list of Tensor objects.
feed_dict: A dictionary that maps Tensor objects (or tensor names) to lists,
numpy ndarrays, TensorProtos, or strings.
graph: The graph in which the tensors are defined.
session: (Optional) A different session to use to evaluate "tensors".
Returns:
Either a single numpy ndarray if "tensors" is a single tensor; or a list
of numpy ndarrays that each correspond to the respective element in
"tensors".
Raises:
ValueError: If no default session is available; the default session
does not have "graph" as its graph; or if "session" is specified,
and it does not have "graph" as its graph.
"""
if session is None:
session = get_default_session()
if session is None:
raise ValueError("Cannot evaluate tensor using `eval()`: No default "
"session is registered. Use `with "
"sess.as_default()` or pass an explicit session to "
"`eval(session=sess)`")
if session.graph is not graph:
raise ValueError("Cannot use the default session to evaluate tensor: "
"the tensor's graph is different from the session's "
"graph. Pass an explicit session to "
"`eval(session=sess)`.")
else:
if session.graph is not graph:
raise ValueError("Cannot use the given session to evaluate tensor: "
"the tensor's graph is different from the session's "
"graph.")
return session.run(tensors, feed_dict)
def _run_using_default_session(operation, feed_dict, graph, session=None):
"""Uses the default session to run "operation".
Args:
operation: The Operation to be run.
feed_dict: A dictionary that maps Tensor objects (or tensor names) to lists,
numpy ndarrays, TensorProtos, or strings.
graph: The graph in which "operation" is defined.
session: (Optional) A different session to use to run "operation".
Raises:
ValueError: If no default session is available; the default session
does not have "graph" as its graph; or if "session" is specified,
and it does not have "graph" as its graph.
"""
if session is None:
session = get_default_session()
if session is None:
raise ValueError("Cannot execute operation using `run()`: No default "
"session is registered. Use `with "
"sess.as_default():` or pass an explicit session to "
"`run(session=sess)`")
if session.graph is not graph:
raise ValueError("Cannot use the default session to execute operation: "
"the operation's graph is different from the "
"session's graph. Pass an explicit session to "
"run(session=sess).")
else:
if session.graph is not graph:
raise ValueError("Cannot use the given session to execute operation: "
"the operation's graph is different from the session's "
"graph.")
session.run(operation, feed_dict)
class _DefaultGraphStack(_DefaultStack): # pylint: disable=protected-access
"""A thread-local stack of objects for providing an implicit default graph."""
def __init__(self):
super(_DefaultGraphStack, self).__init__()
self._global_default_graph = None
def get_default(self):
"""Override that returns a global default if the stack is empty."""
ret = super(_DefaultGraphStack, self).get_default()
if ret is None:
ret = self._GetGlobalDefaultGraph()
return ret
def _GetGlobalDefaultGraph(self):
if self._global_default_graph is None:
      # TODO(mrry): Perhaps log that the default graph is being used, or
      # provide some other feedback to prevent confusion when a mixture of
# the global default graph and an explicit graph are combined in the
# same process.
self._global_default_graph = Graph()
return self._global_default_graph
def reset(self):
super(_DefaultGraphStack, self).reset()
self._global_default_graph = None
@tf_contextlib.contextmanager
def get_controller(self, default):
context.context().context_switches.push(default.building_function,
default.as_default,
default._device_function_stack)
try:
with super(_DefaultGraphStack,
self).get_controller(default) as g, context.graph_mode():
yield g
finally:
# If an exception is raised here it may be hiding a related exception in
# the try-block (just above).
context.context().context_switches.pop()
_default_graph_stack = _DefaultGraphStack()
# pylint: disable=g-doc-return-or-yield,line-too-long
@tf_export("init_scope")
@tf_contextlib.contextmanager
def init_scope():
"""A context manager that lifts ops out of control-flow scopes and function-building graphs.
There is often a need to lift variable initialization ops out of control-flow
scopes, function-building graphs, and gradient tapes. Entering an
`init_scope` is a mechanism for satisfying these desiderata. In particular,
entering an `init_scope` has three effects:
(1) All control dependencies are cleared the moment the scope is entered;
this is equivalent to entering the context manager returned from
`control_dependencies(None)`, which has the side-effect of exiting
control-flow scopes like `tf.cond` and `tf.while_loop`.
(2) All operations that are created while the scope is active are lifted
into the lowest context on the `context_stack` that is not building a
graph function. Here, a context is defined as either a graph or an eager
context. Every context switch, i.e., every installation of a graph as
the default graph and every switch into eager mode, is logged in a
thread-local stack called `context_switches`; the log entry for a
context switch is popped from the stack when the context is exited.
Entering an `init_scope` is equivalent to crawling up
`context_switches`, finding the first context that is not building a
graph function, and entering it. A caveat is that if graph mode is
enabled but the default graph stack is empty, then entering an
`init_scope` will simply install a fresh graph as the default one.
(3) The gradient tape is paused while the scope is active.
When eager execution is enabled, code inside an init_scope block runs with
eager execution enabled even when defining graph functions via
tf.contrib.eager.defun. For example:
```python
tf.compat.v1.enable_eager_execution()
@tf.contrib.eager.defun
def func():
# A defun-decorated function constructs TensorFlow graphs,
# it does not execute eagerly.
assert not tf.executing_eagerly()
with tf.init_scope():
# Initialization runs with eager execution enabled
assert tf.executing_eagerly()
```
Raises:
RuntimeError: if graph state is incompatible with this initialization.
"""
# pylint: enable=g-doc-return-or-yield,line-too-long
if context.executing_eagerly():
# Fastpath.
with tape.stop_recording():
yield
else:
# Retrieve the active name scope: entering an `init_scope` preserves
# the name scope of the current context.
default_graph = get_default_graph()
scope = default_graph.get_name_scope()
if scope and scope[-1] != "/":
# Names that end with trailing slashes are treated by `name_scope` as
# absolute.
scope = scope + "/"
innermost_nonempty_device_stack = default_graph._device_function_stack # pylint: disable=protected-access
outer_context = None
if not _default_graph_stack.stack:
# If the default graph stack is empty, then we cannot be building a
# function. Install the global graph (which, in this case, is also the
# default graph) as the outer context.
if default_graph.building_function:
raise RuntimeError("The global graph is building a function.")
outer_context = default_graph.as_default
else:
# Find a context that is not building a function.
for stack_entry in reversed(context.context().context_switches.stack):
if not innermost_nonempty_device_stack:
innermost_nonempty_device_stack = stack_entry.device_stack
if not stack_entry.is_building_function:
outer_context = stack_entry.enter_context_fn
break
if outer_context is None:
# As a last resort, obtain the global default graph; this graph doesn't
# necessarily live on the graph stack (and hence it doesn't necessarily
# live on the context stack), but it is stored in the graph stack's
# encapsulating object.
outer_context = _default_graph_stack._GetGlobalDefaultGraph().as_default # pylint: disable=protected-access
if outer_context is None:
# Sanity check; this shouldn't be triggered.
raise RuntimeError("All graphs are building functions, and no "
"eager context was previously active.")
outer_graph = None
outer_device_stack = None
try:
with outer_context(), name_scope(scope), control_dependencies(
None), tape.stop_recording():
context_manager = NullContextmanager
context_manager_input = None
if not context.executing_eagerly():
# The device stack is preserved when lifting into a graph. Eager
# execution doesn't implement device stacks and in particular it
# doesn't support device functions, so in general it's not possible
# to do the same when lifting into the eager context.
outer_graph = get_default_graph()
outer_device_stack = outer_graph._device_function_stack # pylint: disable=protected-access
outer_graph._device_function_stack = innermost_nonempty_device_stack # pylint: disable=protected-access
elif innermost_nonempty_device_stack is not None:
for device_spec in innermost_nonempty_device_stack.peek_objs():
if device_spec.function is None:
break
if device_spec.raw_string:
context_manager = context.device
context_manager_input = device_spec.raw_string
break
# It is currently not possible to have a device function in V2,
# but in V1 we are unable to apply device functions in eager mode.
# This means that we will silently skip some of the entries on the
# device stack in V1 + eager mode.
with context_manager(context_manager_input):
yield
finally:
# If an exception is raised here it may be hiding a related exception in
# try-block (just above).
if outer_graph is not None:
outer_graph._device_function_stack = outer_device_stack # pylint: disable=protected-access
def executing_eagerly_outside_functions():
"""Returns True if executing eagerly, even if inside a graph function."""
  # Fastpath for when this is called eagerly (it's not necessary to init_scope).
if context.executing_eagerly():
return True
with init_scope():
return context.executing_eagerly()
def inside_function():
return get_default_graph().building_function
@tf_export(v1=["enable_eager_execution"])
def enable_eager_execution(config=None, device_policy=None,
execution_mode=None):
"""Enables eager execution for the lifetime of this program.
Eager execution provides an imperative interface to TensorFlow. With eager
execution enabled, TensorFlow functions execute operations immediately (as
opposed to adding to a graph to be executed later in a `tf.compat.v1.Session`)
and
return concrete values (as opposed to symbolic references to a node in a
computational graph).
For example:
```python
tf.compat.v1.enable_eager_execution()
# After eager execution is enabled, operations are executed as they are
# defined and Tensor objects hold concrete values, which can be accessed as
# numpy.ndarray`s through the numpy() method.
assert tf.multiply(6, 7).numpy() == 42
```
Eager execution cannot be enabled after TensorFlow APIs have been used to
create or execute graphs. It is typically recommended to invoke this function
at program startup and not in a library (as most libraries should be usable
both with and without eager execution).
Args:
config: (Optional.) A `tf.compat.v1.ConfigProto` to use to configure the
environment in which operations are executed. Note that
`tf.compat.v1.ConfigProto` is also used to configure graph execution (via
`tf.compat.v1.Session`) and many options within `tf.compat.v1.ConfigProto`
are not implemented (or are irrelevant) when eager execution is enabled.
device_policy: (Optional.) Policy controlling how operations requiring
inputs on a specific device (e.g., a GPU 0) handle inputs on a different
device (e.g. GPU 1 or CPU). When set to None, an appropriate value will
be picked automatically. The value picked may change between TensorFlow
releases.
Valid values:
- tf.contrib.eager.DEVICE_PLACEMENT_EXPLICIT: raises an error if the
placement is not correct.
- tf.contrib.eager.DEVICE_PLACEMENT_WARN: copies the tensors which are not
on the right device but logs a warning.
- tf.contrib.eager.DEVICE_PLACEMENT_SILENT: silently copies the tensors.
Note that this may hide performance problems as there is no notification
provided when operations are blocked on the tensor being copied between
devices.
- tf.contrib.eager.DEVICE_PLACEMENT_SILENT_FOR_INT32: silently copies
int32 tensors, raising errors on the other ones.
    execution_mode: (Optional.) Policy controlling how dispatched operations
      are actually executed. When set to None, an appropriate value will be picked
automatically. The value picked may change between TensorFlow releases.
Valid values:
- tf.contrib.eager.SYNC: executes each operation synchronously.
- tf.contrib.eager.ASYNC: executes each operation asynchronously. These
operations may return "non-ready" handles.
Raises:
ValueError: If eager execution is enabled after creating/executing a
TensorFlow graph, or if options provided conflict with a previous call
to this function.
"""
if context.default_execution_mode != context.EAGER_MODE:
return enable_eager_execution_internal(
config=config,
device_policy=device_policy,
execution_mode=execution_mode,
server_def=None)
@tf_export(v1=["disable_eager_execution"])
def disable_eager_execution():
"""Disables eager execution.
This function can only be called before any Graphs, Ops, or Tensors have been
created. It can be used at the beginning of the program for complex migration
projects from TensorFlow 1.x to 2.x.
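  For example (a minimal sketch; must run before any graph or op is created):
  ```python
  import tensorflow as tf
  tf.compat.v1.disable_eager_execution()
  assert not tf.executing_eagerly()
  ```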
"""
context.default_execution_mode = context.GRAPH_MODE
c = context.context_safe()
if c is not None:
c._thread_local_data.is_eager = False # pylint: disable=protected-access
def enable_eager_execution_internal(config=None,
device_policy=None,
execution_mode=None,
server_def=None):
"""Enables eager execution for the lifetime of this program.
Most of the doc string for enable_eager_execution is relevant here as well.
Args:
config: See enable_eager_execution doc string
device_policy: See enable_eager_execution doc string
execution_mode: See enable_eager_execution doc string
server_def: (Optional.) A tensorflow::ServerDef proto. Enables execution on
remote devices. GrpcServers need to be started by creating an identical
server_def to this, and setting the appropriate task_indexes, so that the
servers can communicate. It will then be possible to execute operations on
remote devices.
Raises:
ValueError
"""
if config is not None and not isinstance(config, config_pb2.ConfigProto):
raise TypeError("config must be a tf.ConfigProto, but got %s" %
type(config))
if device_policy not in (None, context.DEVICE_PLACEMENT_EXPLICIT,
context.DEVICE_PLACEMENT_WARN,
context.DEVICE_PLACEMENT_SILENT,
context.DEVICE_PLACEMENT_SILENT_FOR_INT32):
raise ValueError(
"device_policy must be one of None, tf.contrib.eager.DEVICE_PLACEMENT_*"
)
if execution_mode not in (None, context.SYNC, context.ASYNC):
raise ValueError(
"execution_mode must be one of None, tf.contrib.eager.SYNC, "
"tf.contrib.eager.ASYNC")
if context.default_execution_mode == context.GRAPH_MODE:
graph_mode_has_been_used = (
_default_graph_stack._global_default_graph is not None) # pylint: disable=protected-access
if graph_mode_has_been_used:
raise ValueError(
"tf.enable_eager_execution must be called at program startup.")
context.default_execution_mode = context.EAGER_MODE
# pylint: disable=protected-access
if context._context is None:
context._context = context.Context(
config=config,
device_policy=device_policy,
execution_mode=execution_mode,
server_def=server_def)
elif ((config is not None and config is not context._context._config) or
(device_policy is not None and
device_policy is not context._context._device_policy) or
(execution_mode is not None and
execution_mode is not context._context._execution_mode)):
raise ValueError(
"Trying to change the options of an active eager"
" execution. Context config: %s, specified config:"
" %s. Context device policy: %s, specified device"
" policy: %s. Context execution mode: %s, "
" specified execution mode %s." %
(context._context._config, config, context._context._device_policy,
device_policy, context._context._execution_mode, execution_mode))
else:
# We already created everything, so update the thread local data.
context._context._thread_local_data.is_eager = True
# Monkey patch to get rid of an unnecessary conditional since the context is
# now initialized.
context.context = context.context_safe
def eager_run(main=None, argv=None):
"""Runs the program with an optional main function and argv list.
The program will run with eager execution enabled.
Example:
```python
import tensorflow as tf
# Import subject to future changes:
from tensorflow.contrib.eager.python import tfe
def main(_):
u = tf.constant(6.0)
v = tf.constant(7.0)
print(u * v)
if __name__ == "__main__":
tfe.run()
```
Args:
main: the main function to run.
argv: the arguments to pass to it.
"""
enable_eager_execution()
app.run(main, argv)
@tf_export(v1=["reset_default_graph"])
def reset_default_graph():
"""Clears the default graph stack and resets the global default graph.
NOTE: The default graph is a property of the current thread. This
function applies only to the current thread. Calling this function while
a `tf.compat.v1.Session` or `tf.compat.v1.InteractiveSession` is active will
result in undefined
behavior. Using any previously created `tf.Operation` or `tf.Tensor` objects
after calling this function will result in undefined behavior.
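  For example (a minimal sketch, executed at top level with no nested graphs):
  ```python
  g1 = tf.compat.v1.get_default_graph()
  tf.compat.v1.reset_default_graph()
  assert tf.compat.v1.get_default_graph() is not g1
  ```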
Raises:
AssertionError: If this function is called within a nested graph.
"""
if not _default_graph_stack.is_cleared():
raise AssertionError("Do not use tf.reset_default_graph() to clear "
"nested graphs. If you need a cleared graph, "
"exit the nesting and create a new graph.")
_default_graph_stack.reset()
@tf_export(v1=["get_default_graph"])
def get_default_graph():
"""Returns the default graph for the current thread.
The returned graph will be the innermost graph on which a
`Graph.as_default()` context has been entered, or a global default
graph if none has been explicitly created.
NOTE: The default graph is a property of the current thread. If you
create a new thread, and wish to use the default graph in that
thread, you must explicitly add a `with g.as_default():` in that
thread's function.
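  For example (a minimal sketch, assuming graph mode):
  ```python
  c = tf.constant(4.0)
  assert c.graph is tf.compat.v1.get_default_graph()
  ```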
Returns:
The default `Graph` being used in the current thread.
"""
return _default_graph_stack.get_default()
def has_default_graph():
"""Returns True if there is a default graph."""
return len(_default_graph_stack.stack) >= 1
def get_name_scope():
"""Returns the current name scope in the default_graph.
For example:
```python
with tf.name_scope('scope1'):
with tf.name_scope('scope2'):
print(tf.get_name_scope())
```
would print the string `scope1/scope2`.
Returns:
A string representing the current name scope.
"""
if context.executing_eagerly():
return context.context().scope_name.rstrip("/")
return get_default_graph().get_name_scope()
def _assert_same_graph(original_item, item):
"""Fail if the 2 items are from different graphs.
Args:
original_item: Original item to check against.
item: Item to check.
Raises:
ValueError: if graphs do not match.
"""
if original_item.graph is not item.graph:
raise ValueError("%s must be from the same graph as %s." %
(item, original_item))
def _get_graph_from_inputs(op_input_list, graph=None):
"""Returns the appropriate graph to use for the given inputs.
This library method provides a consistent algorithm for choosing the graph
in which an Operation should be constructed:
1. If the default graph is being used to construct a function, we
use the default graph.
2. If the "graph" is specified explicitly, we validate that all of the inputs
in "op_input_list" are compatible with that graph.
3. Otherwise, we attempt to select a graph from the first Operation-
or Tensor-valued input in "op_input_list", and validate that all other
such inputs are in the same graph.
4. If the graph was not specified and it could not be inferred from
"op_input_list", we attempt to use the default graph.
Args:
op_input_list: A list of inputs to an operation, which may include `Tensor`,
`Operation`, and other objects that may be converted to a graph element.
graph: (Optional) The explicit graph to use.
Raises:
TypeError: If op_input_list is not a list or tuple, or if graph is not a
Graph.
ValueError: If a graph is explicitly passed and not all inputs are from it,
or if the inputs are from multiple graphs, or we could not find a graph
and there was no default graph.
Returns:
The appropriate graph to use for the given inputs.
"""
if get_default_graph().building_function:
return get_default_graph()
op_input_list = tuple(op_input_list) # Handle generators correctly
if graph and not isinstance(graph, Graph):
raise TypeError("Input graph needs to be a Graph: %s" % graph)
# 1. We validate that all of the inputs are from the same graph. This is
# either the supplied graph parameter, or the first one selected from one
# the graph-element-valued inputs. In the latter case, we hold onto
# that input in original_graph_element so we can provide a more
# informative error if a mismatch is found.
original_graph_element = None
for op_input in op_input_list:
# Determine if this is a valid graph_element.
# TODO(josh11b): Note that we exclude subclasses of Tensor. Need to clean this
# up.
graph_element = None
if (isinstance(op_input, (Operation, _TensorLike)) and
((not isinstance(op_input, Tensor)) or type(op_input) == Tensor)): # pylint: disable=unidiomatic-typecheck
graph_element = op_input
else:
graph_element = _as_graph_element(op_input)
if graph_element is not None:
if not graph:
original_graph_element = graph_element
graph = graph_element.graph
elif original_graph_element is not None:
_assert_same_graph(original_graph_element, graph_element)
elif graph_element.graph is not graph:
raise ValueError("%s is not from the passed-in graph." % graph_element)
# 2. If all else fails, we use the default graph, which is always there.
return graph or get_default_graph()
@tf_export(v1=["GraphKeys"])
class GraphKeys(object):
"""Standard names to use for graph collections.
The standard library uses various well-known names to collect and
retrieve values associated with a graph. For example, the
`tf.Optimizer` subclasses default to optimizing the variables
collected under `tf.GraphKeys.TRAINABLE_VARIABLES` if none is
specified, but it is also possible to pass an explicit list of
variables.
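  For example (a minimal sketch, assuming graph mode):
  ```python
  v = tf.compat.v1.get_variable("v", shape=[1])
  assert v in tf.compat.v1.get_collection(tf.compat.v1.GraphKeys.GLOBAL_VARIABLES)
  ```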
The following standard keys are defined:
* `GLOBAL_VARIABLES`: the default collection of `Variable` objects, shared
across distributed environment (model variables are subset of these). See
`tf.compat.v1.global_variables`
for more details.
Commonly, all `TRAINABLE_VARIABLES` variables will be in `MODEL_VARIABLES`,
and all `MODEL_VARIABLES` variables will be in `GLOBAL_VARIABLES`.
* `LOCAL_VARIABLES`: the subset of `Variable` objects that are local to each
    machine. Usually used for temporary variables, like counters.
Note: use `tf.contrib.framework.local_variable` to add to this collection.
* `MODEL_VARIABLES`: the subset of `Variable` objects that are used in the
model for inference (feed forward). Note: use
`tf.contrib.framework.model_variable` to add to this collection.
* `TRAINABLE_VARIABLES`: the subset of `Variable` objects that will
be trained by an optimizer. See
`tf.compat.v1.trainable_variables`
for more details.
* `SUMMARIES`: the summary `Tensor` objects that have been created in the
graph. See
`tf.compat.v1.summary.merge_all`
for more details.
* `QUEUE_RUNNERS`: the `QueueRunner` objects that are used to
produce input for a computation. See
`tf.compat.v1.train.start_queue_runners`
for more details.
* `MOVING_AVERAGE_VARIABLES`: the subset of `Variable` objects that will also
keep moving averages. See
`tf.compat.v1.moving_average_variables`
for more details.
* `REGULARIZATION_LOSSES`: regularization losses collected during graph
construction.
The following standard keys are _defined_, but their collections are **not**
automatically populated as many of the others are:
* `WEIGHTS`
* `BIASES`
* `ACTIVATIONS`
"""
# Key to collect Variable objects that are global (shared across machines).
# Default collection for all variables, except local ones.
GLOBAL_VARIABLES = "variables"
# Key to collect local variables that are local to the machine and are not
# saved/restored.
LOCAL_VARIABLES = "local_variables"
  # Key to collect local variables which are used to accumulate internal state
# to be used in tf.metrics.*.
METRIC_VARIABLES = "metric_variables"
# Key to collect model variables defined by layers.
MODEL_VARIABLES = "model_variables"
# Key to collect Variable objects that will be trained by the
# optimizers.
TRAINABLE_VARIABLES = "trainable_variables"
# Key to collect summaries.
SUMMARIES = "summaries"
# Key to collect QueueRunners.
QUEUE_RUNNERS = "queue_runners"
# Key to collect table initializers.
TABLE_INITIALIZERS = "table_initializer"
# Key to collect asset filepaths. An asset represents an external resource
# like a vocabulary file.
ASSET_FILEPATHS = "asset_filepaths"
# Key to collect Variable objects that keep moving averages.
MOVING_AVERAGE_VARIABLES = "moving_average_variables"
# Key to collect regularization losses at graph construction.
REGULARIZATION_LOSSES = "regularization_losses"
# Key to collect concatenated sharded variables.
CONCATENATED_VARIABLES = "concatenated_variables"
# Key to collect savers.
SAVERS = "savers"
# Key to collect weights
WEIGHTS = "weights"
# Key to collect biases
BIASES = "biases"
# Key to collect activations
ACTIVATIONS = "activations"
# Key to collect update_ops
UPDATE_OPS = "update_ops"
# Key to collect losses
LOSSES = "losses"
# Key to collect BaseSaverBuilder.SaveableObject instances for checkpointing.
SAVEABLE_OBJECTS = "saveable_objects"
# Key to collect all shared resources used by the graph which need to be
# initialized once per cluster.
RESOURCES = "resources"
# Key to collect all shared resources used in this graph which need to be
# initialized once per session.
LOCAL_RESOURCES = "local_resources"
# Trainable resource-style variables.
TRAINABLE_RESOURCE_VARIABLES = "trainable_resource_variables"
# Key to indicate various ops.
INIT_OP = "init_op"
LOCAL_INIT_OP = "local_init_op"
READY_OP = "ready_op"
READY_FOR_LOCAL_INIT_OP = "ready_for_local_init_op"
SUMMARY_OP = "summary_op"
GLOBAL_STEP = "global_step"
# Used to count the number of evaluations performed during a single evaluation
# run.
EVAL_STEP = "eval_step"
TRAIN_OP = "train_op"
# Key for control flow context.
COND_CONTEXT = "cond_context"
WHILE_CONTEXT = "while_context"
# Used to store v2 summary names.
_SUMMARY_COLLECTION = "_SUMMARY_V2"
# List of all collections that keep track of variables.
_VARIABLE_COLLECTIONS = [
GLOBAL_VARIABLES,
LOCAL_VARIABLES,
METRIC_VARIABLES,
MODEL_VARIABLES,
TRAINABLE_VARIABLES,
MOVING_AVERAGE_VARIABLES,
CONCATENATED_VARIABLES,
TRAINABLE_RESOURCE_VARIABLES,
]
# Key for streaming model ports.
# NOTE(yuanbyu): internal and experimental.
_STREAMING_MODEL_PORTS = "streaming_model_ports"
@decorator_utils.classproperty
@deprecation.deprecated(None, "Use `tf.GraphKeys.GLOBAL_VARIABLES` instead.")
def VARIABLES(cls): # pylint: disable=no-self-argument
return cls.GLOBAL_VARIABLES
def dismantle_graph(graph):
"""Cleans up reference cycles from a `Graph`.
Helpful for making sure the garbage collector doesn't need to run after a
temporary `Graph` is no longer needed.
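  For example (a minimal sketch):
  ```python
  g = tf.Graph()
  with g.as_default():
    tf.constant(1.0)
  dismantle_graph(g)  # `g` and its ops must not be used after this point
  ```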
Args:
graph: A `Graph` object to destroy. Neither it nor any of its ops are usable
after this function runs.
"""
memory.dismantle_ordered_dict(graph._functions) # pylint: disable=protected-access
# Now clean up Operation<->Graph reference cycles by clearing all of the
# attributes for the Graph and its ops.
graph_operations = graph.get_operations()
for op in graph_operations:
op.__dict__ = {}
graph.__dict__ = {}
@tf_export(v1=["add_to_collection"])
def add_to_collection(name, value):
"""Wrapper for `Graph.add_to_collection()` using the default graph.
See `tf.Graph.add_to_collection`
for more details.
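  For example (a minimal sketch in graph mode; the collection name is
  illustrative):
  ```python
  c = tf.constant(1.0)
  tf.compat.v1.add_to_collection("my_collection", c)
  assert tf.compat.v1.get_collection("my_collection") == [c]
  ```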
Args:
name: The key for the collection. For example, the `GraphKeys` class
contains many standard names for collections.
value: The value to add to the collection. @compatibility(eager)
Collections are only supported in eager when variables are created inside
an EagerVariableStore (e.g. as part of a layer or template).
@end_compatibility
"""
get_default_graph().add_to_collection(name, value)
@tf_export(v1=["add_to_collections"])
def add_to_collections(names, value):
"""Wrapper for `Graph.add_to_collections()` using the default graph.
See `tf.Graph.add_to_collections`
for more details.
Args:
names: The key for the collections. The `GraphKeys` class contains many
standard names for collections.
value: The value to add to the collections. @compatibility(eager)
Collections are only supported in eager when variables are created inside
an EagerVariableStore (e.g. as part of a layer or template).
@end_compatibility
"""
get_default_graph().add_to_collections(names, value)
@tf_export(v1=["get_collection_ref"])
def get_collection_ref(key):
"""Wrapper for `Graph.get_collection_ref()` using the default graph.
See `tf.Graph.get_collection_ref`
for more details.
Args:
key: The key for the collection. For example, the `GraphKeys` class contains
many standard names for collections.
Returns:
The list of values in the collection with the given `name`, or an empty
list if no value has been added to that collection. Note that this returns
the collection list itself, which can be modified in place to change the
collection.
@compatibility(eager)
Collections are not supported when eager execution is enabled.
@end_compatibility
"""
return get_default_graph().get_collection_ref(key)
@tf_export(v1=["get_collection"])
def get_collection(key, scope=None):
"""Wrapper for `Graph.get_collection()` using the default graph.
See `tf.Graph.get_collection`
for more details.
Args:
key: The key for the collection. For example, the `GraphKeys` class contains
many standard names for collections.
scope: (Optional.) If supplied, the resulting list is filtered to include
only items whose `name` attribute matches using `re.match`. Items without
      a `name` attribute are never returned if a scope is supplied. The
      choice of `re.match` means that a `scope` without special tokens filters
by prefix.
Returns:
The list of values in the collection with the given `name`, or
an empty list if no value has been added to that collection. The
list contains the values in the order under which they were
collected.
@compatibility(eager)
Collections are not supported when eager execution is enabled.
@end_compatibility
"""
return get_default_graph().get_collection(key, scope)
def get_all_collection_keys():
"""Returns a list of collections used in the default graph."""
return get_default_graph().get_all_collection_keys()
name_scope_cache = {}
# Named like a function for backwards compatibility with the
# @tf_contextlib.contextmanager version, which was switched to a class to avoid
# some object creation overhead.
@tf_export(v1=["name_scope"])
class name_scope(object): # pylint: disable=invalid-name
"""A context manager for use when defining a Python op.
This context manager validates that the given `values` are from the
same graph, makes that graph the default graph, and pushes a
name scope in that graph (see
`tf.Graph.name_scope`
for more details on that).
For example, to define a new Python op called `my_op`:
```python
def my_op(a, b, c, name=None):
with tf.name_scope(name, "MyOp", [a, b, c]) as scope:
a = tf.convert_to_tensor(a, name="a")
b = tf.convert_to_tensor(b, name="b")
c = tf.convert_to_tensor(c, name="c")
# Define some computation that uses `a`, `b`, and `c`.
return foo_op(..., name=scope)
```
"""
@property
def name(self):
return self._name
def __init__(self, name, default_name=None, values=None):
"""Initialize the context manager.
Args:
name: The name argument that is passed to the op function.
default_name: The default name to use if the `name` argument is `None`.
values: The list of `Tensor` arguments that are passed to the op function.
Raises:
TypeError: if `default_name` is passed in but not a string.
"""
if not (default_name is None or isinstance(default_name, six.string_types)):
raise TypeError(
"`default_name` type (%s) is not a string type. You likely meant to "
"pass this into the `values` kwarg." % type(default_name))
self._name = default_name if name is None else name
self._default_name = default_name
self._values = values
self._ctx = context.context()
self._in_eager_mode = self._ctx.executing_eagerly()
self._has_symbolic_input_in_eager = False
if self._values and self._in_eager_mode:
# The presence of a graph tensor in `self._values` overrides the context.
for value in self._values:
if hasattr(value, "graph"):
self._has_symbolic_input_in_eager = True
self._name_scope = value.graph.name_scope(self._name)
def __enter__(self):
"""Start the scope block.
Returns:
The scope name.
Raises:
ValueError: if neither `name` nor `default_name` is provided
but `values` are.
"""
if self._has_symbolic_input_in_eager:
return self._name_scope.__enter__()
if self._in_eager_mode:
self._old_name = self._ctx.scope_name
if not self._name:
scope_name = ""
else:
cache_key = self._name, self._old_name, self._default_name
if cache_key in name_scope_cache:
self._ctx.scope_name = name_scope_cache[cache_key]
return self._ctx.scope_name
elif self._name[-1] == "/":
# A trailing slash breaks out of nested name scopes, indicating a
# fully specified scope name, for compatibility with Graph.name_scope.
scope_name = self._name
else:
name_with_trailing_slash = self._name + "/"
scope_name = (
self._old_name + name_with_trailing_slash
if self._old_name else name_with_trailing_slash)
name_scope_cache[cache_key] = scope_name
self._ctx.scope_name = scope_name
return scope_name
else:
if self._name is None and self._values is not None:
        # We only raise an error if values is provided (not None) because
        # currently tf.name_scope(None) with values=None is sometimes used as
        # an idiom to reset to the top scope.
raise ValueError(
"At least one of name (%s) and default_name (%s) must be provided."
% (self._name, self._default_name))
if self._values is None:
self._values = []
g = _get_graph_from_inputs(self._values)
self._g_manager = g.as_default()
self._g_manager.__enter__()
try:
self._name_scope = g.name_scope(self._name)
return self._name_scope.__enter__()
except:
self._g_manager.__exit__(*sys.exc_info())
raise
def __exit__(self, type_arg, value_arg, traceback_arg):
if self._has_symbolic_input_in_eager:
self._name_scope.__exit__(type_arg, value_arg, traceback_arg)
elif self._in_eager_mode:
self._ctx.scope_name = self._old_name
else:
self._name_scope.__exit__(type_arg, value_arg, traceback_arg)
self._g_manager.__exit__(type_arg, value_arg, traceback_arg)
return False # False values do not suppress exceptions
@tf_export("name_scope", v1=[])
class name_scope_v2(name_scope):
"""A context manager for use when defining a Python op.
This context manager pushes a name scope, which will make the name of all
operations added within it have a prefix.
For example, to define a new Python op called `my_op`:
```python
def my_op(a, b, c, name=None):
with tf.name_scope("MyOp") as scope:
a = tf.convert_to_tensor(a, name="a")
b = tf.convert_to_tensor(b, name="b")
c = tf.convert_to_tensor(c, name="c")
# Define some computation that uses `a`, `b`, and `c`.
return foo_op(..., name=scope)
```
When executed, the Tensors `a`, `b`, `c`, will have names `MyOp/a`, `MyOp/b`,
and `MyOp/c`.
If the scope name already exists, the name will be made unique by appending
`_n`. For example, calling `my_op` the second time will generate `MyOp_1/a`,
etc.
"""
def __init__(self, name):
"""Initialize the context manager.
Args:
name: The prefix to use on all names created within the name scope.
Raises:
ValueError: If name is None, or not a string.
"""
if name is None or not isinstance(name, six.string_types):
raise ValueError("name for name_scope must be a string.")
super(name_scope_v2, self).__init__(name=None, default_name=name)
def strip_name_scope(name, export_scope):
"""Removes name scope from a name.
Args:
name: A `string` name.
export_scope: Optional `string`. Name scope to remove.
Returns:
Name with name scope removed, or the original name if export_scope
is None.
"""
if export_scope:
if export_scope[-1] == "/":
export_scope = export_scope[:-1]
try:
# Strips export_scope/, export_scope///,
# ^export_scope/, loc:@export_scope/.
str_to_replace = r"([\^]|loc:@|^)" + export_scope + r"[\/]+(.*)"
return re.sub(str_to_replace, r"\1\2", compat.as_str(name), count=1)
except TypeError as e:
# If the name is not of a type we can process, simply return it.
logging.warning(e)
return name
else:
return name
def prepend_name_scope(name, import_scope):
"""Prepends name scope to a name.
Args:
name: A `string` name.
import_scope: Optional `string`. Name scope to add.
Returns:
Name with name scope added, or the original name if import_scope
is None.
"""
if import_scope:
if import_scope[-1] == "/":
import_scope = import_scope[:-1]
try:
str_to_replace = r"([\^]|loc:@|^)(.*)"
return re.sub(str_to_replace, r"\1" + import_scope + r"/\2",
compat.as_str(name))
except TypeError as e:
# If the name is not of a type we can process, simply return it.
logging.warning(e)
return name
else:
return name
# pylint: disable=g-doc-return-or-yield
# pylint: disable=not-context-manager
@tf_export(v1=["op_scope"])
@tf_contextlib.contextmanager
def op_scope(values, name, default_name=None):
"""DEPRECATED. Same as name_scope above, just different argument order."""
logging.warn("tf.op_scope(values, name, default_name) is deprecated,"
" use tf.name_scope(name, default_name, values)")
with name_scope(name, default_name=default_name, values=values) as scope:
yield scope
_proto_function_registry = registry.Registry("proto functions")
def register_proto_function(collection_name,
proto_type=None,
to_proto=None,
from_proto=None):
"""Registers `to_proto` and `from_proto` functions for collection_name.
`to_proto` function converts a Python object to the corresponding protocol
buffer, and returns the protocol buffer.
  `from_proto` function converts a protocol buffer into a Python object, and
  returns the object.
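  For example (a minimal sketch; `my_to_proto` and `my_from_proto` are
  hypothetical converters for a custom collection):
  ```python
  register_proto_function(
      "my_collection",
      proto_type=saver_pb2.SaverDef,
      to_proto=my_to_proto,
      from_proto=my_from_proto)
  ```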
Args:
collection_name: Name of the collection.
proto_type: Protobuf type, such as `saver_pb2.SaverDef`,
      `variable_pb2.VariableDef`, `queue_runner_pb2.QueueRunnerDef`.
to_proto: Function that implements Python object to protobuf conversion.
from_proto: Function that implements protobuf to Python object conversion.
"""
if to_proto and not callable(to_proto):
raise TypeError("to_proto must be callable.")
if from_proto and not callable(from_proto):
raise TypeError("from_proto must be callable.")
_proto_function_registry.register((proto_type, to_proto, from_proto),
collection_name)
def get_collection_proto_type(collection_name):
"""Returns the proto_type for collection_name."""
try:
return _proto_function_registry.lookup(collection_name)[0]
except LookupError:
return None
def get_to_proto_function(collection_name):
"""Returns the to_proto function for collection_name."""
try:
return _proto_function_registry.lookup(collection_name)[1]
except LookupError:
return None
def get_from_proto_function(collection_name):
"""Returns the from_proto function for collection_name."""
try:
return _proto_function_registry.lookup(collection_name)[2]
except LookupError:
return None
def _operation_conversion_error(op, dtype=None, name=None, as_ref=False):
"""Produce a nice error if someone converts an Operation to a Tensor."""
raise TypeError(("Can't convert Operation '%s' to Tensor "
"(target dtype=%r, name=%r, as_ref=%r)") %
(op.name, dtype, name, as_ref))
def _op_to_colocate_with(v):
"""Operation object corresponding to v to use for colocation constraints."""
if v is None:
return None
if isinstance(v, Operation):
return v
# We always want to colocate with the reference op.
# When 'v' is a ResourceVariable, the reference op is the handle creating op.
#
# What this should be is:
# if isinstance(v, ResourceVariable):
# return v.handle.op
# However, that would require a circular import dependency.
# As of October 2018, there were attempts underway to remove
# colocation constraints altogether. Assuming that will
# happen soon, perhaps this hack to work around the circular
# import dependency is acceptable.
if hasattr(v, "handle") and hasattr(v.handle, "op") and isinstance(
v.handle.op, Operation):
return v.handle.op
return internal_convert_to_tensor_or_indexed_slices(v, as_ref=True).op
def _is_keras_symbolic_tensor(x):
return hasattr(x, "graph") and getattr(x.graph, "name", None) == "keras_graph"
register_tensor_conversion_function(Operation, _operation_conversion_error)
| [
"[email protected]"
]
| |
3fe4225dde822c52e3ce90dea38a7b7379b58f5c | f82757475ea13965581c2147ff57123b361c5d62 | /gi-stubs/repository/EDataServer/SourceBackend.py | 7ddb94ea9928aaba64a496c76564214d8016cf91 | []
| no_license | ttys3/pygobject-stubs | 9b15d1b473db06f47e5ffba5ad0a31d6d1becb57 | d0e6e93399212aada4386d2ce80344eb9a31db48 | refs/heads/master | 2022-09-23T12:58:44.526554 | 2020-06-06T04:15:00 | 2020-06-06T04:15:00 | 269,693,287 | 8 | 2 | null | 2020-06-05T15:57:54 | 2020-06-05T15:57:54 | null | UTF-8 | Python | false | false | 15,830 | py | # encoding: utf-8
# module gi.repository.EDataServer
# from /usr/lib64/girepository-1.0/EDataServer-1.2.typelib
# by generator 1.147
"""
An object which wraps an introspection typelib.
This wrapping creates a python module like representation of the typelib
using gi repository as a foundation. Accessing attributes of the module
will dynamically pull them in and create wrappers for the members.
These members are then cached on this introspection module.
"""
# imports
import gi as __gi
import gi.overrides.GObject as __gi_overrides_GObject
import gi.repository.Gio as __gi_repository_Gio
import gi.repository.GObject as __gi_repository_GObject
import gi.repository.Soup as __gi_repository_Soup
import gobject as __gobject
from .SourceExtension import SourceExtension
class SourceBackend(SourceExtension):
"""
:Constructors:
::
SourceBackend(**properties)
"""
def bind_property(self, *args, **kwargs): # real signature unknown
pass
def bind_property_full(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def chain(self, *args, **kwargs): # real signature unknown
pass
def compat_control(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def connect(self, *args, **kwargs): # real signature unknown
pass
def connect_after(self, *args, **kwargs): # real signature unknown
pass
def connect_data(self, detailed_signal, handler, *data, **kwargs): # reliably restored by inspect
"""
Connect a callback to the given signal with optional user data.
:param str detailed_signal:
A detailed signal to connect to.
:param callable handler:
Callback handler to connect to the signal.
:param *data:
Variable data which is passed through to the signal handler.
:param GObject.ConnectFlags connect_flags:
Flags used for connection options.
:returns:
A signal id which can be used with disconnect.
"""
pass
def connect_object(self, *args, **kwargs): # real signature unknown
pass
def connect_object_after(self, *args, **kwargs): # real signature unknown
pass
def disconnect(*args, **kwargs): # reliably restored by inspect
""" signal_handler_disconnect(instance:GObject.Object, handler_id:int) """
pass
def disconnect_by_func(self, *args, **kwargs): # real signature unknown
pass
def dup_backend_name(self): # real signature unknown; restored from __doc__
""" dup_backend_name(self) -> str """
return ""
def emit(self, *args, **kwargs): # real signature unknown
pass
def emit_stop_by_name(self, detailed_signal): # reliably restored by inspect
""" Deprecated, please use stop_emission_by_name. """
pass
def find_property(self, property_name): # real signature unknown; restored from __doc__
""" find_property(self, property_name:str) -> GObject.ParamSpec """
pass
def force_floating(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def freeze_notify(self): # reliably restored by inspect
"""
Freezes the object's property-changed notification queue.
:returns:
A context manager which optionally can be used to
automatically thaw notifications.
This will freeze the object so that "notify" signals are blocked until
the thaw_notify() method is called.
.. code-block:: python
with obj.freeze_notify():
pass
"""
pass
def getv(self, names, values): # real signature unknown; restored from __doc__
""" getv(self, names:list, values:list) """
pass
def get_backend_name(self): # real signature unknown; restored from __doc__
""" get_backend_name(self) -> str """
return ""
def get_data(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def get_properties(self, *args, **kwargs): # real signature unknown
pass
def get_property(self, *args, **kwargs): # real signature unknown
pass
def get_qdata(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def get_source(self): # real signature unknown; restored from __doc__
""" get_source(self) -> EDataServer.Source """
pass
def handler_block(obj, handler_id): # reliably restored by inspect
"""
Blocks the signal handler from being invoked until
handler_unblock() is called.
:param GObject.Object obj:
Object instance to block handlers for.
:param int handler_id:
Id of signal to block.
:returns:
A context manager which optionally can be used to
automatically unblock the handler:
.. code-block:: python
with GObject.signal_handler_block(obj, id):
pass
"""
pass
def handler_block_by_func(self, *args, **kwargs): # real signature unknown
pass
def handler_disconnect(*args, **kwargs): # reliably restored by inspect
""" signal_handler_disconnect(instance:GObject.Object, handler_id:int) """
pass
def handler_is_connected(*args, **kwargs): # reliably restored by inspect
""" signal_handler_is_connected(instance:GObject.Object, handler_id:int) -> bool """
pass
def handler_unblock(*args, **kwargs): # reliably restored by inspect
""" signal_handler_unblock(instance:GObject.Object, handler_id:int) """
pass
def handler_unblock_by_func(self, *args, **kwargs): # real signature unknown
pass
def install_properties(self, pspecs): # real signature unknown; restored from __doc__
""" install_properties(self, pspecs:list) """
pass
def install_property(self, property_id, pspec): # real signature unknown; restored from __doc__
""" install_property(self, property_id:int, pspec:GObject.ParamSpec) """
pass
def interface_find_property(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def interface_install_property(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def interface_list_properties(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def is_floating(self): # real signature unknown; restored from __doc__
""" is_floating(self) -> bool """
return False
def list_properties(self): # real signature unknown; restored from __doc__
""" list_properties(self) -> list, n_properties:int """
return []
def newv(self, object_type, parameters): # real signature unknown; restored from __doc__
""" newv(object_type:GType, parameters:list) -> GObject.Object """
pass
def notify(self, property_name): # real signature unknown; restored from __doc__
""" notify(self, property_name:str) """
pass
def notify_by_pspec(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def override_property(self, property_id, name): # real signature unknown; restored from __doc__
""" override_property(self, property_id:int, name:str) """
pass
def property_lock(self): # real signature unknown; restored from __doc__
""" property_lock(self) """
pass
def property_unlock(self): # real signature unknown; restored from __doc__
""" property_unlock(self) """
pass
def ref(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def ref_sink(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def ref_source(self): # real signature unknown; restored from __doc__
""" ref_source(self) -> EDataServer.Source """
pass
def replace_data(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def replace_qdata(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def run_dispose(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def set_backend_name(self, backend_name=None): # real signature unknown; restored from __doc__
""" set_backend_name(self, backend_name:str=None) """
pass
def set_data(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def set_properties(self, *args, **kwargs): # real signature unknown
pass
def set_property(self, *args, **kwargs): # real signature unknown
pass
def steal_data(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def steal_qdata(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def stop_emission(self, detailed_signal): # reliably restored by inspect
""" Deprecated, please use stop_emission_by_name. """
pass
def stop_emission_by_name(*args, **kwargs): # reliably restored by inspect
""" signal_stop_emission_by_name(instance:GObject.Object, detailed_signal:str) """
pass
def thaw_notify(self): # real signature unknown; restored from __doc__
""" thaw_notify(self) """
pass
def unref(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def watch_closure(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def weak_ref(self, *args, **kwargs): # real signature unknown
pass
def _force_floating(self, *args, **kwargs): # real signature unknown
""" force_floating(self) """
pass
def _ref(self, *args, **kwargs): # real signature unknown
""" ref(self) -> GObject.Object """
pass
def _ref_sink(self, *args, **kwargs): # real signature unknown
""" ref_sink(self) -> GObject.Object """
pass
def _unref(self, *args, **kwargs): # real signature unknown
""" unref(self) """
pass
def _unsupported_data_method(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def _unsupported_method(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def __copy__(self, *args, **kwargs): # real signature unknown
pass
def __deepcopy__(self, *args, **kwargs): # real signature unknown
pass
def __delattr__(self, *args, **kwargs): # real signature unknown
""" Implement delattr(self, name). """
pass
def __dir__(self, *args, **kwargs): # real signature unknown
""" Default dir() implementation. """
pass
def __eq__(self, *args, **kwargs): # real signature unknown
""" Return self==value. """
pass
def __format__(self, *args, **kwargs): # real signature unknown
""" Default object formatter. """
pass
def __getattribute__(self, *args, **kwargs): # real signature unknown
""" Return getattr(self, name). """
pass
def __ge__(self, *args, **kwargs): # real signature unknown
""" Return self>=value. """
pass
def __gt__(self, *args, **kwargs): # real signature unknown
""" Return self>value. """
pass
def __hash__(self, *args, **kwargs): # real signature unknown
""" Return hash(self). """
pass
def __init_subclass__(self, *args, **kwargs): # real signature unknown
"""
This method is called when a class is subclassed.
The default implementation does nothing. It may be
overridden to extend subclasses.
"""
pass
def __init__(self, **properties): # real signature unknown; restored from __doc__
pass
def __le__(self, *args, **kwargs): # real signature unknown
""" Return self<=value. """
pass
def __lt__(self, *args, **kwargs): # real signature unknown
""" Return self<value. """
pass
@staticmethod # known case of __new__
def __new__(*args, **kwargs): # real signature unknown
""" Create and return a new object. See help(type) for accurate signature. """
pass
def __ne__(self, *args, **kwargs): # real signature unknown
""" Return self!=value. """
pass
def __reduce_ex__(self, *args, **kwargs): # real signature unknown
""" Helper for pickle. """
pass
def __reduce__(self, *args, **kwargs): # real signature unknown
""" Helper for pickle. """
pass
def __repr__(self, *args, **kwargs): # real signature unknown
""" Return repr(self). """
pass
def __setattr__(self, *args, **kwargs): # real signature unknown
""" Implement setattr(self, name, value). """
pass
def __sizeof__(self, *args, **kwargs): # real signature unknown
""" Size of object in memory, in bytes. """
pass
def __str__(self, *args, **kwargs): # real signature unknown
""" Return str(self). """
pass
def __subclasshook__(self, *args, **kwargs): # real signature unknown
"""
Abstract classes can override this to customize issubclass().
This is invoked early on by abc.ABCMeta.__subclasscheck__().
It should return True, False or NotImplemented. If it returns
NotImplemented, the normal algorithm is used. Otherwise, it
overrides the normal algorithm (and the outcome is cached).
"""
pass
g_type_instance = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
parent = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
priv = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
qdata = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
ref_count = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
__gpointer__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
__grefcount__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
props = None # (!) real value is '<gi._gi.GProps object at 0x7f626e886280>'
__class__ = None # (!) real value is "<class 'gi.types.GObjectMeta'>"
__dict__ = None # (!) real value is "mappingproxy({'__info__': ObjectInfo(SourceBackend), '__module__': 'gi.repository.EDataServer', '__gtype__': <GType ESourceBackend (94877537050448)>, '__doc__': None, '__gsignals__': {}, 'dup_backend_name': gi.FunctionInfo(dup_backend_name), 'get_backend_name': gi.FunctionInfo(get_backend_name), 'set_backend_name': gi.FunctionInfo(set_backend_name), 'parent': <property object at 0x7f626e916860>, 'priv': <property object at 0x7f626e916950>})"
__gdoc__ = 'Object ESourceBackend\n\nProperties from ESourceBackend:\n backend-name -> gchararray: Backend Name\n The name of the backend handling the data source\n\nProperties from ESourceExtension:\n source -> ESource: Source\n The ESource being extended\n\nSignals from GObject:\n notify (GParam)\n\n'
__gsignals__ = {}
__gtype__ = None # (!) real value is '<GType ESourceBackend (94877537050448)>'
__info__ = ObjectInfo(SourceBackend)
| [
"[email protected]"
]
| |
bc4bb2ab64e0a17a47e1b6f43fd2b3437f721193 | 6cbaade56c5db347d1be9a3422a69af52df39b97 | /python_workspace/3_bigdata/02_Standardization_Analysis/03_DB/3db_update_rows.py | 5adb87cf5b4001047ddbf256047752fe4797d8e7 | []
| no_license | baewonje/iot_bigdata_- | b54e3772f64b9695efee8632183590628b679e11 | 2ce1af67d2f05abeb2ecd442b7299f349bdb9753 | refs/heads/master | 2020-09-06T09:53:53.018320 | 2019-12-06T08:19:33 | 2019-12-06T08:19:33 | 220,390,928 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,662 | py | # !/usr/bin/env python3
import csv
import sqlite3
import sys
# path to and name of a CSV input file
input_file = sys.argv[1] #data_for_updating.csv
# Create an in-memory SQLite3 database
# Create a table called sales with four attributes
con = sqlite3.connect(':memory:')
query = """CREATE TABLE IF NOT EXISTS sales
(customer VARCHAR(20),
product VARCHAR(40),
amount FLOAT,
Date DATE);"""
con.execute(query)
con.commit()
# Insert a few rows of data into the table
data = [('Richard Lucas', 'Notepad', 2.50, '2014-01-02'),
('Jenny Kim', 'Binder', 4.15,'2014-01-15'),
('Svetlana Crow', 'Printer', 155.75,'2014-02-03'),
('Stephen Randolph', 'Computer', 679.40, '2014-02-20')]
for row in data:
    print(row)
statement = "INSERT INTO sales VALUES(?, ?, ?, ?)"
con.executemany(statement, data)
con.commit()
# Read the CSV file and update the specific rows
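# A sketch of the expected input file (an assumption -- data_for_updating.csv
# itself is not shown here). The column order must match the UPDATE
# placeholders below: amount first, then date, then customer.
#
# amount,date,customer
# 4.25,2014-05-11,Richard Lucas
# 6.75,2014-05-12,Jenny Kim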
file_reader = csv.reader(open(input_file, 'r'), delimiter=',')
header = next(file_reader, None)
for row in file_reader:
data = []
for column_index in range(len(header)):
data.append(row[column_index])
print(data)
con.execute("UPDATE sales SET amount=?, date=? WHERE customer=?;", data)
# update [table_name] SET [column_name] = new_value ... where [column_name] = [filter_value];
# an UPDATE must always be accompanied by a WHERE condition, otherwise every row is changed.
con.commit()
# Query the sales table
cursor = con.execute("SELECT * FROM sales")
rows = cursor.fetchall()
for row in rows:
output = []
for column_index in range(len(row)):
output.append(str(row[column_index]))
print(output) | [
"[email protected]"
]
| |
d2a94da24516c80d5f77b6421de322c3da5d2878 | 5e27c7f5426c169fd348b26e94b65c35f9cdc459 | /hiveguilib/HBlender/NodeItemManager.py | 4b7b29f0dc48b1dc592c47a1c27d9ff0efbb321a | [
"BSD-2-Clause"
]
| permissive | agoose77/hivesystem | e2c9c27408233b5794151ca74f541d2e6063d58a | e1f55c5ea530a989477edb896dcd89f3926a31b8 | refs/heads/master | 2020-07-21T23:07:37.178856 | 2014-08-23T02:13:19 | 2014-08-23T02:13:19 | 20,776,359 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 5,172 | py | import bpy
from . import level
class NodeItem:
"""Operator entry within the Add node menu"""
def __init__(self, manager, key, fullkey):
self.manager = manager
self.key = key
self.fullkey = fullkey
def _active(self, context):
if context.space_data.edit_tree is None:
return False
if context.space_data.edit_tree.name not in self.manager._nodeitem_trees[self.fullkey]:
return False
        if not level.active(context, tuple(self.fullkey.split("."))):
            return False
return True
def draw(self, layout, context):
default_context = bpy.app.translations.contexts.default
props = layout.operator("node.add_hive_node", text=self.key, text_ctxt=default_context)
props.type = self.fullkey
class NodeItemMenu:
"""Menu entry within the Add node menu"""
name = "NODE_MT_HIVE"
def __init__(self, title, fullname, make_panel=False):
if title is not None:
assert fullname is not None
self.title = title
self.fullname = fullname
self.children = []
def menudraw(struct, context):
if not level.active(context, self.fullname):
return
return self.draw(struct.layout, context)
cls_dict = dict(bl_space_type='NODE_EDITOR', bl_label="<HiveMenu>", draw=menudraw, poll=self.poll)
name = self.name
if self.fullname is not None:
name = self.name + "_" + "_".join(self.fullname)
self.name = name
self.menu_class = type(name, (bpy.types.Menu,), cls_dict)
if make_panel:
type_name = name.replace("NODE_MT_", "NODE_PT_")
cls_dict = dict(bl_space_type='NODE_EDITOR', bl_label=title, bl_region_type='TOOLS',
bl_options={'DEFAULT_CLOSED'}, poll=self._active, draw=menudraw)
self.panel_class = type(type_name, (bpy.types.Panel,), cls_dict)
else:
self.panel_class = None
def register(self):
if self.panel_class is not None:
bpy.utils.register_class(self.panel_class)
bpy.utils.register_class(self.menu_class)
def unregister(self):
if self.panel_class is not None:
bpy.utils.unregister_class(self.panel_class)
bpy.utils.unregister_class(self.menu_class)
def _active(self, context):
if not level.active(context, self.fullname):
return False
for child in self.children:
if child._active(context):
return True
return False
def draw(self, layout, context):
col = layout.column()
for child in self.children:
if not child._active(context):
continue
if isinstance(child, NodeItemMenu):
layout.menu(self.name + "_" + child.title, text=child.title)
else:
child.draw(col, context)
@classmethod
def poll(menucls, context):
return False
class NodeItemManager:
def __init__(self):
self._nodeitem_objects = NodeItemMenu(None, None)
self._nodeitems = {}
self._nodeitem_names = []
self._nodeitem_trees = {}
def append(self, node_tree_name, path):
full_path = ".".join(path)
if full_path not in self._nodeitem_names:
self._nodeitem_names.append(full_path)
self._nodeitem_trees[full_path] = []
item = NodeItem(self, path[-1], full_path)
self._nodeitems[path] = item
child = item
for key_index in range(len(path) - 1, 0, -1):
path_slice = path[:key_index]
if path_slice not in self._nodeitems:
path_component = path[key_index - 1]
make_panel = (key_index == 1)
menu = NodeItemMenu(path_component, path_slice, make_panel)
menu.register()
self._nodeitems[path_slice] = menu
else:
menu = self._nodeitems[path_slice]
if child not in menu.children:
menu.children.append(child)
child = menu
if child not in self._nodeitem_objects.children:
self._nodeitem_objects.children.append(child)
self._nodeitem_trees[full_path].append(node_tree_name)
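
    # Hypothetical usage sketch (not from the original module; intended to run
    # inside Blender, where bpy is available):
    #   manager = NodeItemManager()
    #   manager.append("MyHiveTree", ("hives", "math", "add"))
    #   manager.register()  # hooks draw_menu into the NODE_MT_add menu
    # After append(), "hives" -> "math" become nested NodeItemMenu entries,
    # "add" becomes the NodeItem leaf, and "MyHiveTree" is recorded in
    # _nodeitem_trees["hives.math.add"].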
def remove(self, node_tree_name, key):
# TODO implement nodeitem remove
raise NotImplementedError
def rename(self, old_node_tree_name, new_node_tree_name):
for full_key, node_trees in self._nodeitem_trees.items():
if not old_node_tree_name in node_trees:
continue
node_trees[node_trees.index(old_node_tree_name)] = new_node_tree_name
def draw_menu(self, struct, context):
menu = self._nodeitem_objects
if not menu._active(context):
return
menu.draw(struct.layout, context)
def register(self):
bpy.types.NODE_MT_add.append(self.draw_menu)
def unregister(self):
bpy.types.NODE_MT_add.remove(self.draw_menu) | [
"[email protected]"
]
| |
a6c8ba694d4221f97b510caa34d07f1f90b4820c | 196137e16065125b854f00509849aaf4bd2d0394 | /account/urls.py | 470f067ac27933fb4a7774b6324375f32870c869 | []
| no_license | ksuvarna85/unicode_quiz | 2f74f20fa5327a43e85a3e20538e9c3620a1e4dc | d45df6be213b44d1704163c7137d96c8695bfced | refs/heads/master | 2022-12-27T06:04:37.912161 | 2020-10-16T05:17:22 | 2020-10-16T05:17:22 | 296,305,870 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,110 | py | from django.contrib import admin
from django.urls import path,include
from account import views
app_name='account'
urlpatterns=[
path('register/',views.student_register,name='student_register'),
path('teacher_register/',views.teacher_register,name='teacher'),
path('login/',views.user_login,name='user_login'),
path('teacher_list/',views.ChapterListView.as_view(),name='list'),
path('teacher_result/<int:chp_pk>/',views.student_result,name='result_teacher'),
path('teacher_list/<int:pk>/',views.QuestionDetailView.as_view(),name='detail'),
path('addquestion/',views.questionform_view,name='add'),
path('update/<int:pk>/',views.QuestionUpdateView.as_view(),name='update'),
path('create/',views.ChapterCreateView.as_view(),name='create'),
path('student/<int:student_pk>/',views.student_chp_lst,name='list_fun'),
path('student/<int:student_pk>/<int:chp_pk>/',views.question_detail,name='detail_fun'),
path('delete/<int:pk>/',views.ChapterDeleteView.as_view(),name='delete'),
path('student_result/<int:student_pk>/<int:chp_pk>/',views.result,name='result'),
]
| [
"[email protected]"
]
| |
6fcf9ce530e29da59a5d08eb43a350cdeae05c2e | cd65ec1046a2f02fdb34257e0244dcde1da5a90a | /lextab.py | b8e4a8bdf58397f527291e24143fc491c32d32df | []
| no_license | pritomrajkhowa/WEB_VfPbP | 8999dfa89723a3e50350259ebc86a1a59020c5ca | 37f6354299bb18c350da8708dd64f43ec172c854 | refs/heads/main | 2022-12-24T21:14:38.477151 | 2020-10-04T04:53:50 | 2020-10-04T04:53:50 | 301,031,804 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,074 | py | # lextab.py. This file automatically created by PLY (version 3.10). Don't edit!
_tabversion = '3.10'
_lextokens = set(('EQ', 'OREQUAL', 'INT_CONST_BIN', 'ELLIPSIS', 'MINUSEQUAL', 'DIVIDE', 'GT', 'ELSE', 'PLUS', 'STATIC', 'WCHAR_CONST', 'CASE', 'CHAR', 'CONTINUE', 'STRUCT', 'RETURN', 'REGISTER', '_BOOL', 'PPPRAGMASTR', 'CHAR_CONST', 'XOR', 'VOLATILE', 'RSHIFT', 'LNOT', 'XOREQUAL', 'LAND', 'ANDEQUAL', 'INT_CONST_DEC', 'SIGNED', 'COLON', 'GE', 'BREAK', 'SWITCH', 'PLUSPLUS', 'TIMES', 'LE', 'MODEQUAL', 'SHORT', 'WHILE', 'TYPEID', 'PLUSEQUAL', 'UNSIGNED', 'NE', 'TYPEDEF', 'DO', 'RPAREN', 'UNION', 'LPAREN', 'DOUBLE', '_COMPLEX', 'OR', 'RSHIFTEQUAL', 'RBRACE', 'PPHASH', 'CONST', 'LSHIFTEQUAL', 'PERIOD', 'OFFSETOF', 'LOR', 'LSHIFT', 'LT', 'IF', 'ARROW', 'INLINE', 'RBRACKET', 'EXTERN', 'EQUALS', 'COMMA', 'AND', 'SIZEOF', 'HEX_FLOAT_CONST', 'STRING_LITERAL', 'VOID', 'FLOAT', 'RESTRICT', 'WSTRING_LITERAL', 'DIVEQUAL', 'FLOAT_CONST', 'DEFAULT', 'LONG', 'INT_CONST_OCT', 'LBRACKET', 'NOT', 'GOTO', 'ENUM', 'MOD', 'INT', 'CONDOP', 'LBRACE', 'MINUSMINUS', 'FOR', 'PPPRAGMA', 'TIMESEQUAL', 'AUTO', 'ID', 'SEMI', '__INT128', 'INT_CONST_HEX', 'MINUS'))
_lexreflags = 64
_lexliterals = ''
_lexstateinfo = {'INITIAL': 'inclusive', 'ppline': 'exclusive', 'pppragma': 'exclusive'}
_lexstatere = {'INITIAL': [('(?P<t_PPHASH>[ \\t]*\\#)|(?P<t_NEWLINE>\\n+)|(?P<t_LBRACE>\\{)|(?P<t_RBRACE>\\})|(?P<t_FLOAT_CONST>((((([0-9]*\\.[0-9]+)|([0-9]+\\.))([eE][-+]?[0-9]+)?)|([0-9]+([eE][-+]?[0-9]+)))[FfLl]?))|(?P<t_HEX_FLOAT_CONST>(0[xX]([0-9a-fA-F]+|((([0-9a-fA-F]+)?\\.[0-9a-fA-F]+)|([0-9a-fA-F]+\\.)))([pP][+-]?[0-9]+)[FfLl]?))|(?P<t_INT_CONST_HEX>0[xX][0-9a-fA-F]+(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)|(?P<t_INT_CONST_BIN>0[bB][01]+(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)|(?P<t_BAD_CONST_OCT>0[0-7]*[89])|(?P<t_INT_CONST_OCT>0[0-7]*(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)|(?P<t_INT_CONST_DEC>(0(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)|([1-9][0-9]*(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?))|(?P<t_CHAR_CONST>\'([^\'\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))\')|(?P<t_WCHAR_CONST>L\'([^\'\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))\')|(?P<t_UNMATCHED_QUOTE>(\'([^\'\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*\\n)|(\'([^\'\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*$))|(?P<t_BAD_CHAR_CONST>(\'([^\'\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))[^\'\n]+\')|(\'\')|(\'([\\\\][^a-zA-Z._~^!=&\\^\\-\\\\?\'"x0-7])[^\'\\n]*\'))|(?P<t_WSTRING_LITERAL>L"([^"\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*")|(?P<t_BAD_STRING_LITERAL>"([^"\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*?([\\\\][^a-zA-Z._~^!=&\\^\\-\\\\?\'"x0-7])([^"\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*")|(?P<t_ID>[a-zA-Z_$][0-9a-zA-Z_$]*)|(?P<t_STRING_LITERAL>"([^"\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*")|(?P<t_ELLIPSIS>\\.\\.\\.)|(?P<t_LOR>\\|\\|)|(?P<t_PLUSPLUS>\\+\\+)|(?P<t_LSHIFTEQUAL><<=)|(?P<t_OREQUAL>\\|=)|(?P<t_PLUSEQUAL>\\+=)|(?P<t_RSHIFTEQUAL>>>=)|(?P<t_TIMESEQUAL>\\*=)|(?P<t_XOREQUAL>\\^=)|(?P<t_ANDEQUAL>&=)|(?P<t_ARROW>->)|(?P<t_CONDOP>\\?)|(?P<t_DIVEQUAL>/=)|(?P<t_EQ>==)|(?P<t_GE>>=)|(?P<t_LAND>&&)|(?P<t_LBRACKET>\\[)|(?P<t_LE><=)|(?P<t_LPAREN>\\()|(?P<t_LSHIFT><<)|(?P<t_MINUSEQUAL>-=)|(?P<t_MINUSMINUS>--)|(?P<t_MODEQUAL>%=)|(?P<t_NE>!=)|(?P<t_OR>\\|)|(?P<t_PERIOD>\\.)|(?P<t_PLUS>\\+)|(?P<t_RBRACKET>\\])|(?P<t_RPAREN>\\))|(?P<t_RSHIFT>>>)|(?P<t_TIMES>\\*)|(?P<t_XOR>\\^)|(?P<t_AND>&)|(?P<t_COLON>:)|(?P<t_COMMA>,)|(?P<t_DIVIDE>/)|(?P<t_EQUALS>=)|(?P<t_GT>>)|(?P<t_LNOT>!)|(?P<t_LT><)|(?P<t_MINUS>-)|(?P<t_MOD>%)|(?P<t_NOT>~)|(?P<t_SEMI>;)', [None, ('t_PPHASH', 'PPHASH'), ('t_NEWLINE', 'NEWLINE'), ('t_LBRACE', 'LBRACE'), ('t_RBRACE', 'RBRACE'), ('t_FLOAT_CONST', 'FLOAT_CONST'), None, None, None, None, None, None, None, None, None, ('t_HEX_FLOAT_CONST', 'HEX_FLOAT_CONST'), None, None, None, None, None, None, None, ('t_INT_CONST_HEX', 'INT_CONST_HEX'), None, None, None, None, None, None, None, ('t_INT_CONST_BIN', 'INT_CONST_BIN'), None, None, None, None, None, None, None, ('t_BAD_CONST_OCT', 'BAD_CONST_OCT'), ('t_INT_CONST_OCT', 'INT_CONST_OCT'), None, None, None, None, None, None, None, ('t_INT_CONST_DEC', 'INT_CONST_DEC'), None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ('t_CHAR_CONST', 'CHAR_CONST'), None, None, None, None, None, None, ('t_WCHAR_CONST', 'WCHAR_CONST'), None, None, None, None, None, None, ('t_UNMATCHED_QUOTE', 'UNMATCHED_QUOTE'), None, None, None, None, None, None, None, 
None, None, None, None, None, None, None, ('t_BAD_CHAR_CONST', 'BAD_CHAR_CONST'), None, None, None, None, None, None, None, None, None, None, ('t_WSTRING_LITERAL', 'WSTRING_LITERAL'), None, None, None, None, None, None, ('t_BAD_STRING_LITERAL', 'BAD_STRING_LITERAL'), None, None, None, None, None, None, None, None, None, None, None, None, None, ('t_ID', 'ID'), (None, 'STRING_LITERAL'), None, None, None, None, None, None, (None, 'ELLIPSIS'), (None, 'LOR'), (None, 'PLUSPLUS'), (None, 'LSHIFTEQUAL'), (None, 'OREQUAL'), (None, 'PLUSEQUAL'), (None, 'RSHIFTEQUAL'), (None, 'TIMESEQUAL'), (None, 'XOREQUAL'), (None, 'ANDEQUAL'), (None, 'ARROW'), (None, 'CONDOP'), (None, 'DIVEQUAL'), (None, 'EQ'), (None, 'GE'), (None, 'LAND'), (None, 'LBRACKET'), (None, 'LE'), (None, 'LPAREN'), (None, 'LSHIFT'), (None, 'MINUSEQUAL'), (None, 'MINUSMINUS'), (None, 'MODEQUAL'), (None, 'NE'), (None, 'OR'), (None, 'PERIOD'), (None, 'PLUS'), (None, 'RBRACKET'), (None, 'RPAREN'), (None, 'RSHIFT'), (None, 'TIMES'), (None, 'XOR'), (None, 'AND'), (None, 'COLON'), (None, 'COMMA'), (None, 'DIVIDE'), (None, 'EQUALS'), (None, 'GT'), (None, 'LNOT'), (None, 'LT'), (None, 'MINUS'), (None, 'MOD'), (None, 'NOT'), (None, 'SEMI')])], 'ppline': [('(?P<t_ppline_FILENAME>"([^"\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*")|(?P<t_ppline_LINE_NUMBER>(0(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)|([1-9][0-9]*(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?))|(?P<t_ppline_NEWLINE>\\n)|(?P<t_ppline_PPLINE>line)', [None, ('t_ppline_FILENAME', 'FILENAME'), None, None, None, None, None, None, ('t_ppline_LINE_NUMBER', 'LINE_NUMBER'), None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ('t_ppline_NEWLINE', 'NEWLINE'), ('t_ppline_PPLINE', 'PPLINE')])], 'pppragma': [('(?P<t_pppragma_NEWLINE>\\n)|(?P<t_pppragma_PPPRAGMA>pragma)|(?P<t_pppragma_STR>.+)', [None, ('t_pppragma_NEWLINE', 'NEWLINE'), ('t_pppragma_PPPRAGMA', 'PPPRAGMA'), ('t_pppragma_STR', 'STR')])]}
_lexstateignore = {'INITIAL': ' \t', 'ppline': ' \t', 'pppragma': ' \t'}
_lexstateerrorf = {'INITIAL': 't_error', 'ppline': 't_ppline_error', 'pppragma': 't_pppragma_error'}
_lexstateeoff = {}
| [
"[email protected]"
]
| |
e59211370261a20210a37aca73990884fc1ae746 | 8606267410dabfeacb4b7ff285a8d2250c139acc | /store/migrations/0001_initial.py | 02752dd9c03ef9ffa7bb9baf7b45f72a0984e8d2 | []
| no_license | Taraltinu/chopping-Site | a5e6f6eeeecb4fef92f90770a3c2493eca0f0bde | 1b722d53de1baaa5780701416f78dab62ef7d057 | refs/heads/master | 2022-12-20T07:06:16.602476 | 2020-10-02T18:07:31 | 2020-10-02T18:07:31 | 300,697,693 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 671 | py | # Generated by Django 3.1.1 on 2020-09-11 10:02
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Produc',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=50)),
('price', models.IntegerField()),
('image', models.ImageField(upload_to='products/')),
('description', models.CharField(max_length=300)),
],
),
]
| [
"[email protected]"
]
| |
96a7ae82d67e86eaff22d1a70044f9a415631424 | b126ea44cd0fa28d06d5d41a1d302a0ec718ca75 | /env/bin/wheel | d190a661dc561e88da4641f80fcade79163b4144 | []
| no_license | juned8236/Celery | 475c4c84c5097c08ace9f13e58c7ed567ea6ea58 | 7ce61ccf4ec295c02a96f085b0769628e56bf4b3 | refs/heads/master | 2021-02-03T22:55:08.596841 | 2020-02-28T10:58:40 | 2020-02-28T10:58:40 | 243,565,396 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 255 | #!/home/juned8236/Desktop/reactPractise/celery/env/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from wheel.cli import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"[email protected]"
]
| ||
5333cb7e40f6f61d1f108d164fb66a2042e93863 | 5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d | /alipay/aop/api/domain/AlipayOpenMiniInnerVersionproportionModifyModel.py | b4cc7d37f14d975472b8a0fb77f70ed09f8bac46 | [
"Apache-2.0"
]
| permissive | alipay/alipay-sdk-python-all | 8bd20882852ffeb70a6e929038bf88ff1d1eff1c | 1fad300587c9e7e099747305ba9077d4cd7afde9 | refs/heads/master | 2023-08-27T21:35:01.778771 | 2023-08-23T07:12:26 | 2023-08-23T07:12:26 | 133,338,689 | 247 | 70 | Apache-2.0 | 2023-04-25T04:54:02 | 2018-05-14T09:40:54 | Python | UTF-8 | Python | false | false | 3,311 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class AlipayOpenMiniInnerVersionproportionModifyModel(object):
def __init__(self):
self._app_origin = None
self._bundle_id = None
self._dev_id = None
self._mini_app_id = None
self._operate_id = None
self._value = None
@property
def app_origin(self):
return self._app_origin
@app_origin.setter
def app_origin(self, value):
self._app_origin = value
@property
def bundle_id(self):
return self._bundle_id
@bundle_id.setter
def bundle_id(self, value):
self._bundle_id = value
@property
def dev_id(self):
return self._dev_id
@dev_id.setter
def dev_id(self, value):
self._dev_id = value
@property
def mini_app_id(self):
return self._mini_app_id
@mini_app_id.setter
def mini_app_id(self, value):
self._mini_app_id = value
@property
def operate_id(self):
return self._operate_id
@operate_id.setter
def operate_id(self, value):
self._operate_id = value
@property
def value(self):
return self._value
@value.setter
def value(self, value):
self._value = value
def to_alipay_dict(self):
params = dict()
if self.app_origin:
if hasattr(self.app_origin, 'to_alipay_dict'):
params['app_origin'] = self.app_origin.to_alipay_dict()
else:
params['app_origin'] = self.app_origin
if self.bundle_id:
if hasattr(self.bundle_id, 'to_alipay_dict'):
params['bundle_id'] = self.bundle_id.to_alipay_dict()
else:
params['bundle_id'] = self.bundle_id
if self.dev_id:
if hasattr(self.dev_id, 'to_alipay_dict'):
params['dev_id'] = self.dev_id.to_alipay_dict()
else:
params['dev_id'] = self.dev_id
if self.mini_app_id:
if hasattr(self.mini_app_id, 'to_alipay_dict'):
params['mini_app_id'] = self.mini_app_id.to_alipay_dict()
else:
params['mini_app_id'] = self.mini_app_id
if self.operate_id:
if hasattr(self.operate_id, 'to_alipay_dict'):
params['operate_id'] = self.operate_id.to_alipay_dict()
else:
params['operate_id'] = self.operate_id
if self.value:
if hasattr(self.value, 'to_alipay_dict'):
params['value'] = self.value.to_alipay_dict()
else:
params['value'] = self.value
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = AlipayOpenMiniInnerVersionproportionModifyModel()
if 'app_origin' in d:
o.app_origin = d['app_origin']
if 'bundle_id' in d:
o.bundle_id = d['bundle_id']
if 'dev_id' in d:
o.dev_id = d['dev_id']
if 'mini_app_id' in d:
o.mini_app_id = d['mini_app_id']
if 'operate_id' in d:
o.operate_id = d['operate_id']
if 'value' in d:
o.value = d['value']
return o
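
    # Hypothetical usage sketch (assumption -- not part of the generated SDK file):
    #   model = AlipayOpenMiniInnerVersionproportionModifyModel()
    #   model.mini_app_id = "2021001234567890"   # made-up app id for illustration
    #   model.value = "50"                       # example proportion value
    #   print(model.to_alipay_dict())
    # to_alipay_dict() only emits the attributes that were actually set.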
| [
"[email protected]"
]
| |
2f8dcf3655a9ae3cdee7dbe1d09991ec55257159 | e3bdb7844f634efd89109079d22cade713c4899d | /openapi_client/models/void_transaction_all_of.py | ce89ea1172ba74575cfc5d49430f63a171e6cf8d | []
| no_license | pc-coholic/Python | 5170c27da09b066c353e09539e404961f7ad50b7 | b7251c31339b579f71fb7ee9db05be51e9e43361 | refs/heads/master | 2023-04-19T02:42:02.914726 | 2021-04-26T16:07:37 | 2021-04-26T16:07:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,420 | py | # coding: utf-8
"""
Payment Gateway API Specification.
The documentation here is designed to provide all of the technical guidance required to consume and integrate with our APIs for payment processing. To learn more about our APIs please visit https://docs.firstdata.com/org/gateway. # noqa: E501
The version of the OpenAPI document: 21.2.0.20210406.001
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
class VoidTransactionAllOf(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'transaction_amount': 'Amount'
}
attribute_map = {
'transaction_amount': 'transactionAmount'
}
def __init__(self, transaction_amount=None): # noqa: E501
"""VoidTransactionAllOf - a model defined in OpenAPI""" # noqa: E501
self._transaction_amount = None
self.discriminator = None
if transaction_amount is not None:
self.transaction_amount = transaction_amount
@property
def transaction_amount(self):
"""Gets the transaction_amount of this VoidTransactionAllOf. # noqa: E501
:return: The transaction_amount of this VoidTransactionAllOf. # noqa: E501
:rtype: Amount
"""
return self._transaction_amount
@transaction_amount.setter
def transaction_amount(self, transaction_amount):
"""Sets the transaction_amount of this VoidTransactionAllOf.
:param transaction_amount: The transaction_amount of this VoidTransactionAllOf. # noqa: E501
:type: Amount
"""
self._transaction_amount = transaction_amount
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, VoidTransactionAllOf):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
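
    # Hypothetical usage sketch (assumption -- Amount's constructor is not
    # shown in this file):
    #   amt = Amount(total="12.99", currency="USD")
    #   v = VoidTransactionAllOf(transaction_amount=amt)
    #   print(v.to_str())   # pprint-formatted dict of the model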
| [
"[email protected]"
]
| |
fbc08241b0dfb136f94f6e6040620b9099350cbd | 1811d37ed6474ab7eaeafff3c82d3bb7c0466e3d | /parts/zodiac/zope/interface/declarations.py | b29efbd9678e5ce7f21d159b9f3635b127f760a8 | []
| no_license | bernatcortina/zodiac | ed384fe96f6739d841a3a777d10bad4b33fd0e78 | aa0ecb2c386fc5b54ff60ba94e0a1bc5a7493f17 | refs/heads/master | 2021-01-18T14:02:44.978553 | 2014-02-07T17:33:27 | 2014-02-07T17:33:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 112 | py | /Users/Bernat/GitHub/zodiac/eggs/zope.interface-4.0.5-py2.7-macosx-10.9-intel.egg/zope/interface/declarations.py | [
"[email protected]"
]
| |
d14efda8cfee2e7edb7d3d875f12d4848bbe9367 | b06978b6020ce3240912ba5c131c4f38a86d7996 | /Pycharm_files/Midterm_Complete/midterm/loopspractice.py | 426dbf5a01a6f3106e0febdb17ac5ac94fd9fff0 | []
| no_license | mn4774jm/PycharmProjects | 95dc8ee6b89a85ba02d4134aa5b5bce11004647b | 886bcf2400abc9a1f797fe98d09241f99fa16322 | refs/heads/master | 2021-08-09T10:20:27.907847 | 2020-09-04T15:21:21 | 2020-09-04T15:21:21 | 219,878,503 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 61 | py | streets = ['Lake', 'Hennipen', 'Lyndale']
for street in streets:
    print(street) | [
"[email protected]"
]
| |
7fa572d27d6541c78c9f0fa12047679f64d428d0 | 6efb8ca0d1a48edf1335e8fd046ef79072282b9c | /实验/6章/例题/6-10a-to-A.py | f9a13bef6eb3c68edbdf57fa62cbc38546a813f2 | []
| no_license | RedheatWei/python-study | f0c35afd7325982568f554f4eded6a75f9eb8b49 | 3a7dc64028e5246198d7a64c1dc9ee318992020e | refs/heads/master | 2021-01-01T17:13:35.156410 | 2019-07-29T09:18:52 | 2019-07-29T09:18:52 | 98,027,727 | 0 | 0 | null | null | null | null | WINDOWS-1252 | Python | false | false | 168 | py | #!/usr/bin/env python
#_*_ coding:utf-8 _*_
'''
Created on 2015-02-26
@author: Redheat
'''
str_first = raw_input('Enter a string:')
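# Example behaviour of str.swapcase() (illustration, not from the original):
#   'Hello World'.swapcase() -> 'hELLO wORLD'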
print str_first.swapcase() | [
"[email protected]"
]
| |
e58c92bd32e5a53043d10fe57e30d8a17565d77d | 3f12998434f55b3a5eda3f5e934ea692f73450b6 | /verify_signature.py | f4f55360302712f91059341392a956dc534eaf6d | [
"MIT"
]
| permissive | Genxster1998/android_universal | 1ad3dd60233b5993eb05a20f9c832182a8b8f3f1 | 6929fb413b2f9aa789546f0cc9e22e2ab8407634 | refs/heads/master | 2022-02-15T18:57:35.009677 | 2019-08-28T12:23:42 | 2019-08-28T12:23:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,629 | py | #!/usr/bin/env python3
# Dump Android Verified Boot Signature (c) B.Kerler 2017-2018
import hashlib
import struct
from binascii import hexlify,unhexlify
import sys
import argparse
from Crypto.Util.asn1 import DerSequence
from Crypto.PublicKey import RSA
from root.scripts.Library.avbtool3 import *
from root.scripts.Library.utils import *
import json
version="v1.7"
def extract_hash(pub_key,data):
hashlen = 32 #SHA256
encrypted = int(hexlify(data),16)
decrypted = hex(pow(encrypted, pub_key.e, pub_key.n))[2:]
if len(decrypted)%2!=0:
decrypted="0"+decrypted
decrypted=unhexlify(decrypted)
hash = decrypted[-hashlen:]
if (decrypted[-0x21:-0x20] != b'\x20') or (len(hash) != hashlen):
raise Exception('Signature error')
return hash
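
# Sketch of the PKCS#1 v1.5 block that extract_hash() relies on (an assumption
# inferred from the checks above). After the raw RSA operation
# pow(sig, e, n), the recovered block ends with:
#   ... <DigestInfo> 0x20 <32-byte SHA-256 digest>
# The 0x20 byte (= 32, the digest length) directly precedes the hash, which is
# exactly what the b'\x20' check verifies before returning the digest.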
def dump_signature(data):
if data[0:2] == b'\x30\x82':
slen = struct.unpack('>H', data[2:4])[0]
total = slen + 4
cert = struct.unpack('<%ds' % total, data[0:total])[0]
der = DerSequence()
der.decode(cert)
cert0 = DerSequence()
cert0.decode(bytes(der[1]))
pk = DerSequence()
pk.decode(bytes(cert0[0]))
subjectPublicKeyInfo = pk[6]
meta = DerSequence().decode(bytes(der[3]))
name = meta[0][2:]
length = meta[1]
signature = bytes(der[4])[4:0x104]
pub_key = RSA.importKey(subjectPublicKeyInfo)
hash=extract_hash(pub_key,signature)
return [name,length,hash,pub_key,bytes(der[3])[1:2]]
class androidboot:
magic="ANDROID!" #BOOT_MAGIC_SIZE 8
kernel_size=0
kernel_addr=0
ramdisk_size=0
ramdisk_addr=0
second_addr=0
second_size=0
tags_addr=0
page_size=0
qcdt_size=0
os_version=0
name="" #BOOT_NAME_SIZE 16
cmdline="" #BOOT_ARGS_SIZE 512
id=[] #uint*8
extra_cmdline="" #BOOT_EXTRA_ARGS_SIZE 1024
def getheader(inputfile):
param = androidboot()
with open(inputfile, 'rb') as rf:
header = rf.read(0x660)
fields = struct.unpack('<8sIIIIIIIIII16s512s8I1024s', header)
param.magic = fields[0]
param.kernel_size = fields[1]
param.kernel_addr = fields[2]
param.ramdisk_size = fields[3]
param.ramdisk_addr = fields[4]
param.second_size = fields[5]
param.second_addr = fields[6]
param.tags_addr = fields[7]
param.page_size = fields[8]
param.qcdt_size = fields[9]
param.os_version = fields[10]
param.name = fields[11]
param.cmdline = fields[12]
param.id = [fields[13],fields[14],fields[15],fields[16],fields[17],fields[18],fields[19],fields[20]]
param.extra_cmdline = fields[21]
return param
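
# Layout sketch of the boot image header parsed above, following the struct
# format '<8sIIIIIIIIII16s512s8I1024s' (little-endian; the qcdt_size field
# suggests the Qualcomm QCDT variant of the Android boot header):
#   magic[8] | kernel_size | kernel_addr | ramdisk_size | ramdisk_addr |
#   second_size | second_addr | tags_addr | page_size | qcdt_size |
#   os_version | name[16] | cmdline[512] | id[8 * uint32] | extra_cmdline[1024]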
def int_to_bytes(x):
return x.to_bytes((x.bit_length() + 7) // 8, 'big')
def rotstate(state):
if state==0:
print("AVB-Status: VERIFIED, 0")
else:
print("AVB-Status: RED, 3 or ORANGE, 1")
def main(argv):
info="Boot Signature Tool "+version+" (c) B.Kerler 2017-2019"
print("\n"+info)
print("----------------------------------------------")
parser = argparse.ArgumentParser(description=info)
parser.add_argument('--file','-f', dest='filename', default="boot.img", action='store', help='boot or recovery image filename')
parser.add_argument('--vbmeta','-v', dest='vbmeta', action='store', default='vbmeta.img', help='vbmeta partition')
parser.add_argument('--length', '-l', dest='inject', action='store_true', default=False, help='adapt signature length')
args = parser.parse_args()
if args.filename=="":
print("Usage: verify_signature.py -f [boot.img]")
exit(0)
param=getheader(args.filename)
kernelsize = int((param.kernel_size + param.page_size - 1) / param.page_size) * param.page_size
ramdisksize = int((param.ramdisk_size + param.page_size - 1) / param.page_size) * param.page_size
secondsize = int((param.second_size + param.page_size - 1) / param.page_size) * param.page_size
qcdtsize = int((param.qcdt_size + param.page_size - 1) / param.page_size) * param.page_size
print("Kernel=0x%08X,\tlength=0x%08X" % (param.page_size, kernelsize))
print("Ramdisk=0x%08X,\tlength=0x%08X" % ((param.page_size+kernelsize),ramdisksize))
print("Second=0x%08X,\tlength=0x%08X" % ((param.page_size+kernelsize+ramdisksize),secondsize))
print("QCDT=0x%08X,\tlength=0x%08X" % ((param.page_size+kernelsize+ramdisksize+secondsize),qcdtsize))
length=param.page_size+kernelsize+ramdisksize+secondsize+qcdtsize
print("Signature start=0x%08X" % length)
with open(args.filename,'rb') as fr:
data=fr.read()
filesize=os.stat(args.filename).st_size
footerpos=(filesize//0x1000*0x1000)-AvbFooter.SIZE
if data[footerpos:footerpos+4]==b"AVBf":
ftr=AvbFooter(data[footerpos:footerpos+AvbFooter.SIZE])
signature=data[ftr.vbmeta_offset:]
data=data[0:ftr.vbmeta_offset]
avbhdr=AvbVBMetaHeader(signature[:AvbVBMetaHeader.SIZE])
release_string=avbhdr.release_string.replace(b"\x00",b"").decode('utf-8')
print(f"\nAVB >=2.0 vbmeta detected: {release_string}\n----------------------------------------")
if not os.path.exists(args.vbmeta):
print("For avbv2, vbmeta.img is needed. Please use argument --vbmeta [vbmeta.img path].")
exit(0)
if " 1.0" not in release_string and " 1.1" not in release_string:
print("Sorry, only avb version <=1.1 is currently implemented")
exit(0)
hashdata=signature[avbhdr.SIZE:]
imgavbhash=AvbHashDescriptor(hashdata)
print("Image-Target: \t\t\t\t" + str(imgavbhash.partition_name.decode('utf-8')))
# digest_size = len(hashlib.new(name=avbhash.hash_algorithm).digest())
# digest_padding = round_to_pow2(digest_size) - digest_size
# block_size=4096
# (hash_level_offsets, tree_size) = calc_hash_level_offsets(avbhash.image_size, block_size, digest_size + digest_padding)
# root_digest, hash_tree = generate_hash_tree(fr, avbhash.image_size, block_size, avbhash.hash_algorithm, avbhash.salt, digest_padding, hash_level_offsets, tree_size)
ctx=hashlib.new(name=imgavbhash.hash_algorithm.decode('utf-8'))
ctx.update(imgavbhash.salt)
ctx.update(data[:imgavbhash.image_size])
root_digest=ctx.digest()
print("Salt: \t\t\t\t\t" + str(hexlify(imgavbhash.salt).decode('utf-8')))
print("Image-Size: \t\t\t\t" + hex(imgavbhash.image_size))
img_digest=str(hexlify(root_digest).decode('utf-8'))
img_avb_digest=str(hexlify(imgavbhash.digest).decode('utf-8'))
print("\nCalced Image-Hash: \t\t\t" + img_digest)
#print("Calced Hash_Tree: " + str(binascii.hexlify(hash_tree)))
print("Image-Hash: \t\t\t\t" + img_avb_digest)
avbmetacontent={}
vbmeta=None
if args.vbmeta=="":
if os.path.exists("vbmeta.img"):
args.vbmetaname="vbmeta.img"
if args.vbmeta!="":
with open(args.vbmeta,'rb') as vbm:
vbmeta=vbm.read()
avbhdr=AvbVBMetaHeader(vbmeta[:AvbVBMetaHeader.SIZE])
if avbhdr.magic!=b'AVB0':
print("Unknown vbmeta data")
exit(0)
class authentication_data(object):
def __init__(self,hdr,data):
self.hash=data[0x100+hdr.hash_offset:0x100+hdr.hash_offset+hdr.hash_size]
self.signature=data[0x100+hdr.signature_offset:0x100+hdr.signature_offset+hdr.signature_size]
class auxilary_data(object):
def __init__(self, hdr, data):
self.data=data[0x100+hdr.authentication_data_block_size:0x100+hdr.authentication_data_block_size+hdr.auxiliary_data_block_size]
authdata=authentication_data(avbhdr,vbmeta)
auxdata=auxilary_data(avbhdr,vbmeta).data
auxlen=len(auxdata)
i=0
while (i<auxlen):
desc=AvbDescriptor(auxdata[i:])
data=auxdata[i:]
if desc.tag==AvbPropertyDescriptor.TAG:
avbproperty=AvbPropertyDescriptor(data)
avbmetacontent["property"]=dict(avbproperty=avbproperty)
elif desc.tag==AvbHashtreeDescriptor.TAG:
avbhashtree=AvbHashtreeDescriptor(data)
partition_name=avbhashtree.partition_name
salt=avbhashtree.salt
root_digest=avbhashtree.root_digest
avbmetacontent[partition_name]=dict(salt=salt,root_digest=root_digest)
elif desc.tag==AvbHashDescriptor.TAG:
avbhash=AvbHashDescriptor(data)
partition_name=avbhash.partition_name
salt=avbhash.salt
digest=avbhash.digest
avbmetacontent[partition_name] = dict(salt=salt,digest=digest)
elif desc.tag==AvbKernelCmdlineDescriptor.TAG:
avbcmdline=AvbKernelCmdlineDescriptor(data)
kernel_cmdline=avbcmdline.kernel_cmdline
avbmetacontent["cmdline"] = dict(kernel_cmdline=kernel_cmdline)
elif desc.tag==AvbChainPartitionDescriptor.TAG:
avbchainpartition=AvbChainPartitionDescriptor(data)
partition_name=avbchainpartition.partition_name
public_key=avbchainpartition.public_key
avbmetacontent[partition_name] = dict(public_key=public_key)
i += desc.SIZE+len(desc.data)
vbmeta_digest=None
if imgavbhash.partition_name in avbmetacontent:
if "digest" in avbmetacontent[imgavbhash.partition_name]:
digest=avbmetacontent[imgavbhash.partition_name]["digest"]
vbmeta_digest = str(hexlify(digest).decode('utf-8'))
print("VBMeta-Image-Hash: \t\t\t" + vbmeta_digest)
else:
print("Couldn't find "+imgavbhash.partition_name+" in "+args.vbmetaname)
exit(0)
if vbmeta!=None:
pubkeydata=vbmeta[AvbVBMetaHeader.SIZE+avbhdr.authentication_data_block_size+avbhdr.public_key_offset:
AvbVBMetaHeader.SIZE+avbhdr.authentication_data_block_size+avbhdr.public_key_offset
+avbhdr.public_key_size]
modlen = struct.unpack(">I",pubkeydata[:4])[0]//4
n0inv = struct.unpack(">I", pubkeydata[4:8])[0]
modulus=hexlify(pubkeydata[8:8+modlen]).decode('utf-8')
print("\nSignature-RSA-Modulus (n):\t"+modulus)
print("Signature-n0inv: \t\t\t" + str(n0inv))
res=test_key(modulus)
if res!="":
print("\n"+res+"\n!!!! We have a signing key, yay !!!!")
else:
print("VBMeta info missing... please copy vbmeta.img to the directory.")
state=3
if img_digest==img_avb_digest:
state=0
if vbmeta_digest!=None:
if vbmeta_digest==img_digest:
state=0
else:
state=3
rotstate(state)
exit(0)
else:
signature=data[length:]
data=data[:length]
sha256 = hashlib.sha256()
sha256.update(data)
try:
target,siglength,hash,pub_key,flag=dump_signature(signature)
except:
print("No signature found :/")
exit(0)
id=hexlify(data[576:576+32])
print("\nID: "+id.decode('utf-8'))
print("Image-Target: "+str(target))
print("Image-Size: "+hex(length))
print("Signature-Size: "+hex(siglength))
meta=b"\x30"+flag+b"\x13"+bytes(struct.pack('B',len(target)))+target+b"\x02\x04"+bytes(struct.pack(">I",length))
#print(meta)
sha256.update(meta)
digest=sha256.digest()
print("\nCalced Image-Hash:\t"+hexlify(digest).decode('utf8'))
print("Signature-Hash:\t\t" + hexlify(hash).decode('utf8'))
if str(hexlify(digest))==str(hexlify(hash)):
rotstate(0)
else:
rotstate(3)
modulus=int_to_bytes(pub_key.n)
exponent=int_to_bytes(pub_key.e)
mod=str(hexlify(modulus).decode('utf-8'))
print("\nSignature-RSA-Modulus (n):\t"+mod)
print("Signature-RSA-Exponent (e):\t" + str(hexlify(exponent).decode('utf-8')))
res = test_key(modulus)
if res!="":
print("\n"+res+"\n!!!! We have a signing key, yay !!!!")
sha256 = hashlib.sha256()
sha256.update(modulus+exponent)
pubkey_hash=sha256.digest()
locked=pubkey_hash+struct.pack('<I',0x0)
unlocked = pubkey_hash + struct.pack('<I', 0x1)
sha256 = hashlib.sha256()
sha256.update(locked)
root_of_trust_locked=sha256.digest()
sha256 = hashlib.sha256()
sha256.update(unlocked)
root_of_trust_unlocked=sha256.digest()
print("\nTZ Root of trust (locked):\t\t" + str(hexlify(root_of_trust_locked).decode('utf-8')))
print("TZ Root of trust (unlocked):\t" + str(hexlify(root_of_trust_unlocked).decode('utf-8')))
if (args.inject==True):
pos = signature.find(target)
if (pos != -1):
lenpos = signature.find(struct.pack(">I",length)[0],pos)
if (lenpos!=-1):
with open(args.filename[0:-4]+"_signed.bin",'wb') as wf:
wf.write(data)
wf.write(signature[0:lenpos])
wf.write(struct.pack(">I",length))
wf.write(signature[lenpos+4:])
print("Successfully injected !")
if __name__ == "__main__":
main(sys.argv[1:])
| [
"[email protected]"
]
| |
c91b6d4d976b7babc5a3e71d20631f1d1e590f59 | 7c69c27a1c6ff2a1552900f4c1001281f4447233 | /codechef/cnote.py | 3694d9b77b4c2bf333e653fa8cf7792e0225184b | []
| no_license | Hamiltonxx/pyalgorithms | 894a0228928819601a816c472689ce96a11e1d25 | 92284f6105c5deb7f843ff299ee3ceb6382cf879 | refs/heads/master | 2023-09-04T13:01:46.465661 | 2023-09-02T05:50:23 | 2023-09-02T05:50:23 | 231,999,229 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 283 | py | T = int(input())
for i in range(T):
X,Y,K,N = map(int, input().split())
left = X-Y
flag=0
for j in range(N):
P,C = map(int, input().split())
if left<=P and C<=K and flag==0:
flag=1
print("LuckyChef") if flag else print("UnluckyChef")
| [
"[email protected]"
]
| |
5e419e8f6008694b7ab2272f54bb1a47a63ce4d4 | b341a8d120737297aa8fd394a23633dac9b5ccda | /accounts/migrations/0002_remove_customuser_department.py | 491606403866741ecec2bdf0b085d4202f9d193b | []
| no_license | Minari766/disney_side_stories | 16d97cb02bf00aa5439d59f753abb9a4706a30aa | aa2d88b1b0fdd87a27f41318bd3ec7352229b6ff | refs/heads/main | 2023-08-15T07:03:16.922579 | 2021-10-03T07:47:22 | 2021-10-03T07:47:22 | 306,496,250 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 327 | py | # Generated by Django 2.2.16 on 2021-01-13 14:28
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('accounts', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='customuser',
name='department',
),
]
| [
"[email protected]"
]
| |
3d2140dadd0c275c33161111b9edcf894ae45655 | cb65ef874d2427a1edcb132cda05e5ce2dc1aae4 | /modpy/stats/examples/example_hamiltonian.py | 56885508b9fcac1531048e82e79ab1919926985c | [
"MIT"
]
| permissive | FrederikLehn/modpy | 1395c27029f5fbfae2388cbd500b28e67a3cdb9e | 19ab18547e06e93fabfbd7f7b2f0f07ff0e70db3 | refs/heads/main | 2023-07-14T11:58:26.379687 | 2021-07-30T11:04:19 | 2021-07-30T11:04:19 | 390,731,060 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,567 | py | import numpy as np
from numpy.random import Generator, PCG64
import matplotlib.pyplot as plt
from matplotlib.lines import Line2D
from matplotlib.patches import Patch
from modpy.stats import hamiltonian_mc
from modpy.stats._core import auto_correlation, auto_correlation_time
from modpy.plot.plot_util import cm_parula, default_color, set_font_sizes
from modpy.illustration.illustration_util import STATS_PATH
from modpy.optimize._derivatives import approx_difference
def _plot_MH_1D():
# example from: http://www.mit.edu/~ilkery/papers/MetropolisHastingsSampling.pdf
seed = 1234
gen = Generator(PCG64(seed))
n = 150
samples = 100000
mu = np.array([0., 0.])
rho = 0.45
sigma = np.array([(1., rho),
[rho, 1.]])
x1, x2 = gen.multivariate_normal(mu, sigma, n).T
rho_emp = np.corrcoef(x1, x2)[0, 1]
# bi-variate normal distribution with mu1=mu2=0 and sigma1=sigma2=1
def log_like(rho_):
p = 1. / (2. * np.pi * np.sqrt(1. - rho_ ** 2.)) * np.exp(-1. / (2. * (1. - rho_ ** 2.)) * (x1 ** 2 - 2. * rho_ * x1 * x2 + x2 ** 2.))
return np.sum(np.log(p))
# Jeffreys prior
def log_prior(rho_):
return np.log((1. / (1. - rho_ ** 2.)) ** 1.5)
def log_posterior(rho_):
return np.minimum(0., log_like(rho_) + log_prior(rho_))
# def log_like_grad(rho_):
# p = (rho_ * (rho_ + 1.) * (rho_ - 1.) + (x1 - rho_ * x2) * (rho_ * x1 - x2)) / ((rho_ + 1) * (rho_ - 1.) * (-rho ** 2. + 1))
# return np.sum(p)
rho0 = np.array([0.])
bounds = ((-1., 1.),)
res = hamiltonian_mc(log_posterior, rho0, samples, df='3-point', burn=1000, bounds=bounds, seed=seed, keep_path=True)
xp = res.x
# calculate auto-correlation and determine lag-time until independence.
lags = 100
auto_corr = auto_correlation(xp.flatten(), lags)
tau = auto_correlation_time(xp.flatten())
# sub-sample only uncorrelated samples
xp_ind = xp[::tau]
samples_ind = np.arange(0, samples, tau)
rho_sam = np.mean(xp_ind)
# plot problem plots -----------------------------------------------------------------------------------------------
# plot observations
# ax1.scatter(x1, x2, s=20, color=default_color(0))
# ax1.set_xlabel('$x_1$')
# ax1.set_ylabel('$x_2$')
# ax1.grid(True)
# ax1.set_title('Data')
# set_font_sizes(ax1, 12)
# plot the log-likelihood over the domain [-1, 1]
# k = 500
# rhos = np.linspace(-0.999, 0.999, k)
# L = np.array([log_like(r) for r in rhos])
#
# ax4.plot(rhos, L, color=default_color(0))
# ax4.set_xlabel('$\\rho$')
# ax4.set_ylabel('$\log(f(\\rho | x, y))$')
# ax4.grid(True)
# ax4.ticklabel_format(axis="y", style="sci", scilimits=(0, 0))
# ax4.set_title('Log-Likelihood')
# set_font_sizes(ax4, 12)
#
# # plot the gradient of the log-likelihood
# dL = np.array([log_like_grad(r) for r in rhos])
#
# ax5.plot(rhos, dL, color=default_color(0))
# ax5.set_xlabel('$\\rho$')
# ax5.set_ylabel('$\\frac{\partial}{\partial \\rho}\log(f(\\rho))$')
# ax5.grid(True)
# ax5.ticklabel_format(axis="y", style="sci", scilimits=(0, 0))
# ax5.set_title('Derivative of Log-Likelihood')
# set_font_sizes(ax5, 12)
# plot HMC behaviour plots -----------------------------------------------------------------------------------------
fig, axes = plt.subplots(2, 3, figsize=(20, 14))
ax1, ax2, ax3, ax4, ax5, ax6 = axes.flatten()
# plot Markov chain
ax1.plot(np.arange(samples), xp, color=default_color(0), label='Full')
ax1.plot(samples_ind, xp_ind, color=default_color(1), label='Thinned')
ax1.plot([0, samples], [rho, rho], 'k', label='True $\\rho$')
ax1.plot([0, samples], [rho_emp, rho_emp], color='m', label='Empirical $\\rho$')
ax1.plot([0, samples], [rho_sam, rho_sam], lw=2, color='orange', label='Sampled $\\rho$')
ax1.set_xlim([0, samples])
ax1.set_ylim([0.2, 0.7])
ax1.set_xlabel('Samples')
ax1.set_ylabel('$\\rho$')
ax1.legend(loc='upper right')
ax1.grid(True)
ax1.set_title('Markov Chain')
set_font_sizes(ax1, 12)
# plot histogram
hist = ax2.hist(xp, 50, facecolor=default_color(0))#, edgecolor='k', linewidth=0.2)
freq = hist[0]
max_freq = np.amax(freq) * 1.1
ax2.plot([rho, rho], [0, max_freq], color='k', label='True $\\rho$')
ax2.plot([rho_emp, rho_emp], [0, max_freq], color='m', label='Empirical $\\rho$')
ax2.plot([rho_sam, rho_sam], [0, max_freq], lw=2, color='orange', label='Sampled $\\rho$')
ax2.set_xlim([0.2, 0.7])
ax2.set_ylim([0., max_freq])
ax2.set_xlabel('$\\rho$')
ax2.set_ylabel('Frequency (ind.)')
ax2.grid(True)
ax2.set_title('Posterior Distribution')
set_font_sizes(ax2, 12)
ax2_1 = ax2.twinx()
ax2_1.hist(xp_ind, 50, facecolor=default_color(1), alpha=0.35) # , edgecolor='k', linewidth=0.2
ax2_1.set_ylabel('Frequency')
set_font_sizes(ax2_1, 12)
ax2.legend(handles=(Patch(color=default_color(0), label='Full'),
Patch(color=default_color(1), label='Thinned'),
Line2D([], [], color='k', label='True $\\rho$'),
Line2D([], [], color='m', label='Empirical $\\rho$'),
Line2D([], [], color='orange', label='Sampled $\\rho$')))
# plot the autocorrelation
ax3.plot(np.arange(lags), auto_corr, color=default_color(0), label='Auto-correlation')
ax3.plot([tau, tau], [-1., 1.], 'k--', label='Lag-time, $\\tau$')
ax3.set_xlim([0., lags])
ax3.set_ylim([-0.1, 1.])
ax3.set_xlabel('Lag')
ax3.set_ylabel('Auto-Correlation')
ax3.legend()
ax3.grid(True)
ax3.set_title('Auto-Correlation')
set_font_sizes(ax3, 12)
# plot the acceptance probability
ax4.plot(np.arange(res.path.accept.size), res.path.accept, color=default_color(0), label='$\delta$')
ax4.plot([0, res.path.accept.size], [0.65, 0.65], 'k--', label='$\delta_{target}$')
ax4.set_xlim([0, res.path.accept.size])
ax4.set_ylim([0., 1.])
ax4.set_xlabel('Samples (incl. burn-in)')
ax4.set_ylabel('Acceptance Ratio, $\delta$')
ax4.grid(True)
ax4.legend()
ax4.set_title('Acceptance Ratio')
set_font_sizes(ax4, 12)
# plot the step-size
ax5.plot(np.arange(res.path.step_size.size), res.path.step_size, color=default_color(0))
ax5.set_xlim([0, res.path.step_size.size])
ax5.set_ylim([0., None])
ax5.set_xlabel('Burn-in Samples')
ax5.set_ylabel('Step-size, $\\bar{\epsilon}$')
ax5.grid(True)
ax5.set_title('Step-size Adaptation')
set_font_sizes(ax5, 12)
# plot the integration steps
ax6.plot(np.arange(res.path.steps.size), res.path.steps, color=default_color(0))
ax6.set_xlim([0, res.path.steps.size])
ax6.set_ylim([0., None])
ax6.set_xlabel('Samples (incl. burn-in)')
ax6.set_ylabel('Integration Steps, $L$')
ax6.grid(True)
ax6.set_title('Integration Steps')
set_font_sizes(ax6, 12)
fig.savefig(STATS_PATH + '1D_performance_hamiltonian_MC.png')
def _plot_MH_2D():
seed = 1234
gen = Generator(PCG64(seed))
n = 150
samples = 100000
burn = 1000
mu = np.array([0., 0.])
sigma1 = 3.
sigma2 = 2.
rho = 0.9
cov = rho * sigma1 * sigma2
sigma = np.array([(sigma1 ** 2., cov),
[cov, sigma2 ** 2.]])
x1, x2 = gen.multivariate_normal(mu, sigma, n).T
s1_emp = np.std(x1)
s2_emp = np.std(x2)
# bi-variate normal distribution with mu1=mu2=0, known rho and unknown sigma1 and sigma2
def log_like(sigma_):
s1, s2 = sigma_
p = 1. / (2. * np.pi * s1 * s2 * np.sqrt(1. - rho ** 2.)) * np.exp(-1. / (2. * (1. - rho ** 2.)) * ((x1 / s1) ** 2 - 2. * rho * (x1 / s1) * (x2 / s2) + (x2 / s2) ** 2.))
return np.sum(np.log(p))
def log_prior(sigma_):
s1, s2 = sigma_
p = 1. / (2. * np.pi * s1 * s2) * np.exp(-1. / 2 * ((x1 / s1) ** 2 + (x2 / s2) ** 2.))
return np.sum(np.log(p))
def log_posterior(sigma_):
return np.minimum(0., log_like(sigma_) + log_prior(sigma_))
sigma0 = np.array([1.1, 1.1])
bounds = ((1., None), (1., None))
res = hamiltonian_mc(log_posterior, sigma0, samples, df='3-point', burn=burn, bounds=bounds, seed=seed, keep_path=True)
xp = res.x
# calculate auto-correlation and determine lag-time until independence.
lags = 100
auto_corr1 = auto_correlation(xp[:, 0], lags)
auto_corr2 = auto_correlation(xp[:, 1], lags)
tau1 = auto_correlation_time(xp[:, 0])
tau2 = auto_correlation_time(xp[:, 1])
tau = np.maximum(tau1, tau2)
# sub-sample only uncorrelated samples
xp_ind = xp[::tau, :]
s1_sam = np.mean(xp_ind[:, 0])
s2_sam = np.mean(xp_ind[:, 1])
# plot problem plots -----------------------------------------------------------------------------------------------
# # plot observations
# ax1 = fig.add_subplot(r, c, 1)
# ax1.scatter(x1, x2, s=20, color=default_color(0))
# ax1.set_xlabel('$x_1$')
# ax1.set_ylabel('$x_2$')
# ax1.grid(True)
# ax1.set_title('Data')
# set_font_sizes(ax1, 12)
# # plot the likelihood over the domain
# ng = 250
# ng2 = ng ** 2
# s1_ = np.linspace(1., 5., ng)
# s2_ = np.linspace(1., 5., ng)
# S1, S2 = np.meshgrid(s1_, s2_)
# S = [S1.flatten(), S2.flatten()]
#
# # calculate likelihood
# L = np.zeros((ng2,))
# for i in range(ng2):
# L[i] = log_like([S[0][i], S[1][i]])
#
# L = np.reshape(L, (ng, ng))
#
# ax4 = fig.add_subplot(r, c, 4, projection='3d')
# ax4.plot_surface(S1, S2, L, cmap=cm_parula, edgecolors='k', lw=0.2)
#
# ax4.set_xlabel('$\sigma_1$')
# ax4.set_ylabel('$\sigma_2$')
# ax4.set_zlabel('$\log(f(\\rho | x, y))$')
# ax4.grid(True)
# ax4.set_title('Log-Likelihood')
# ax4.ticklabel_format(axis="z", style="sci", scilimits=(0, 0))
# set_font_sizes(ax4, 12)
# # calculate prior probability
# pri = np.zeros((ng2,))
# for i in range(ng2):
# pri[i] = log_like_grad(np.array([S[0][i], S[1][i]]))
#
# pri = np.reshape(pri, (ng, ng))
#
# ax5 = fig.add_subplot(r, c, 5, projection='3d')
# ax5.plot_surface(S1, S2, pri, cmap=cm_parula, edgecolors='k', lw=0.2)
#
# ax5.set_xlabel('$\sigma_1$')
# ax5.set_ylabel('$\sigma_2$')
# ax5.set_zlabel('$\log(f(\\rho))$')
# ax5.grid(True)
# ax5.set_title('Log-Prior Probability')
# ax5.ticklabel_format(axis="z", style="sci", scilimits=(0, 0))
# set_font_sizes(ax5, 12)
# plot HMC behaviour plots -----------------------------------------------------------------------------------------
r, c = 2, 3
fig = plt.figure(figsize=(20, 14))
# plot markov chain
ax1 = fig.add_subplot(r, c, 1)
ax1.plot(xp[:, 0], xp[:, 1], color=default_color(0), label='Full')
ax1.plot(xp_ind[:, 0], xp_ind[:, 1], color=default_color(1), label='Thinned')
ax1.plot(sigma1, sigma2, color='k', marker='o', ls='', ms=8, label='True $(\sigma_1, \sigma_2)$')
ax1.plot(s1_emp, s2_emp, color='m', marker='o', ls='', ms=8, label='Empirical $(\sigma_1, \sigma_2)$')
ax1.plot(s1_sam, s2_sam, color='g', marker='o', ls='', ms=8, label='Sampled $(\sigma_1, \sigma_2)$')
ax1.set_xlim([2., 4.5])
ax1.set_ylim([1.4, 2.7])
ax1.set_xlabel('$\sigma_1$')
ax1.set_ylabel('$\sigma_2$')
ax1.grid(True)
ax1.legend()
ax1.set_title('Markov Chain')
set_font_sizes(ax1, 12)
# plot histogram
cmap = cm_parula
ax2 = fig.add_subplot(r, c, 2)
ax2.hist2d(xp[:, 0], xp[:, 1], 100, cmap=cmap, range=[[2., 4.5], [1.4, 2.7]])
ax2.plot(sigma1, sigma2, color='k', marker='o', ls='', ms=8)
ax2.plot(s1_emp, s2_emp, color='m', marker='o', ls='', ms=8)
ax2.plot(s1_sam, s2_sam, color='g', marker='o', ls='', ms=8)
ax2.set_xlim([2., 4.5])
ax2.set_ylim([1.4, 2.7])
ax2.set_xlabel('$\sigma_1$')
ax2.set_ylabel('$\sigma_2$')
ax2.grid(True)
ax2.set_title('Posterior Distribution')
set_font_sizes(ax2, 12)
# plot the autocorrelation
ax3 = fig.add_subplot(r, c, 3)
ax3.plot(np.arange(lags), auto_corr1, color=default_color(0), label='Auto-corr, $\sigma_1$')
ax3.plot(np.arange(lags), auto_corr2, color=default_color(1), label='Auto-corr, $\sigma_2$')
ax3.plot([tau, tau], [-1., 1.], 'k--', label='Lag-time, $\\tau$')
ax3.set_xlim([0, lags])
ax3.set_ylim([-0.1, 1.])
ax3.set_xlabel('Lag')
ax3.set_ylabel('Auto-Correlation')
ax3.grid(True)
ax3.legend()
ax3.set_title('Auto-Correlation')
set_font_sizes(ax3, 12)
# plot the acceptance probability
ax4 = fig.add_subplot(r, c, 4)
ax4.plot(np.arange(res.path.accept.size), res.path.accept, color=default_color(0), label='$\delta$')
ax4.plot([0, res.path.accept.size], [0.65, 0.65], 'k--', label='$\delta_{target}$')
ax4.set_xlim([0, res.path.accept.size])
ax4.set_ylim([0., 1.])
ax4.set_xlabel('Samples (incl. burn-in)')
ax4.set_ylabel('Acceptance Ratio, $\delta$')
ax4.grid(True)
ax4.legend()
ax4.set_title('Acceptance Ratio')
set_font_sizes(ax4, 12)
# plot the step-size
ax5 = fig.add_subplot(r, c, 5)
ax5.plot(np.arange(res.path.step_size.size), res.path.step_size, color=default_color(0))
ax5.set_xlim([0, res.path.step_size.size])
ax5.set_ylim([0., None])
ax5.set_xlabel('Burn-in Samples')
ax5.set_ylabel('Step-size, $\\bar{\epsilon}$')
ax5.grid(True)
ax5.set_title('Step-size Adaptation')
set_font_sizes(ax5, 12)
# plot the integration steps
ax6 = fig.add_subplot(r, c, 6)
ax6.plot(np.arange(res.path.steps.size), res.path.steps, color=default_color(0))
ax6.set_xlim([0, res.path.steps.size])
ax6.set_ylim([0., None])
ax6.set_xlabel('Samples (incl. burn-in)')
ax6.set_ylabel('Integration Steps, $L$')
ax6.grid(True)
ax6.set_title('Integration Steps')
set_font_sizes(ax6, 12)
fig.savefig(STATS_PATH + '2D_performance_hamiltonian_MC.png')
if __name__ == '__main__':
_plot_MH_1D()
_plot_MH_2D()
| [
"[email protected]"
]
| |
616cc832511e0d987c803eef500c6c3d52031364 | d8b201ba6bf57db0101d88836429bbcb3a10b857 | /Debugging/WordsScore.py | 498ac004178cda6a9c940b240758bb48d0b5f954 | [
"MIT"
]
| permissive | MaxCodeXTC/PythonHackerRankSolutions | 32ad41df3fbd33f8651cdc5099c8ec3d37d9bc17 | 987618b61b71fe5e9a40275fb348476657bbea57 | refs/heads/master | 2022-06-28T06:00:19.126751 | 2020-05-07T09:23:37 | 2020-05-07T09:23:37 | 262,471,271 | 1 | 0 | null | 2020-05-09T02:24:11 | 2020-05-09T02:24:10 | null | UTF-8 | Python | false | false | 129 | py | '''
Title : Words Score
Subdomain : Debugging
Domain : Python
Author : codeperfectplus
Created : 17 January 2020
'''
| [
"[email protected]"
]
| |
c275f43eb61c8eb74f5a97d674bd1f452c0c7b93 | 386a5b505d77c9798aaab78495d0f00c349cf660 | /Prognos Project/Working/Latiket Jaronde Git/DJango examples/DynamicUrls/urlDemo/views.py | bd2b5dba4a0956ad113487a7d28a7ebb91ba1c86 | []
| no_license | namratarane20/MachineLearning | 2da2c87217618d124fd53f607c20641ba44fb0b7 | b561cc74733b655507242cbbf13ea09a2416b9e2 | refs/heads/master | 2023-01-20T18:54:15.662179 | 2020-03-09T14:12:44 | 2020-03-09T14:12:44 | 237,597,461 | 0 | 0 | null | 2023-01-05T12:37:12 | 2020-02-01T10:22:20 | Python | UTF-8 | Python | false | false | 897 | py | from django.shortcuts import render
from django.http import HttpResponse
# Create your views here.
def home(request, name=""):
l = [{'a': 1, 'b': 2, 'c': 3}, {'a': 11, 'b': 22, 'c': 33}, {'a': 111, 'b': 222, 'c': 333}]
if name == "":
data = ['latiket', 'akash', 10, 20]
name = "Dom"
context = {"msg": "welcome ", "data": data, "name": name, 'l': l}
return render(request, "urlDemo/home.html", context)
else:
data = [10, 20]
context = {"msg": "welcome ", "data": data, "name": name, 'first': True, 'l': l}
return render(request, "urlDemo/second.html", context)
def out(request, name, d):
print("inside out")
print("d1 = ", d)
data = ['latiket', 'akash', 10, 20]
context = {"msg": "welcome ", "data": data, "number": d, "name": name, 'first': False}
return render(request, "urlDemo/second.html", context)
| [
"[email protected]"
]
| |
1758849bf2661b3f1dcfd1af37f68a1e02729240 | d41d18d3ea6edd2ec478b500386375a8693f1392 | /plotly/validators/parcoords/dimension/_templateitemname.py | a6797fb9536cd704210159e36fd5b521aea5421a | [
"MIT"
]
| permissive | miladrux/plotly.py | 38921dd6618650d03be9891d6078e771ffccc99a | dbb79e43e2cc6c5762251537d24bad1dab930fff | refs/heads/master | 2020-03-27T01:46:57.497871 | 2018-08-20T22:37:38 | 2018-08-20T22:37:38 | 145,742,203 | 1 | 0 | MIT | 2018-08-22T17:37:07 | 2018-08-22T17:37:07 | null | UTF-8 | Python | false | false | 474 | py | import _plotly_utils.basevalidators
class TemplateitemnameValidator(_plotly_utils.basevalidators.StringValidator):
def __init__(
self,
plotly_name='templateitemname',
parent_name='parcoords.dimension',
**kwargs
):
super(TemplateitemnameValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type='calc',
role='info',
**kwargs
)
| [
"[email protected]"
]
| |
fc701644c46768b290144139c8e8bd03e9cfb9b7 | a8e132de33ff576205811432052265bfca1a8413 | /unified_social_api/exceptions.py | beace5f272fa3563486ff355401cf417b58f937f | [
"MIT"
]
| permissive | kanishkarj/unified-social-api | 4b1f81026d0772a9d399b2db39b0a5924e02946b | 85bfd566d18087d0a526288bd297059b1e3dcb74 | refs/heads/master | 2021-09-04T03:55:02.226635 | 2018-01-15T15:17:16 | 2018-01-15T15:17:16 | 117,405,090 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 104 | py | class UnifiedSocialAPIError(Exception):
pass
class NotFoundError(UnifiedSocialAPIError):
pass
| [
"[email protected]"
]
| |
0f149c6acecb80330e3c79511180324dbce155fb | 2aba62d66c2c622bdc148cef451da76cae5fd76c | /exercise/learn_python_dm2039/ch30/ch30_17.py | b986aa91ea8f242a52bc9f62580e37d7407553c6 | []
| no_license | NTUT-109AB8011/crawler | 6a76de2ab1848ebc8365e071e76c08ca7348be62 | a703ec741b48d3af615a757fed7607b1f8eb66a6 | refs/heads/master | 2023-03-26T22:39:59.527175 | 2021-03-30T03:29:22 | 2021-03-30T03:29:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 572 | py | # ch30_17.py
import threading
import time
def worker():
print(threading.currentThread().getName(), 'Starting')
time.sleep(3)
print(threading.currentThread().getName(), 'Exiting')
w = threading.Thread(name='worker',target=worker)
w.start()
print('start join')
w.join(1.5)         # wait at most 1.5 seconds for the worker thread to finish before moving on
print("Is the worker thread still working? ", w.is_alive())
time.sleep(2)       # main thread sleeps for 2 seconds
print("Is the worker thread still working? ", w.is_alive())
print('end join')
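# Expected behaviour: join(1.5) returns while the worker (which sleeps 3 s) is
# still running, so the first check prints True; after the extra 2 s sleep the
# worker has exited and the second check prints False.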
| [
"[email protected]"
]
| |
93ca2f8487f174dab4e789d314ee88d24e2f3ce9 | 247508a09bbcd08f75de7c85118caf857941f9dd | /python/lvmscp/actor/commands/focus.py | df0a3a6a39a0203eecff28d2afa3789178f3787b | [
"BSD-3-Clause"
]
| permissive | sdss/lvmscp | 051b6aad20d2a184ac046be086331fa06f2d3fa2 | f74d83997cbba01a0c5b55615fbe6dbf0572b8c9 | refs/heads/main | 2023-09-02T19:47:20.394310 | 2023-08-31T19:32:15 | 2023-08-31T19:32:15 | 348,923,320 | 2 | 0 | BSD-3-Clause | 2022-04-11T02:59:57 | 2021-03-18T03:03:12 | Python | UTF-8 | Python | false | false | 3,827 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# @Author: José Sánchez-Gallego ([email protected])
# @Date: 2022-05-14
# @Filename: focus.py
# @License: BSD 3-clause (http://www.opensource.org/licenses/BSD-3-Clause)
from __future__ import annotations
from typing import TYPE_CHECKING
import click
from archon.actor.commands import parser
if TYPE_CHECKING:
from archon.controller import ArchonController
from ..actor import CommandType
__all__ = ["focus"]
async def move_hds(
command: CommandType,
spectro: str,
side: str = "all",
action: str = "open",
verbose: bool = False,
):
"""Helper to open/close HDs."""
if verbose:
if action == "open":
command.info(f"Opening {side} Hartmann door(s).")
else:
command.info(f"Closing {side} Hartmann door(s).")
hd_cmd = await (
await command.send_command("lvmieb", f"hartmann {action} -s {side} {spectro}")
)
if hd_cmd.status.did_fail:
command.fail(
"Failed moving Hartmann doors. See lvmieb log for more information."
)
return False
return True
# TODO: needs rewriting for different specs.
@parser.command()
@click.argument("SPECTRO", type=click.Choice(["sp1", "sp2", "sp3"]))
@click.argument("EXPTIME", type=float)
@click.option("-n", "--count", type=int, default=1, help="Number of focus cycles.")
@click.option("--dark", flag_value=True, help="Take a dark along each exposure.")
async def focus(
command: CommandType,
controllers: dict[str, ArchonController],
spectro: str,
exptime: float,
count: int = 1,
dark: bool = False,
):
"""Take a focus sequence with both Hartmann doors."""
# TODO: add a check for arc lamps or, better, command them to be on.
for n in range(count):
if count != 1:
command.info(f"Focus iteration {n+1} out of {count}.")
for side in ["left", "right"]:
# Open both HDs.
if not (await move_hds(command, spectro, "all", "open", verbose=False)):
return
# Close HD.
if not (await move_hds(command, spectro, side, "close", verbose=True)):
return
# Arc exposure.
command.info("Taking arc exposure.")
expose_cmd = await command.send_command(
"lvmscp", f"expose --arc -c {spectro} {exptime}"
)
await expose_cmd
if expose_cmd.status.did_fail:
return command.fail("Failed taking arc exposure.")
filenames = []
for reply in expose_cmd.replies:
if "filenames" in reply.message:
filenames += reply.message["filenames"]
dark_filenames = []
if dark:
# Dark exposure, if commanded.
command.info("Taking dark exposure.")
dark_cmd = await command.send_command(
"lvmscp", f"expose --dark -c {spectro} {exptime}"
)
await dark_cmd
if dark_cmd.status.did_fail:
return command.fail("Failed taking arc exposure.")
for reply in dark_cmd.replies:
if "filenames" in reply.message:
dark_filenames += reply.message["filenames"]
command.info(
focus={
"spectrograph": spectro,
"iteration": n + 1,
"side": side,
"exposures": filenames,
"darks": dark_filenames,
}
)
# Reopen HDs.
command.info("Reopening Hartmann doors.")
if not (await move_hds(command, spectro, "all", "open", verbose=False)):
return
command.finish()
| [
"[email protected]"
]
| |
c58b535953582454e0c1d3cc1dbbab28db5bf736 | 4a8c1f7d9935609b780aff95c886ef7781967be0 | /atcoder/AOJ/id1130.py | 4af6ca8e627dfbd5e251b8aee1fa908bddc0e8e3 | []
| no_license | recuraki/PythonJunkTest | d5e5f5957ac5dd0c539ef47759b1fe5ef7a2c52a | 2556c973d468a6988d307ce85c5f2f8ab15e759a | refs/heads/master | 2023-08-09T17:42:21.875768 | 2023-07-18T23:06:31 | 2023-07-18T23:06:31 | 13,790,016 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,682 | py |
def do():
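    # BFS flood fill: count every tile reachable from the start marker '@'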
    from collections import deque
    while True:
w, h = map(int, input().split())
if w == h == 0: break
maze = []
curh, curw = -1, -1
maze.append("#" * (w+2))
for hh in range(h):
l = list("#" + input() + "#")
if l.count("@"): curh, curw = hh + 1, l.index("@")
maze.append(l)
maze.append("#" * (w+2))
maze[curh][curw] = "."
q = deque([(curh, curw)])
dh = [-1, 0, 0, 1]
dw = [0, -1, 1, 0]
ans = 0
while len(q) > 0:
curh, curw = q.popleft()
if maze[curh][curw] == "#": continue
maze[curh][curw] = "#"
ans += 1
for di in range(len(dh)):
nexth, nextw = curh + dh[di], curw + dw[di]
if maze[nexth][nextw] == "#": continue
q.append( (nexth, nextw) )
print(ans)
def do2():
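    # Recursive DFS variant of the same count (fine for these small AOJ
    # grids; a much larger open map could hit Python's default recursion limit)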
dh = [-1, 0, 0, 1]
dw = [0, -1, 1, 0]
while True:
w, h = map(int, input().split())
if w == h == 0: break
maze = []
curh, curw = -1, -1
maze.append("#" * (w + 2))
for hh in range(h):
l = list("#" + input() + "#")
if l.count("@"): curh, curw = hh + 1, l.index("@")
maze.append(l)
maze.append("#" * (w + 2))
def search(h, w):
if maze[h][w] == "#": return 0
ans = 1
maze[h][w] = "#"
for di in range(len(dh)):
ans += search(h + dh[di], w + dw[di])
return ans
print(search(curh, curw))
#do()
do2() | [
"[email protected]"
]
| |
7f31e3454fffeba9d60042466e01d28db7bf7dcd | 7cbcef1abbc76c43e2dd094bfe51f81fba8b0e9a | /03_Computer_Vision_OpenCV/01_Document_Scanner/01_document_scanner.py | c09f13abf132d8705185ace2110593b342c9f948 | []
| no_license | SimonSlominski/Data_Science | 10fd5ca3bba8718b19804200c8f14e241e1e78b2 | 5cab52be83effc9e0b9a86888cedcd836dd00980 | refs/heads/master | 2021-05-17T11:19:11.509588 | 2020-06-15T13:44:47 | 2020-06-15T13:44:47 | 250,752,832 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,914 | py | """
Detection problems may occur if the background of the image is bright
"""
from numpy.linalg import norm
from skimage.filters import threshold_local
import numpy as np
import imutils
import cv2
image = cv2.imread('images/paragon_1.jpg')
# Image size standardization
# Keep a copy of the original image for later transformations
original_image = image.copy()
# Keep the original image's aspect ratio
ratio = image.shape[0] / 500.0
# Resize up to 500 px. From (600, 450, 3) to (500, 375, 3)
image = imutils.resize(image, height=500)
# Image conversion to grayscale
gray_image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)  # cv2.imread loads images as BGR
# Blur before edge detection so Canny is less sensitive to noise
gray_image = cv2.GaussianBlur(gray_image, ksize=(5, 5), sigmaX=0)
# Edge detection
edges = cv2.Canny(gray_image, threshold1=75, threshold2=200)
# Find contours
contours = cv2.findContours(image=edges.copy(),
mode=cv2.RETR_LIST,
method=cv2.CHAIN_APPROX_SIMPLE)
contours = imutils.grab_contours(contours)
contours = sorted(contours, key=cv2.contourArea, reverse=True)[:5]
# Finding the document outline
screen_contour = None
for contour in contours:
# calculate the perimeter of each figure found
perimeter = cv2.arcLength(curve=contour, closed=True)
# approximation of the rectangle curve
approx = cv2.approxPolyDP(curve=contour, epsilon=0.02 * perimeter, closed=True)
if len(approx) == 4:
screen_contour = approx
break
# Display found vertices
# vertices = cv2.drawContours(image, contours=screen_contour, contourIdx=-1, color=(0, 255, 0), thickness=10)
# Extraction of vertices
points = screen_contour.reshape(4, 2)
points = points * ratio
# Create empty numpy array
rectangle = np.zeros((4, 2), dtype='float32')
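# Corner ordering: the top-left corner has the smallest x+y sum and the
# bottom-right the largest; np.diff gives y - x, which is smallest at the
# top-right and largest at the bottom-left (assumes a roughly upright page).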
total = points.sum(axis=1)
rectangle[0] = points[np.argmin(total)]
rectangle[2] = points[np.argmax(total)]
difference = np.diff(points, axis=1)
rectangle[1] = points[np.argmin(difference)]
rectangle[3] = points[np.argmax(difference)]
a, b, c, d = rectangle
width1 = norm(c - d)
width2 = norm(b - a)
max_width = max(int(width1), int(width2))
height1 = norm(b - c)
height2 = norm(a - d)
max_height = max(int(height1), int(height2))
vertices = np.array([
    [0, 0],
    [max_width - 1, 0],
    [max_width - 1, max_height - 1],
    [0, max_height - 1]
], dtype='float32')
# Transformation matrix 3x3
M = cv2.getPerspectiveTransform(rectangle, vertices)
# Transfer of document to image
out = cv2.warpPerspective(src=original_image, M=M, dsize=(max_width, max_height))
# To grayscale
out = cv2.cvtColor(out, cv2.COLOR_BGR2GRAY)
# Calculation of the threshold mask based on the proximity of pixels
T = threshold_local(image=out, block_size=11, offset=10, method='gaussian')
out = (out > T).astype('uint8') * 255
cv2.imshow('img', out)
cv2.waitKey(0)
cv2.destroyAllWindows()
| [
"[email protected]"
]
| |
f74cc4930dcd25bf2e86b5bc9d77e8b29a9297ba | d5be74d2de6fa0ded61d6c3ee7c91a403c0f90db | /quantarhei/qm/liouvillespace/rates/foersterrates.py | 36cb1e2bc487d09ec05efb6449d60fc918149f64 | [
"MIT"
]
| permissive | tmancal74/quantarhei | 43cf9d4be857b8e6db1274ebb8a384f1545cd9ad | fa3042d809005d47106e53609e6a63aa780c477c | refs/heads/master | 2023-05-11T06:57:36.368595 | 2023-05-02T13:10:18 | 2023-05-02T13:10:18 | 63,804,925 | 20 | 22 | MIT | 2022-12-21T14:10:00 | 2016-07-20T18:30:25 | Python | UTF-8 | Python | false | false | 5,021 | py | # -*- coding: utf-8 -*-
import numpy
import scipy.interpolate as interp
from ...hilbertspace.hamiltonian import Hamiltonian
from ...liouvillespace.systembathinteraction import SystemBathInteraction
from ...corfunctions.correlationfunctions import c2g
class FoersterRateMatrix:
"""Förster relaxation rate matrix
Förster population relaxation rate matrix is calculated from the
    Hamiltonian and the system-bath interaction.
Parameters
----------
ham : Hamiltonian
Hamiltonian object
sbi : SystemBathInteraction
SystemBathInteraction object
initialize : bool (default True)
If true, the rates will be calculated when the object is created
cutoff_time : float
If cutoff time is specified, the tensor is integrated only up to the
cutoff time
"""
def __init__(self, ham, sbi, initialize=True, cutoff_time=None):
if not isinstance(ham, Hamiltonian):
raise Exception("First argument must be a Hamiltonian")
if not isinstance(sbi, SystemBathInteraction):
raise Exception("Second argument must be a SystemBathInteraction")
self._is_initialized = False
self._has_cutoff_time = False
if cutoff_time is not None:
self.cutoff_time = cutoff_time
self._has_cutoff_time = True
self.ham = ham
self.sbi = sbi
if initialize:
self.initialize()
self._is_initialized = True
def initialize(self):
HH = self.ham.data
Na = self.ham.dim
sbi = self.sbi
tt = sbi.TimeAxis.data
# line shape functions
gt = numpy.zeros((Na, sbi.TimeAxis.length),
dtype=numpy.complex64)
# SBI is defined with "sites"
for ii in range(1, Na):
gt[ii,:] = c2g(sbi.TimeAxis, sbi.CC.get_coft(ii-1,ii-1))
# reorganization energies
ll = numpy.zeros(Na)
for ii in range(1, Na):
ll[ii] = sbi.CC.get_reorganization_energy(ii-1,ii-1)
self.data = _reference_implementation(Na, HH, tt, gt, ll)
def _reference_implementation(Na, HH, tt, gt, ll):
"""Reference implementation of Foerster rates
Calculate the rates between specified sites using standard Foerster
theory.
Reference:
L. Valkunas, D. Abramavicius, and T. Mančal, Molecular Excitation
Dynamics and Relaxation, Wiley-VCH, Berlin (2013), page:
Parameters
----------
Na : integer
Number of sites in the problem (rank of the rate matrix)
HH : float array
Hamiltonian matrix
tt : float array
Time points in which the line shape functions are given
gt : complex array
        Line shape function values at the given time points.
First index corresponds to the site, the second to the time point
ll : array
Reorganization energies on sites
Returns
-------
KK : float array
Rate matrix with zeros on the diagonal
"""
#
# Rates between states a and b
#
KK = numpy.zeros((Na,Na), dtype=numpy.float64)
for a in range(Na):
for b in range(Na):
if a != b:
ed = HH[b,b] # donor
ea = HH[a,a] # acceptor
KK[a,b] = (HH[a,b]**2)*_fintegral(tt, gt[a,:], gt[b,:],
ed, ea, ll[b])
#
# depopulation rates
#
Kaa = 0.0
for a in range(Na):
Kaa = numpy.sum(KK[:,a])
KK[a,a] = -Kaa
return KK
def _fintegral(tt, gtd, gta, ed, ea, ld):
"""Foerster integral
Parameters
----------
tt : numpy array
Time
gtd : numpy array
lineshape function of the donor transition
gta : numpy array
lineshape function of the acceptor transition
ed : float
Energy of the donor transition
ea : float
Energy of the acceptor transition
ld : float
Reorganization energy of the donor
Returns
-------
ret : float
The value of the Foerster integral
"""
#fl = numpy.exp(-gtd +1j*(ed-2.0*ld)*tm.data)
#ab = numpy.exp(-gta -1j*ea*tm.data)
#prod = ab*fl
prod = numpy.exp(-gtd-gta +1j*((ed-ea)-2.0*ld)*tt)
preal = numpy.real(prod)
pimag = numpy.imag(prod)
splr = interp.UnivariateSpline(tt,
preal, s=0).antiderivative()(tt)
spli = interp.UnivariateSpline(tt,
pimag, s=0).antiderivative()(tt)
hoft = splr + 1j*spli
ret = 2.0*numpy.real(hoft[len(tt)-1])
return ret
| [
"[email protected]"
]
| |
8ed1850e0eac4651d703f9597c4758f2e6abce1e | 4309919e2361b3e6364fac19fed1e5c40bb6a038 | /yowsup_celery/exceptions.py | 8d667832be294372d4000c9d7a62cb8956c25b17 | [
"ISC"
]
| permissive | astamiviswakarma/yowsup-celery | 85b0422a393afa2b3aebb94198bc0a4812b237ed | 75f8edf8832ab1c3370e58a049a2e74e1691276e | refs/heads/master | 2021-07-18T14:41:48.695265 | 2019-01-23T19:25:07 | 2019-01-23T19:25:07 | 135,594,100 | 0 | 0 | ISC | 2018-09-13T08:11:39 | 2018-05-31T14:23:57 | Python | UTF-8 | Python | false | false | 710 | py | # -*- coding: utf-8 -*-
class YowsupCeleryError(Exception):
pass
class UnexpectedError(YowsupCeleryError):
""" Raised for unknown or unexpected errors. """
pass
class ConfigurationError(YowsupCeleryError):
"""
Raised when YowsupStack detects and error in configurations
"""
pass
class ConnectionError(YowsupCeleryError):
"""
Raised when CeleryLayer tries to perform an action which requires to be
connected to WhatsApp
"""
pass
class AuthenticationError(YowsupCeleryError):
"""
Raised when YowsupStack cannot authenticate with the whatsapp. This means the
password for number is incorrect. Check if registration was correct
"""
pass
| [
"[email protected]"
]
| |
1717287b96485ca0b2e569fabca1eae0984ef0fa | 647efc6a8ab5511e30cccb1f3f3af697acc83bcc | /queue_sample.py | c94eb18928417f75008174b195a16175af637880 | []
| no_license | syuuhei-yama/python_01 | 5e244160b12e4023637220e0cfa4a1318f70d265 | e94bc7d9c27bc3ae9cc66a7f87e2de13cc5efaae | refs/heads/master | 2022-12-02T20:14:08.550739 | 2020-08-24T08:46:18 | 2020-08-24T08:46:18 | 289,867,426 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 347 | py | #Queue
from queue import Queue
q = Queue(maxsize=3)
print(q.qsize())
print(q.queue)
print(q.empty())
print(q.full())
q.put('A')
q.put('B')
q.put('C')
q.put_nowait('D')
print(q.qsize())
print(q.queue)
print(q.empty())
print(q.full())
#var = q.get()
#print(var)
#print(q.queue)
#var = q.get()
#var = q.get()
#var = q.get()
#print('処理終了')
| [
"[email protected]"
]
| |
ccb008ca1217d6ee113dab2c527a5a986495c0e0 | ab4b08284590c3dd2d09f7de2adc35943a3b59f9 | /yqc_huoerguosi_spider/yqc_huoerguosi_spider/settings.py | 6527b1acc25a876e774a16bc5715d8646f22ac3b | []
| no_license | james-hadoop/JamesScrapy | bfe71dc837a2cc61b5eee3b953f8a5b35a40820d | cbf3e230e919da1cfb76ba0d741440206c39e4cf | refs/heads/master | 2020-08-28T23:46:49.816208 | 2020-06-28T23:54:09 | 2020-06-28T23:54:09 | 217,856,936 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,241 | py | # -*- coding: utf-8 -*-
# Scrapy settings for yqc_huoerguosi_spider project
#
# For simplicity, this file contains only settings considered important or
# commonly used. You can find more settings consulting the documentation:
#
# https://docs.scrapy.org/en/latest/topics/settings.html
# https://docs.scrapy.org/en/latest/topics/downloader-middleware.html
# https://docs.scrapy.org/en/latest/topics/spider-middleware.html
BOT_NAME = 'yqc_huoerguosi_spider'
SPIDER_MODULES = ['yqc_huoerguosi_spider.spiders']
NEWSPIDER_MODULE = 'yqc_huoerguosi_spider.spiders'
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'yqc_huoerguosi_spider (+http://www.yourdomain.com)'
# Obey robots.txt rules
ROBOTSTXT_OBEY = True
# Configure maximum concurrent requests performed by Scrapy (default: 16)
#CONCURRENT_REQUESTS = 32
# Configure a delay for requests for the same website (default: 0)
# See https://docs.scrapy.org/en/latest/topics/settings.html#download-delay
# See also autothrottle settings and docs
#DOWNLOAD_DELAY = 3
# The download delay setting will honor only one of:
#CONCURRENT_REQUESTS_PER_DOMAIN = 16
#CONCURRENT_REQUESTS_PER_IP = 16
# Disable cookies (enabled by default)
#COOKIES_ENABLED = False
# Disable Telnet Console (enabled by default)
#TELNETCONSOLE_ENABLED = False
# Override the default request headers:
#DEFAULT_REQUEST_HEADERS = {
# 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
# 'Accept-Language': 'en',
#}
# Enable or disable spider middlewares
# See https://docs.scrapy.org/en/latest/topics/spider-middleware.html
#SPIDER_MIDDLEWARES = {
# 'yqc_huoerguosi_spider.middlewares.YqcHuoerguosiSpiderSpiderMiddleware': 543,
#}
# Enable or disable downloader middlewares
# See https://docs.scrapy.org/en/latest/topics/downloader-middleware.html
#DOWNLOADER_MIDDLEWARES = {
# 'yqc_huoerguosi_spider.middlewares.YqcHuoerguosiSpiderDownloaderMiddleware': 543,
#}
# Enable or disable extensions
# See https://docs.scrapy.org/en/latest/topics/extensions.html
#EXTENSIONS = {
# 'scrapy.extensions.telnet.TelnetConsole': None,
#}
# Configure item pipelines
# See https://docs.scrapy.org/en/latest/topics/item-pipeline.html
#ITEM_PIPELINES = {
# 'yqc_huoerguosi_spider.pipelines.YqcHuoerguosiSpiderPipeline': 300,
#}
# Enable and configure the AutoThrottle extension (disabled by default)
# See https://docs.scrapy.org/en/latest/topics/autothrottle.html
#AUTOTHROTTLE_ENABLED = True
# The initial download delay
#AUTOTHROTTLE_START_DELAY = 5
# The maximum download delay to be set in case of high latencies
#AUTOTHROTTLE_MAX_DELAY = 60
# The average number of requests Scrapy should be sending in parallel to
# each remote server
#AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0
# Enable showing throttling stats for every response received:
#AUTOTHROTTLE_DEBUG = False
# Enable and configure HTTP caching (disabled by default)
# See https://docs.scrapy.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
#HTTPCACHE_ENABLED = True
#HTTPCACHE_EXPIRATION_SECS = 0
#HTTPCACHE_DIR = 'httpcache'
#HTTPCACHE_IGNORE_HTTP_CODES = []
#HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'
| [
"james@JamesUbuntu"
]
| james@JamesUbuntu |
bdec9615902bc5bbbe3192556bcbc5bf9f5710d7 | 7f763d7c2289e0dcbcc01073f38ea11706736ed7 | /HackerNews/plugin.py | 27d9b8720aec36b4808d78d8e7003e3883d034b8 | []
| no_license | davidsedlar/LemongrabBot | ee3662a506dcbf6c6bfea0decd00044dd0e40ea9 | 37e18bc54554394ea3afa60fa168414e43ce0a99 | refs/heads/master | 2021-01-15T12:42:21.857132 | 2014-12-31T20:42:43 | 2014-12-31T20:42:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,080 | py | import json
import urllib2
import re
import supybot.utils as utils
from supybot.commands import *
import supybot.plugins as plugins
import supybot.ircutils as ircutils
import supybot.callbacks as callbacks
class HackerNews(callbacks.Plugin):
"""Add the help for "@plugin help HackerNews" here
This should describe *how* to use this plugin."""
threaded = True
def _shortenUrl(self, url):
posturi = "https://www.googleapis.com/urlshortener/v1/url"
headers = {'Content-Type' : 'application/json'}
data = {'longUrl' : url}
# if google news is up, safe to assume this is also up?
data = json.dumps(data)
request = urllib2.Request(posturi,data,headers)
response = urllib2.urlopen(request)
response_data = response.read()
shorturi = json.loads(response_data)['id']
return shorturi
# smart_truncate from http://stackoverflow.com/questions/250357/smart-truncate-in-python
def _smart_truncate(self, text, length, suffix='...'):
"""Truncates `text`, on a word boundary, as close to
the target length it can come.
"""
slen = len(suffix)
pattern = r'^(.{0,%d}\S)\s+\S+' % (length-slen-1)
if len(text) > length:
match = re.match(pattern, text)
if match:
length0 = match.end(0)
length1 = match.end(1)
if abs(length0+slen-length) < abs(length1+slen-length):
return match.group(0) + suffix
else:
return match.group(1) + suffix
return text
def hackernews(self, irc, msg, args, optlist):
"""[--newest|--latest|--best|--ask] type of headlines to display.
Display top hackernews.com headlines.
"""
hnposts = "latest"
#for (key, value) in optlist:
# if key == 'newest':
# hnposts = "newest"
# if key == 'latest':
# hnposts = "latest"
# if key == 'best':
# hnposts = "best"
# if key == 'ask':
# hnposts = "ask"
api_url = "http://hackernews-frontend.appspot.com/%s/format/json/limit/5" % hnposts
self.log.info(api_url)
response = urllib2.urlopen(api_url)
data = response.read().decode('latin-1')
jsondata = json.loads(data)
#self.log.info(json.dumps(jsondata, indent=2))
items = jsondata['items']
#entries = sorted(items, key=items['comments'], reverse=True)
for item in items:
title = item['title']
url = self._shortenUrl(item['url'])
score = item['score']
user = item['user']
comments = item['comments']
time = item['time']
item_id = item['item_id']
irc.reply(title + " " + url)
hackernews = wrap(hackernews, [getopts({'newest': '','latest': '','best': '','ask': ''})])
Class = HackerNews
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=200:
| [
"[email protected]"
]
| |
1f4bbcaf1f8e581b86c481a3c4d929a82dbca5c2 | 45abeaad9890863c6d1ad849d7bbc3780d9cefa1 | /levelstates.py | fa8bb2380f8fdbea4236bf6077d6024ade9ad06c | []
| no_license | morganq/picketpuzzle | 2af354a0b1ec2d151713557ade4ff0833b153597 | 84a0185a0a4323b1b53f9eb2f7e97306cf533bd0 | refs/heads/main | 2023-01-20T23:54:40.879072 | 2020-11-27T19:08:36 | 2020-11-27T19:08:36 | 307,177,341 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 21,923 | py | from states import State
import framesprite
import worker
import game
import text
import csv
import math
import pygame
import sound
from resources import resource_path
def autotile_around(scene, cx, cy):
w = len(scene.tilemap._grid[0])
h = len(scene.tilemap._grid)
for x in range(max(cx - 1,0), min(cx + 2, w)):
for y in range(max(cy - 1,0), min(cy + 2,h)):
if scene.tilemap._grid[y][x] in game.ROADTILES:
autotile(scene, x, y)
def autotile(scene, gx, gy):
w = len(scene.tilemap._grid[0])
h = len(scene.tilemap._grid)
def is_road(x,y):
if x < 0 or y < 0 or x >= w or y >= h:
return False
else:
return scene.tilemap._grid[y][x] in game.ROADTILES + [16]
around = (
is_road(gx - 1, gy), is_road(gx, gy - 1), is_road(gx + 1, gy), is_road(gx, gy + 1)
)
if around in AUTODEF:
scene.tilemap.set_tile(gx, gy, AUTODEF[around])
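# Direction encoding used throughout this module: 0 = right, 1 = down,
# 2 = left, 3 = up (grid y grows downward, as in screen coordinates).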
DIRS_FROM_OFFSETS = {
(1,0):0, (0,1):1, (-1,0):2, (0,-1):3
}
OFFSETS_FROM_DIRS = {y:x for x,y in DIRS_FROM_OFFSETS.items()}
class Tutorial(State):
def __init__(self, scene, tutorial):
State.__init__(self, scene)
self.tutorial = tutorial
def enter(self):
self.tutorial.initialize(self.scene)
def take_input(self, input, event):
if input == "action":
self.scene.sm.transition(self.scene.get_starting_state())
def exit(self):
self.tutorial.cleanup()
class PickFactory(State):
def __init__(self, scene):
State.__init__(self, scene)
self.factory_index = 0
self.selection = None
def enter(self):
self.selection = framesprite.FrameSprite("assets/cursor.png", 16)
self.scene.ui_group.add(self.selection)
self.scene.animatedsprites.append(self.selection)
self.update_selection()
def exit(self):
f = self.scene.factories[self.factory_index]
f.set_frame(3)
f.activated = True
w = worker.Worker(f.gx, f.gy)
self.scene.snake = [w]
self.selection.kill()
self.scene.animatedsprites.remove(self.selection)
def take_input(self, input, event):
if input == "left":
self.factory_index = (self.factory_index - 1) % len(self.scene.factories)
self.update_selection()
sound.play("blip")
elif input == "right":
self.factory_index = (self.factory_index + 1) % len(self.scene.factories)
self.update_selection()
sound.play("blip")
elif input == "action": self.scene.sm.transition(March(self.scene))
elif input == "click": self.scene.sm.transition(Edit(self.scene))
elif input == "back":
self.exit()
self.scene.game.return_to_map()
def update_selection(self):
f = self.scene.factories[self.factory_index]
self.selection.move(f.rect[0] - 2, f.rect[1] - 2)
class March(State):
def enter(self):
sound.play("factory")
self.num_steps = 0
w = self.scene.snake[0]
self.scene.game_group.add(w)
self.scene.animatedsprites.append(w)
self.flag = framesprite.FrameSprite("assets/flag.png", 12)
self.scene.game_group.add(self.flag)
self.scene.animatedsprites.append(self.flag)
self.steps = text.Text("0", "small", (0,0))
self.scene.ui_group.add(self.steps)
self.position_extras()
self.enemies_to_control = []
self.cell_arrow = framesprite.FrameSprite("assets/cellarrow.png", 9)
self.cell_arrow.move(-10,-10)
self.scene.ui_group.add(self.cell_arrow)
self.scene.animatedsprites.append(self.cell_arrow)
self.update_steps()
def position_extras(self):
self.flag.rect = (self.scene.snake[0].rect[0] - 2, self.scene.snake[0].rect[1] - 7, self.flag.rect[2], self.flag.rect[3])
self.steps.set_pos(self.scene.snake[0].rect[0] + 9, self.scene.snake[0].rect[1] - 4)
def take_input(self, input, event):
w = self.scene.snake[0]
if input == "left": self.try_move(w.gx - 1, w.gy, 2)
elif input == "right": self.try_move(w.gx + 1, w.gy, 0)
elif input == "up": self.try_move(w.gx, w.gy - 1, 3)
elif input == "down": self.try_move(w.gx, w.gy + 1, 1)
elif input == "action":
if self.enemies_to_control:
self.step_enemy(None, None)
#else:
# self.scene.load()
# self.scene.initialize_state()
elif input == "click": self.scene.sm.transition(Edit(self.scene))
elif input == "back":
self.exit()
self.scene.game.return_to_map()
elif input == "other" and event.key == pygame.K_r:
self.scene.load()
self.scene.initialize_state()
def step_enemy(self, enemy, dir):
if enemy is None:
moved = True
else:
dx, dy = OFFSETS_FROM_DIRS[dir]
tx = enemy.gx + dx
ty = enemy.gy + dy
moved = False
w = len(self.scene.road_grid[0])
h = len(self.scene.road_grid)
if tx >= 0 and tx < w and ty >= 0 and ty < h:
worker_poss = [(w.gx, w.gy) for w in self.scene.snake]
o = self.scene.object_grid[ty][tx]
if o == None and (tx,ty) not in worker_poss:
destructable = (self.scene.tilemap._grid[ty][tx] == 16 and enemy.type == "tank")
if self.scene.road_grid[ty][tx] == True or destructable:
self.scene.object_grid[enemy.gy][enemy.gx] = None
enemy.gx = tx
enemy.gy = ty
enemy.step(tx * game.TILESIZE + enemy.x_offset, ty * game.TILESIZE - 6, 0)
enemy.last_move_direction = dir
enemy.update_direction()
self.scene.object_grid[enemy.gy][enemy.gx] = enemy
moved = True
sound.play("step0")
if destructable:
self.scene.tilemap.set_tile(tx, ty, 15)
self.scene.road_grid[ty][tx] = True
autotile_around(self.scene, tx, ty)
if moved:
self.enemies_to_control.pop(0)
if self.enemies_to_control:
sound.play("cell")
self.cell_arrow.move(self.enemies_to_control[0].rect[0] + 2, self.enemies_to_control[0].rect[1] - 10)
else:
self.cell_arrow.move(-10, -10)
self.scene.overlay.set_frame(0)
else:
sound.play("cannot")
def activate_tower(self, tower):
enemies = []
for row in self.scene.object_grid:
for cell in row:
if cell and (cell.type == "soldier" or cell.type == "police" or cell.type == "tank"):
enemies.append(cell)
def dist(e):
return math.sqrt((e.gx - tower.gx) ** 2 + (e.gy - tower.gy) ** 2)
enemies.sort(key=dist)
self.enemies_to_control = enemies
if self.enemies_to_control:
self.cell_arrow.move(self.enemies_to_control[0].rect[0] + 2, self.enemies_to_control[0].rect[1] - 10)
sound.play("cell")
self.scene.overlay.set_frame(1)
def try_move(self, tx, ty, dir):
# If we used cell tower and are controlling an enemy...
if self.enemies_to_control:
self.step_enemy(self.enemies_to_control[0], dir)
return
w = self.scene.snake[0]
o = self.scene.object_grid[ty][tx]
if o == None:
self.move(tx, ty, dir)
else:
did_push = o.interact(self, w)
if len(self.scene.snake) == 0:
if all([f.activated for f in self.scene.cityhalls]):
self.scene.sm.transition(Victory(self.scene, self.num_steps))
else:
self.scene.sm.transition(Defeat(self.scene))
return
else:
self.scene.snake[0].last_move_direction = dir
self.scene.snake[0].update_direction()
if did_push:
self.move(tx, ty, dir)
def move_tanks(self):
any_steps = False
for tank in self.scene.tanks:
stepped = tank.tank_step(self.scene)
any_steps = any_steps or stepped
if any_steps:
sound.play("tankdrive")
def move(self, tx, ty, dir):
w = len(self.scene.road_grid[0])
h = len(self.scene.road_grid)
if tx >= 0 and tx < w and ty >= 0 and ty < h:
if self.scene.road_grid[ty][tx] == True and self.scene.object_grid[ty][tx] == None:
# Check if we're trying to double-back and we have a tail
if len(self.scene.snake) > 1:
if self.scene.snake[1].gx == tx and self.scene.snake[1].gy == ty:
sound.play("cannot")
self.scene.snake[0].last_move_direction = dir
self.scene.snake[0].update_direction()
return # Don't move
# Figure out the positions of the new snake
new_snake_spots = []
new_snake_spots.append((tx, ty, dir))
for i in range(1, len(self.scene.snake)):
cur = self.scene.snake[i]
next = self.scene.snake[i-1]
offset = (next.gx - cur.gx, next.gy - cur.gy)
new_snake_spots.append((next.gx, next.gy, DIRS_FROM_OFFSETS[offset]))
# If the place we're trying to go is occupied in the new arrangement of the snake, we can't do it
q = [(x,y) for (x,y,z) in new_snake_spots]
if (tx,ty) not in q[1:]:
add_worker = None
# Add a queued worker to the end of the tail
if self.scene.queued_workers > 0:
self.scene.remove_queued_worker()
add_worker = worker.Worker(self.scene.snake[-1].gx, self.scene.snake[-1].gy)
self.scene.game_group.add(add_worker)
self.scene.animatedsprites.append(add_worker)
add_worker.step(add_worker.rect[0], add_worker.rect[1], len(self.scene.snake))
for i,cell in enumerate(self.scene.snake):
ss = new_snake_spots[i]
cell.gx = ss[0]
cell.gy = ss[1]
cell.step(ss[0] * game.TILESIZE, ss[1] * game.TILESIZE - 6, i)
cell.last_move_direction = ss[2]
cell.update_direction()
# Actually add to the snake now, so it's not processed by the above loop
if add_worker:
offset = (self.scene.snake[-1].gx - add_worker.gx, self.scene.snake[-1].gy - add_worker.gy)
add_worker.last_move_direction = DIRS_FROM_OFFSETS[offset]
add_worker.update_direction()
self.scene.snake.append(add_worker)
self.num_steps += 1
self.update_steps()
if len(self.scene.snake) > 2:
sound.play("step2")
elif len(self.scene.snake) > 1:
sound.play("step1")
else:
sound.play("step0")
self.move_tanks()
else:
self.scene.snake[0].last_move_direction = dir
self.scene.snake[0].update_direction()
sound.play("cannot")
self.position_extras()
def update_steps(self):
max_steps = self.scene.game.get_max_steps()
if max_steps == 0:
self.steps.set_text(str(self.num_steps))
else:
steps_left = max_steps - self.num_steps
if steps_left < 10:
self.steps.color = (255,213,17)
if steps_left <= -1:
self.scene.sm.transition(Defeat(self.scene))
self.steps.set_text(str(steps_left))
def exit(self):
if self.flag in self.scene.animatedsprites:
self.scene.animatedsprites.remove(self.flag)
self.flag.kill()
self.steps.kill()
self.scene.overlay.kill()
AUTODEF = {
# (L, U, R, D): tile index
(1,0,0,0): 12,
(0,1,0,0): 13,
(0,0,1,0): 14,
(0,0,0,1): 11,
(1,1,0,0):4,
(0,1,1,0):5,
(0,0,1,1):6,
(1,0,0,1):3,
(1,0,1,0):1,
(0,1,0,1):2,
(1,1,1,0):10,
(0,1,1,1):7,
(1,0,1,1):8,
(1,1,0,1):9,
(1,1,1,1):15,
}
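# Example: road neighbours to the left and right only gives the key
# (1, 0, 1, 0), which maps to tile 1, the straight horizontal road piece.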
class Edit(State):
def take_input(self, inp, event):
if inp == "click":
pos = (event.pos[0] // game.TILESIZE // game.SCALE, event.pos[1] // game.TILESIZE // game.SCALE)
self.place(*pos)
elif inp == "rightclick":
pos = (event.pos[0] // game.TILESIZE // game.SCALE, event.pos[1] // game.TILESIZE // game.SCALE)
self.place_obj(self.tileselect._frame, *pos)
elif inp == "back":
self.exit()
self.scene.initialize_state()
elif inp == "right":
self.tileselect.set_frame((self.tileselect._frame + 1) % len(game.OBJ))
elif inp == "left":
self.tileselect.set_frame((self.tileselect._frame - 1) % len(game.OBJ))
elif inp == "up":
self.deco = (self.deco - 1) % (len(game.DECOTILES) + 1)
if self.deco == 0:
self.decoselect.set_frame(15)
else:
self.decoselect.set_frame(self.deco - 1 + game.DECOTILES[0])
elif inp == "down":
self.deco = (self.deco + 1) % (len(game.DECOTILES) + 1)
if self.deco == 0:
self.decoselect.set_frame(15)
else:
self.decoselect.set_frame(self.deco - 1 + game.DECOTILES[0])
elif inp == "action":
print(self.scene.level_file)
fn = input()
self.save(fn)
elif inp == "other":
if event.key == pygame.K_j: self.shift(-1,0)
if event.key == pygame.K_i: self.shift(0,-1)
if event.key == pygame.K_l: self.shift(1,0)
if event.key == pygame.K_k: self.shift(0,1)
def shift(self, dx, dy):
w = len(self.scene.tilemap._grid[0])
h = len(self.scene.tilemap._grid)
tg = []
og = []
for y in range(h):
tg.append([])
og.append([])
for x in range(w):
tv = 0
ov = 0
if y - dy >= 0 and y - dy < h and x - dx >= 0 and x - dx < w:
tv = self.scene.tilemap._grid[y - dy][x - dx]
ov = self.scene.object_grid[y - dy][x - dx]
if ov:
ov.rect = (ov.rect[0] + dx * 12, ov.rect[1] + dy * 12, ov.rect[2], ov.rect[3])
tg[-1].append(tv)
og[-1].append(ov)
self.scene.tilemap._grid = tg
self.scene.tilemap.invalidate()
self.scene.tilemap.update_image()
self.scene.object_grid = og
def save(self, name):
w = len(self.scene.tilemap._grid[0])
h = len(self.scene.tilemap._grid)
f1 = open(resource_path("levels/%s_tiles.csv" % name), "w")
f2 = open(resource_path("levels/%s_objects.csv" % name), "w")
tile_writer = csv.writer(f1)
object_writer = csv.writer(f2)
for y in range(h):
tile_row = []
object_row = []
for x in range(w):
tile_row.append(str(self.scene.tilemap._grid[y][x]))
obj = self.scene.object_grid[y][x]
obj_value = -1
if obj:
obj_value = game.OBJ[obj.type]
object_row.append(str(obj_value))
tile_writer.writerow(tile_row)
object_writer.writerow(object_row)
f1.close()
f2.close()
def place_obj(self, obj, gx, gy):
if self.scene.object_grid[gy][gx]:
o = self.scene.object_grid[gy][gx]
o.kill()
self.scene.object_grid[gy][gx] = None
else:
self.scene.place_obj_by_index(obj, gx, gy)
def place(self, gx, gy):
if self.deco == 0:
if self.scene.tilemap._grid[gy][gx] in game.ROADTILES:
self.scene.tilemap.set_tile(gx, gy, 0)
else:
self.scene.tilemap.set_tile(gx, gy, 1)
self.autotile_around(gx, gy)
else:
if self.scene.tilemap._grid[gy][gx] == self.decoselect._frame:
self.scene.tilemap.set_tile(gx, gy, 0)
else:
self.scene.tilemap.set_tile(gx, gy, self.decoselect._frame)
if self.deco == 1:
self.autotile_around(gx,gy)
def autotile_around(self, cx, cy):
autotile_around(self.scene, cx, cy)
def exit(self):
self.save("temp")
self.scene.ui_group.remove(self.tileselect)
self.scene.ui_group.remove(self.decoselect)
# Save tiles and objects
# Reload objects
self.scene.level_file = "temp"
self.scene.load()
def enter(self):
self.scene.load()
self.tileselect = framesprite.FrameSprite("assets/objects.png", 12)
self.scene.ui_group.add(self.tileselect)
self.decoselect = framesprite.FrameSprite("assets/tiles.png", 12)
self.scene.ui_group.add(self.decoselect)
self.decoselect.rect = (0, 12, self.decoselect.rect[2], self.decoselect.rect[3])
self.decoselect.set_frame(15)
self.deco = 0
class Victory(State):
def __init__(self, scene, num_steps):
State.__init__(self, scene)
self.num_steps = num_steps
def enter(self):
self.victory_image = pygame.image.load(resource_path("assets/victory.png"))
self.victory_t = 0
self.angle = 0
self.scale = 0
self.num_stars = self.scene.game.record_victory(self.num_steps)
sound.play_music('victory', 0)
self.stars = []
def add_star(self, filled):
x = len(self.stars) * 32 + 120 - 45
y = 163
s = framesprite.FrameSprite("assets/bigstar.png", 29)
s.set_frame((0,1)[filled])
s.move(x,y)
self.stars.append(s)
self.scene.ui_group.add(s)
def update(self, dt):
self.victory_t += dt
t = self.victory_t * 0.75
max_angle = 360 * 6
zt = min(math.sin(min(t * 1.15,1) * 3.1415 / 2), 1)
self.angle = max_angle * zt
self.scale = min(t * 1.15, 1)
if self.victory_t > 4.5:
self.exit()
self.scene.game.return_to_map(won=True)
if self.victory_t > 2 and len(self.stars) == 0:
if self.num_stars > 0:
self.add_star(True)
else:
self.add_star(False)
if self.victory_t > 2.25 and len(self.stars) == 1:
if self.num_stars > 1:
self.add_star(True)
else:
self.add_star(False)
if self.victory_t > 2.5 and len(self.stars) == 2:
if self.num_stars > 2:
self.add_star(True)
else:
self.add_star(False)
def take_input(self, input, event):
if input == "click":
self.scene.sm.transition(Edit(self.scene))
def render(self, screen):
paper = pygame.transform.rotozoom(self.victory_image, self.angle, self.scale)
screen.blit(paper, (screen.get_width() / 2 - paper.get_width() / 2, screen.get_height() / 2 - paper.get_height() / 2))
class Defeat(State):
def enter(self):
self.deftext = text.Text("Defeat...", "huge", (55, -20))
self.deftext_t = 0
self.scene.ui_group.add(self.deftext)
sound.play("defeat")
def update(self, dt):
self.deftext_t += dt
y = ( - math.cos(min(self.deftext_t,1) * 3.14159) * 0.5 + 0.5) ** 1.5 * 102 - 20
self.deftext.set_pos(self.deftext.rect[0], y)
if self.deftext_t > 3:
self.exit()
self.scene.load()
self.scene.initialize_state()
def take_input(self, input, event):
if input == "click":
self.scene.sm.transition(Edit(self.scene))
class Defeat_no(State):
def enter(self):
self.defeat_image = pygame.image.load(resource_path("assets/defeat.png"))
self.defeat_t = 0
self.angle = 0
self.scale = 0
sound.play("defeat")
def update(self, dt):
self.defeat_t += dt
max_angle = 360 * 8
self.angle = min(((self.defeat_t + 0.5) ** 0.5) * max_angle * 0.85, max_angle)
self.scale = min(self.defeat_t * 1, 1)
if self.defeat_t > 3:
self.exit()
self.scene.load()
self.scene.initialize_state()
def take_input(self, input, event):
if input == "click":
self.scene.sm.transition(Edit(self.scene))
def render(self, screen):
paper = pygame.transform.rotozoom(self.defeat_image, self.angle, self.scale)
screen.blit(paper, (screen.get_width() / 2 - paper.get_width() / 2, screen.get_height() / 2 - paper.get_height() / 2))
class TankFireState(State):
def __init__(self, scene, tank):
self.tank = tank
State.__init__(self, scene)
def update(self, dt):
self.tank.fire_update(self.scene, dt) | [
"[email protected]"
]
| |
39475ae26aaa932b43d254a8b348976f7b7b4d1b | c4a046a62e933d72d3404787429d0840517ae9bd | /sandbox/gkahn/gcg/envs/env_utils.py | 4501126847636df8b691476716de4cc38ea5e34e | [
"LicenseRef-scancode-generic-cla",
"MIT"
]
| permissive | JasonTOKO/gcg | 6d1cff2307e1bae6790357ea569ed4cca594eb1d | e48c5cf47bfbc879c9477a8c98b3b108d43413af | refs/heads/gcg_release | 2020-04-29T02:30:28.133570 | 2017-12-26T17:06:14 | 2017-12-26T17:06:14 | 175,770,942 | 1 | 0 | NOASSERTION | 2019-03-15T07:32:36 | 2019-03-15T07:32:35 | null | UTF-8 | Python | false | false | 784 | py | from rllab.misc.ext import set_seed
### environments
import gym
from sandbox.rocky.tf.envs.base import TfEnv
from rllab.envs.normalized_env import normalize
def create_env(env_str, is_normalize=True, seed=None):
from rllab.envs.gym_env import GymEnv, FixedIntervalVideoSchedule
from sandbox.gkahn.gcg.envs.rccar.square_env import SquareEnv
from sandbox.gkahn.gcg.envs.rccar.square_cluttered_env import SquareClutteredEnv
from sandbox.gkahn.gcg.envs.rccar.cylinder_env import CylinderEnv
inner_env = eval(env_str)
if is_normalize:
inner_env = normalize(inner_env)
env = TfEnv(inner_env)
# set seed
if seed is not None:
set_seed(seed)
if isinstance(inner_env, GymEnv):
inner_env.env.seed(seed)
return env
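# Sketch (illustrative only; constructor arguments depend on the env class):
#   env = create_env("SquareEnv()", is_normalize=True, seed=0)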
| [
"[email protected]"
]
| |
0984a460f47fe467dc4b1b0a1b5591fed90dc568 | db7a459e31c0a186dca64a829f93090fa58feab0 | /ai_learning/data_structure/sort05_quick.py | 1d18ed1f5e05491e090069d04ec5228d88187b18 | []
| no_license | ZouJoshua/dl_project | a3e7c9e035c37af698d4ef388fbb8c46174d5de1 | ee7ecedd55ce544b127be8009e026ac2cdc3f71b | refs/heads/master | 2022-12-04T04:21:19.937698 | 2022-01-27T07:33:37 | 2022-01-27T07:33:37 | 175,645,793 | 9 | 3 | null | 2022-11-21T21:30:23 | 2019-03-14T15:07:27 | Python | UTF-8 | Python | false | false | 2,001 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@Author : Joshua
@Time : 4/27/20 1:47 PM
@File : sort05_quick.py
@Desc    : Quick sort
"""
def quick_sort(alist, l, r):
"""
快速排序算法
平均时间复杂度O(nlogn)
最优时间复杂度(序列是有序的O(n))
最坏时间复杂度O(n^2)
稳定
:param alist:
:param left:
:param right:
:return:
"""
if l >= r:
return
mid_value = alist[l]
left = l
right = r
while left < right:
while left < right and alist[right] >= mid_value:
right -= 1
alist[left] = alist[right]
while left < right and alist[left] < mid_value:
left += 1
alist[right] = alist[left]
    # when the loop exits, left and right are equal
alist[left] = mid_value
quick_sort(alist, l, left-1)
quick_sort(alist, left+1, r)
def quick_sort_v1(alist, left, right):
"""
快速排序算法
平均时间复杂度:O(nlogn)
最优时间复杂度:(序列是有序的O(n))
最坏时间复杂度:O(n^2)
空间复杂度:O(logn)~O(n)
不稳定
:param alist:
:param left:
:param right:
:return:
"""
def quick(alist, left, right):
mid_value = alist[left]
while left < right:
while left < right and alist[right] >= mid_value:
right -= 1
alist[left] = alist[right]
while left < right and alist[left] < mid_value:
left += 1
alist[right] = alist[left]
        # when the loop exits, left and right are equal
alist[left] = mid_value
return left
    if left < right:
        mid_index = quick(alist, left, right)
        quick_sort_v1(alist, left, mid_index - 1)
        quick_sort_v1(alist, mid_index + 1, right)
    return alist
if __name__ == "__main__":
a = [3, 42, 5, 1, 55, 23, 44, 54, 32, 8, 10]
quick_sort(a, 0, len(a)-1)
# s = quick_sort_v1(a, 0, len(a)-1)
print(a) | [
"[email protected]"
]
| |
a5a9e4a77f543308bc1b3f321af2c9e0d305c91a | 275bc864a84723d6767207573017c7258d60370c | /Refinement_based_extraction/Training_Functions.py | 61a9c272c05e1b2fde13837fa44901eace8ab83e | [
"MIT"
]
| permissive | DES-Lab/Extracting-FSM-From-RNNs | ccfa7286b3894fde9e30405fdd2089c54a0b650a | 761b78aed155827b6bb6479daf17a144e7ec8560 | refs/heads/master | 2023-04-08T04:57:16.214037 | 2022-04-14T15:38:07 | 2022-04-14T15:38:07 | 357,165,036 | 15 | 2 | null | null | null | null | UTF-8 | Python | false | false | 2,688 | py | from Refinement_based_extraction.Helper_Functions import n_words_of_length
def make_train_set_for_target(target,alphabet,lengths=None,max_train_samples_per_length=300,search_size_per_length=1000,provided_examples=None):
train_set = {}
if None is provided_examples:
provided_examples = []
if None is lengths:
lengths = list(range(15))+[15,20,25,30]
for l in lengths:
samples = [w for w in provided_examples if len(w)==l]
samples += n_words_of_length(search_size_per_length,l,alphabet)
pos = [w for w in samples if target(w)]
neg = [w for w in samples if not target(w)]
pos = pos[:int(max_train_samples_per_length/2)]
neg = neg[:int(max_train_samples_per_length/2)]
minority = min(len(pos),len(neg))
pos = pos[:minority+20]
neg = neg[:minority+20]
train_set.update({w:True for w in pos})
train_set.update({w:False for w in neg})
#print("made train set of size:",len(train_set),", of which positive examples:",
# len([w for w in train_set if train_set[w]==True]))
return train_set
#curriculum
def mixed_curriculum_train(rnn,train_set,outer_loops=3,stop_threshold=0.001,learning_rate=0.001,
length_epochs=5,random_batch_epochs=100,single_batch_epochs=100,random_batch_size=20, show = False):
lengths = sorted(list(set([len(w) for w in train_set])))
for _ in range(outer_loops):
for l in lengths:
training = {w:train_set[w] for w in train_set if len(w)==l}
if len(set([training[w] for w in training])) <= 1: #empty, or length with only one classification
continue
rnn.train_group(training,length_epochs,show=False,loss_every=20,stop_threshold=stop_threshold,
learning_rate=learning_rate,batch_size=None,print_time=False)
# all together but in batches
if rnn.finish_signal == rnn.train_group(train_set,random_batch_epochs,show=show,loss_every=20,
stop_threshold = stop_threshold,
learning_rate=learning_rate,
batch_size=random_batch_size,print_time=False):
break
# all together in one batch
if rnn.finish_signal == rnn.train_group(train_set,single_batch_epochs,show=show,loss_every=20,
stop_threshold = stop_threshold,
learning_rate=learning_rate,batch_size=None,print_time=False):
break
print("classification loss on last batch was:",rnn.all_losses[-1]) | [
"[email protected]"
]
| |
53655fd4008f110145660387e8700896033eb634 | 08428ba80f90f73bbce19e5bd0f423a1b4d025d7 | /src/project_requests/urls.py | 0dc1335179831f78e8a61384807b73fa52521851 | []
| no_license | marcoverl/openstack-security-integrations | 0d3afe093b361c548b65be9e405e10318d51c7cd | 58c560885b007cf25444e552de17c0d6a5a0e716 | refs/heads/master | 2021-01-16T21:18:56.071490 | 2014-06-17T07:56:48 | 2014-06-17T07:56:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 330 | py | from django.conf.urls.defaults import patterns
from django.conf.urls.defaults import url
from openstack_dashboard.dashboards.project.project_requests import views
prefix = 'openstack_dashboard.dashboards.project.project_requests.views'
urlpatterns = patterns(prefix,
url(r'^$', views.RequestView.as_view(), name='index'))
| [
"[email protected]"
]
| |
25acdb599ea1581404dd9bbd43b3f18bf10d0365 | 70cdf0741a22c678401a306229003bf036ffe5a6 | /ocbind/network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv6_labeled_unicast/prefix_limit/state/__init__.py | 321149c809a62cbbcdc1f550735a614802d8fbbc | []
| no_license | zsblevins/nanog81-hackathon | 5001e034339d6b0c6452ae2474f06916bcd715cf | 1b64fd207dd69837f947094fbd6d6c1cea3a1070 | refs/heads/main | 2023-03-03T09:39:28.460000 | 2021-02-15T13:41:38 | 2021-02-15T13:41:38 | 336,698,856 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 61,830 | py | # -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improved)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
class state(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/protocols/protocol/bgp/peer-groups/peer-group/afi-safis/afi-safi/ipv6-labeled-unicast/prefix-limit/state. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: State information relating to the prefix-limit for the
AFI-SAFI
"""
__slots__ = ('_path_helper', '_extmethods', '__max_prefixes','__prevent_teardown','__warning_threshold_pct','__restart_timer',)
_yang_name = 'state'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__max_prefixes = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="max-prefixes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint32', is_config=False)
self.__prevent_teardown = YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="prevent-teardown", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)
self.__warning_threshold_pct = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': ['0..100']}), is_leaf=True, yang_name="warning-threshold-pct", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='oc-types:percentage', is_config=False)
self.__restart_timer = YANGDynClass(base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="restart-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='decimal64', is_config=False)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return ['network-instances', 'network-instance', 'protocols', 'protocol', 'bgp', 'peer-groups', 'peer-group', 'afi-safis', 'afi-safi', 'ipv6-labeled-unicast', 'prefix-limit', 'state']
def _get_max_prefixes(self):
"""
Getter method for max_prefixes, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv6_labeled_unicast/prefix_limit/state/max_prefixes (uint32)
YANG Description: Maximum number of prefixes that will be accepted
from the neighbour
"""
return self.__max_prefixes
def _set_max_prefixes(self, v, load=False):
"""
Setter method for max_prefixes, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv6_labeled_unicast/prefix_limit/state/max_prefixes (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_max_prefixes is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_max_prefixes() directly.
YANG Description: Maximum number of prefixes that will be accepted
from the neighbour
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="max-prefixes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint32', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """max_prefixes must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="max-prefixes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint32', is_config=False)""",
})
self.__max_prefixes = t
if hasattr(self, '_set'):
self._set()
def _unset_max_prefixes(self):
self.__max_prefixes = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="max-prefixes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint32', is_config=False)
def _get_prevent_teardown(self):
"""
Getter method for prevent_teardown, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv6_labeled_unicast/prefix_limit/state/prevent_teardown (boolean)
YANG Description: Do not tear down the BGP session when the maximum
prefix limit is exceeded, but rather only log a
warning. The default of this leaf is false, such
that when it is not specified, the session is torn
down.
"""
return self.__prevent_teardown
def _set_prevent_teardown(self, v, load=False):
"""
Setter method for prevent_teardown, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv6_labeled_unicast/prefix_limit/state/prevent_teardown (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_prevent_teardown is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_prevent_teardown() directly.
YANG Description: Do not tear down the BGP session when the maximum
prefix limit is exceeded, but rather only log a
warning. The default of this leaf is false, such
that when it is not specified, the session is torn
down.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="prevent-teardown", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """prevent_teardown must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="prevent-teardown", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)""",
})
self.__prevent_teardown = t
if hasattr(self, '_set'):
self._set()
def _unset_prevent_teardown(self):
self.__prevent_teardown = YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="prevent-teardown", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)
def _get_warning_threshold_pct(self):
"""
Getter method for warning_threshold_pct, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv6_labeled_unicast/prefix_limit/state/warning_threshold_pct (oc-types:percentage)
YANG Description: Threshold on number of prefixes that can be received
from a neighbour before generation of warning messages
or log entries. Expressed as a percentage of
max-prefixes
"""
return self.__warning_threshold_pct
def _set_warning_threshold_pct(self, v, load=False):
"""
Setter method for warning_threshold_pct, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv6_labeled_unicast/prefix_limit/state/warning_threshold_pct (oc-types:percentage)
If this variable is read-only (config: false) in the
source YANG file, then _set_warning_threshold_pct is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_warning_threshold_pct() directly.
YANG Description: Threshold on number of prefixes that can be received
from a neighbour before generation of warning messages
or log entries. Expressed as a percentage of
max-prefixes
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': ['0..100']}), is_leaf=True, yang_name="warning-threshold-pct", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='oc-types:percentage', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """warning_threshold_pct must be of a type compatible with oc-types:percentage""",
'defined-type': "oc-types:percentage",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': ['0..100']}), is_leaf=True, yang_name="warning-threshold-pct", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='oc-types:percentage', is_config=False)""",
})
self.__warning_threshold_pct = t
if hasattr(self, '_set'):
self._set()
def _unset_warning_threshold_pct(self):
self.__warning_threshold_pct = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': ['0..100']}), is_leaf=True, yang_name="warning-threshold-pct", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='oc-types:percentage', is_config=False)
def _get_restart_timer(self):
"""
Getter method for restart_timer, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv6_labeled_unicast/prefix_limit/state/restart_timer (decimal64)
YANG Description: Time interval in seconds after which the BGP session
is re-established after being torn down due to exceeding
the max-prefix limit.
"""
return self.__restart_timer
def _set_restart_timer(self, v, load=False):
"""
Setter method for restart_timer, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv6_labeled_unicast/prefix_limit/state/restart_timer (decimal64)
If this variable is read-only (config: false) in the
source YANG file, then _set_restart_timer is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_restart_timer() directly.
YANG Description: Time interval in seconds after which the BGP session
is re-established after being torn down due to exceeding
the max-prefix limit.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="restart-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='decimal64', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """restart_timer must be of a type compatible with decimal64""",
'defined-type': "decimal64",
'generated-type': """YANGDynClass(base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="restart-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='decimal64', is_config=False)""",
})
self.__restart_timer = t
if hasattr(self, '_set'):
self._set()
def _unset_restart_timer(self):
self.__restart_timer = YANGDynClass(base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="restart-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='decimal64', is_config=False)
max_prefixes = __builtin__.property(_get_max_prefixes)
prevent_teardown = __builtin__.property(_get_prevent_teardown)
warning_threshold_pct = __builtin__.property(_get_warning_threshold_pct)
restart_timer = __builtin__.property(_get_restart_timer)
_pyangbind_elements = OrderedDict([('max_prefixes', max_prefixes), ('prevent_teardown', prevent_teardown), ('warning_threshold_pct', warning_threshold_pct), ('restart_timer', restart_timer), ])
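# Usage sketch (values hypothetical): each leaf is exposed as a read-only
# property backed by YANGDynClass, while backends populate state through the
# private setters, e.g.
#
#   s = state()
#   s.prevent_teardown           # defaults to YANGBool("false")
#   s._set_max_prefixes(500000)  # raises ValueError if outside the uint32 range
#   s._path()                    # [..., 'prefix-limit', 'state']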
class state(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/protocols/protocol/bgp/peer-groups/peer-group/afi-safis/afi-safi/ipv6-labeled-unicast/prefix-limit/state. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: State information relating to the prefix-limit for the
AFI-SAFI
"""
__slots__ = ('_path_helper', '_extmethods', '__max_prefixes','__prevent_teardown','__warning_threshold_pct','__restart_timer',)
_yang_name = 'state'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__max_prefixes = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="max-prefixes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint32', is_config=False)
self.__prevent_teardown = YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="prevent-teardown", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)
self.__warning_threshold_pct = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': ['0..100']}), is_leaf=True, yang_name="warning-threshold-pct", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='oc-types:percentage', is_config=False)
self.__restart_timer = YANGDynClass(base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="restart-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='decimal64', is_config=False)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return ['network-instances', 'network-instance', 'protocols', 'protocol', 'bgp', 'peer-groups', 'peer-group', 'afi-safis', 'afi-safi', 'ipv6-labeled-unicast', 'prefix-limit', 'state']
def _get_max_prefixes(self):
"""
Getter method for max_prefixes, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv6_labeled_unicast/prefix_limit/state/max_prefixes (uint32)
YANG Description: Maximum number of prefixes that will be accepted
from the neighbour
"""
return self.__max_prefixes
def _set_max_prefixes(self, v, load=False):
"""
Setter method for max_prefixes, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv6_labeled_unicast/prefix_limit/state/max_prefixes (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_max_prefixes is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_max_prefixes() directly.
YANG Description: Maximum number of prefixes that will be accepted
from the neighbour
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="max-prefixes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint32', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """max_prefixes must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="max-prefixes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint32', is_config=False)""",
})
self.__max_prefixes = t
if hasattr(self, '_set'):
self._set()
def _unset_max_prefixes(self):
self.__max_prefixes = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="max-prefixes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint32', is_config=False)
def _get_prevent_teardown(self):
"""
Getter method for prevent_teardown, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv6_labeled_unicast/prefix_limit/state/prevent_teardown (boolean)
YANG Description: Do not tear down the BGP session when the maximum
prefix limit is exceeded, but rather only log a
warning. The default of this leaf is false, such
that when it is not specified, the session is torn
down.
"""
return self.__prevent_teardown
def _set_prevent_teardown(self, v, load=False):
"""
Setter method for prevent_teardown, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv6_labeled_unicast/prefix_limit/state/prevent_teardown (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_prevent_teardown is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_prevent_teardown() directly.
YANG Description: Do not tear down the BGP session when the maximum
prefix limit is exceeded, but rather only log a
warning. The default of this leaf is false, such
that when it is not specified, the session is torn
down.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="prevent-teardown", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """prevent_teardown must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="prevent-teardown", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)""",
})
self.__prevent_teardown = t
if hasattr(self, '_set'):
self._set()
def _unset_prevent_teardown(self):
self.__prevent_teardown = YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="prevent-teardown", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)
def _get_warning_threshold_pct(self):
"""
Getter method for warning_threshold_pct, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv6_labeled_unicast/prefix_limit/state/warning_threshold_pct (oc-types:percentage)
YANG Description: Threshold on number of prefixes that can be received
from a neighbour before generation of warning messages
or log entries. Expressed as a percentage of
max-prefixes
"""
return self.__warning_threshold_pct
def _set_warning_threshold_pct(self, v, load=False):
"""
Setter method for warning_threshold_pct, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv6_labeled_unicast/prefix_limit/state/warning_threshold_pct (oc-types:percentage)
If this variable is read-only (config: false) in the
source YANG file, then _set_warning_threshold_pct is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_warning_threshold_pct() directly.
YANG Description: Threshold on number of prefixes that can be received
from a neighbour before generation of warning messages
or log entries. Expressed as a percentage of
max-prefixes
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': ['0..100']}), is_leaf=True, yang_name="warning-threshold-pct", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='oc-types:percentage', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """warning_threshold_pct must be of a type compatible with oc-types:percentage""",
'defined-type': "oc-types:percentage",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': ['0..100']}), is_leaf=True, yang_name="warning-threshold-pct", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='oc-types:percentage', is_config=False)""",
})
self.__warning_threshold_pct = t
if hasattr(self, '_set'):
self._set()
def _unset_warning_threshold_pct(self):
self.__warning_threshold_pct = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': ['0..100']}), is_leaf=True, yang_name="warning-threshold-pct", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='oc-types:percentage', is_config=False)
def _get_restart_timer(self):
"""
Getter method for restart_timer, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv6_labeled_unicast/prefix_limit/state/restart_timer (decimal64)
YANG Description: Time interval in seconds after which the BGP session
is re-established after being torn down due to exceeding
the max-prefix limit.
"""
return self.__restart_timer
def _set_restart_timer(self, v, load=False):
"""
Setter method for restart_timer, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv6_labeled_unicast/prefix_limit/state/restart_timer (decimal64)
If this variable is read-only (config: false) in the
source YANG file, then _set_restart_timer is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_restart_timer() directly.
YANG Description: Time interval in seconds after which the BGP session
is re-established after being torn down due to exceeding
the max-prefix limit.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="restart-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='decimal64', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """restart_timer must be of a type compatible with decimal64""",
'defined-type': "decimal64",
'generated-type': """YANGDynClass(base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="restart-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='decimal64', is_config=False)""",
})
self.__restart_timer = t
if hasattr(self, '_set'):
self._set()
def _unset_restart_timer(self):
self.__restart_timer = YANGDynClass(base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="restart-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='decimal64', is_config=False)
max_prefixes = __builtin__.property(_get_max_prefixes)
prevent_teardown = __builtin__.property(_get_prevent_teardown)
warning_threshold_pct = __builtin__.property(_get_warning_threshold_pct)
restart_timer = __builtin__.property(_get_restart_timer)
_pyangbind_elements = OrderedDict([('max_prefixes', max_prefixes), ('prevent_teardown', prevent_teardown), ('warning_threshold_pct', warning_threshold_pct), ('restart_timer', restart_timer), ])
class state(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/protocols/protocol/bgp/peer-groups/peer-group/afi-safis/afi-safi/ipv6-labeled-unicast/prefix-limit/state. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: State information relating to the prefix-limit for the
AFI-SAFI
"""
__slots__ = ('_path_helper', '_extmethods', '__max_prefixes','__prevent_teardown','__warning_threshold_pct','__restart_timer',)
_yang_name = 'state'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__max_prefixes = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="max-prefixes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint32', is_config=False)
self.__prevent_teardown = YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="prevent-teardown", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)
self.__warning_threshold_pct = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': ['0..100']}), is_leaf=True, yang_name="warning-threshold-pct", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='oc-types:percentage', is_config=False)
self.__restart_timer = YANGDynClass(base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="restart-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='decimal64', is_config=False)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return ['network-instances', 'network-instance', 'protocols', 'protocol', 'bgp', 'peer-groups', 'peer-group', 'afi-safis', 'afi-safi', 'ipv6-labeled-unicast', 'prefix-limit', 'state']
def _get_max_prefixes(self):
"""
Getter method for max_prefixes, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv6_labeled_unicast/prefix_limit/state/max_prefixes (uint32)
YANG Description: Maximum number of prefixes that will be accepted
from the neighbour
"""
return self.__max_prefixes
def _set_max_prefixes(self, v, load=False):
"""
Setter method for max_prefixes, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv6_labeled_unicast/prefix_limit/state/max_prefixes (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_max_prefixes is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_max_prefixes() directly.
YANG Description: Maximum number of prefixes that will be accepted
from the neighbour
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="max-prefixes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint32', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """max_prefixes must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="max-prefixes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint32', is_config=False)""",
})
self.__max_prefixes = t
if hasattr(self, '_set'):
self._set()
def _unset_max_prefixes(self):
self.__max_prefixes = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="max-prefixes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint32', is_config=False)
def _get_prevent_teardown(self):
"""
Getter method for prevent_teardown, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv6_labeled_unicast/prefix_limit/state/prevent_teardown (boolean)
YANG Description: Do not tear down the BGP session when the maximum
prefix limit is exceeded, but rather only log a
warning. The default of this leaf is false, such
that when it is not specified, the session is torn
down.
"""
return self.__prevent_teardown
def _set_prevent_teardown(self, v, load=False):
"""
Setter method for prevent_teardown, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv6_labeled_unicast/prefix_limit/state/prevent_teardown (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_prevent_teardown is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_prevent_teardown() directly.
YANG Description: Do not tear down the BGP session when the maximum
prefix limit is exceeded, but rather only log a
warning. The default of this leaf is false, such
that when it is not specified, the session is torn
down.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="prevent-teardown", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """prevent_teardown must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="prevent-teardown", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)""",
})
self.__prevent_teardown = t
if hasattr(self, '_set'):
self._set()
def _unset_prevent_teardown(self):
self.__prevent_teardown = YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="prevent-teardown", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)
def _get_warning_threshold_pct(self):
"""
Getter method for warning_threshold_pct, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv6_labeled_unicast/prefix_limit/state/warning_threshold_pct (oc-types:percentage)
YANG Description: Threshold on number of prefixes that can be received
from a neighbour before generation of warning messages
or log entries. Expressed as a percentage of
max-prefixes
"""
return self.__warning_threshold_pct
def _set_warning_threshold_pct(self, v, load=False):
"""
Setter method for warning_threshold_pct, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv6_labeled_unicast/prefix_limit/state/warning_threshold_pct (oc-types:percentage)
If this variable is read-only (config: false) in the
source YANG file, then _set_warning_threshold_pct is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_warning_threshold_pct() directly.
YANG Description: Threshold on number of prefixes that can be received
from a neighbour before generation of warning messages
or log entries. Expressed as a percentage of
max-prefixes
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': ['0..100']}), is_leaf=True, yang_name="warning-threshold-pct", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='oc-types:percentage', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """warning_threshold_pct must be of a type compatible with oc-types:percentage""",
'defined-type': "oc-types:percentage",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': ['0..100']}), is_leaf=True, yang_name="warning-threshold-pct", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='oc-types:percentage', is_config=False)""",
})
self.__warning_threshold_pct = t
if hasattr(self, '_set'):
self._set()
def _unset_warning_threshold_pct(self):
self.__warning_threshold_pct = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': ['0..100']}), is_leaf=True, yang_name="warning-threshold-pct", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='oc-types:percentage', is_config=False)
def _get_restart_timer(self):
"""
Getter method for restart_timer, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv6_labeled_unicast/prefix_limit/state/restart_timer (decimal64)
YANG Description: Time interval in seconds after which the BGP session
is re-established after being torn down due to exceeding
the max-prefix limit.
"""
return self.__restart_timer
def _set_restart_timer(self, v, load=False):
"""
Setter method for restart_timer, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv6_labeled_unicast/prefix_limit/state/restart_timer (decimal64)
If this variable is read-only (config: false) in the
source YANG file, then _set_restart_timer is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_restart_timer() directly.
YANG Description: Time interval in seconds after which the BGP session
is re-established after being torn down due to exceeding
the max-prefix limit.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="restart-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='decimal64', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """restart_timer must be of a type compatible with decimal64""",
'defined-type': "decimal64",
'generated-type': """YANGDynClass(base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="restart-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='decimal64', is_config=False)""",
})
self.__restart_timer = t
if hasattr(self, '_set'):
self._set()
def _unset_restart_timer(self):
self.__restart_timer = YANGDynClass(base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="restart-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='decimal64', is_config=False)
max_prefixes = __builtin__.property(_get_max_prefixes)
prevent_teardown = __builtin__.property(_get_prevent_teardown)
warning_threshold_pct = __builtin__.property(_get_warning_threshold_pct)
restart_timer = __builtin__.property(_get_restart_timer)
_pyangbind_elements = OrderedDict([('max_prefixes', max_prefixes), ('prevent_teardown', prevent_teardown), ('warning_threshold_pct', warning_threshold_pct), ('restart_timer', restart_timer), ])
class state(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/protocols/protocol/bgp/peer-groups/peer-group/afi-safis/afi-safi/ipv6-labeled-unicast/prefix-limit/state. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: State information relating to the prefix-limit for the
AFI-SAFI
"""
__slots__ = ('_path_helper', '_extmethods', '__max_prefixes','__prevent_teardown','__warning_threshold_pct','__restart_timer',)
_yang_name = 'state'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__max_prefixes = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="max-prefixes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint32', is_config=False)
self.__prevent_teardown = YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="prevent-teardown", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)
self.__warning_threshold_pct = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': ['0..100']}), is_leaf=True, yang_name="warning-threshold-pct", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='oc-types:percentage', is_config=False)
self.__restart_timer = YANGDynClass(base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="restart-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='decimal64', is_config=False)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return ['network-instances', 'network-instance', 'protocols', 'protocol', 'bgp', 'peer-groups', 'peer-group', 'afi-safis', 'afi-safi', 'ipv6-labeled-unicast', 'prefix-limit', 'state']
def _get_max_prefixes(self):
"""
Getter method for max_prefixes, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv6_labeled_unicast/prefix_limit/state/max_prefixes (uint32)
YANG Description: Maximum number of prefixes that will be accepted
from the neighbour
"""
return self.__max_prefixes
def _set_max_prefixes(self, v, load=False):
"""
Setter method for max_prefixes, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv6_labeled_unicast/prefix_limit/state/max_prefixes (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_max_prefixes is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_max_prefixes() directly.
YANG Description: Maximum number of prefixes that will be accepted
from the neighbour
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="max-prefixes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint32', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """max_prefixes must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="max-prefixes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint32', is_config=False)""",
})
self.__max_prefixes = t
if hasattr(self, '_set'):
self._set()
def _unset_max_prefixes(self):
self.__max_prefixes = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="max-prefixes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint32', is_config=False)
def _get_prevent_teardown(self):
"""
Getter method for prevent_teardown, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv6_labeled_unicast/prefix_limit/state/prevent_teardown (boolean)
YANG Description: Do not tear down the BGP session when the maximum
prefix limit is exceeded, but rather only log a
warning. The default of this leaf is false, such
that when it is not specified, the session is torn
down.
"""
return self.__prevent_teardown
def _set_prevent_teardown(self, v, load=False):
"""
Setter method for prevent_teardown, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv6_labeled_unicast/prefix_limit/state/prevent_teardown (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_prevent_teardown is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_prevent_teardown() directly.
YANG Description: Do not tear down the BGP session when the maximum
prefix limit is exceeded, but rather only log a
warning. The default of this leaf is false, such
that when it is not specified, the session is torn
down.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="prevent-teardown", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """prevent_teardown must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="prevent-teardown", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)""",
})
self.__prevent_teardown = t
if hasattr(self, '_set'):
self._set()
def _unset_prevent_teardown(self):
self.__prevent_teardown = YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="prevent-teardown", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)
def _get_warning_threshold_pct(self):
"""
Getter method for warning_threshold_pct, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv6_labeled_unicast/prefix_limit/state/warning_threshold_pct (oc-types:percentage)
YANG Description: Threshold on number of prefixes that can be received
from a neighbour before generation of warning messages
or log entries. Expressed as a percentage of
max-prefixes
"""
return self.__warning_threshold_pct
def _set_warning_threshold_pct(self, v, load=False):
"""
Setter method for warning_threshold_pct, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv6_labeled_unicast/prefix_limit/state/warning_threshold_pct (oc-types:percentage)
If this variable is read-only (config: false) in the
source YANG file, then _set_warning_threshold_pct is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_warning_threshold_pct() directly.
YANG Description: Threshold on number of prefixes that can be received
from a neighbour before generation of warning messages
or log entries. Expressed as a percentage of
max-prefixes
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': ['0..100']}), is_leaf=True, yang_name="warning-threshold-pct", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='oc-types:percentage', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """warning_threshold_pct must be of a type compatible with oc-types:percentage""",
'defined-type': "oc-types:percentage",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': ['0..100']}), is_leaf=True, yang_name="warning-threshold-pct", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='oc-types:percentage', is_config=False)""",
})
self.__warning_threshold_pct = t
if hasattr(self, '_set'):
self._set()
def _unset_warning_threshold_pct(self):
self.__warning_threshold_pct = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': ['0..100']}), is_leaf=True, yang_name="warning-threshold-pct", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='oc-types:percentage', is_config=False)
def _get_restart_timer(self):
"""
Getter method for restart_timer, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv6_labeled_unicast/prefix_limit/state/restart_timer (decimal64)
YANG Description: Time interval in seconds after which the BGP session
is re-established after being torn down due to exceeding
the max-prefix limit.
"""
return self.__restart_timer
def _set_restart_timer(self, v, load=False):
"""
Setter method for restart_timer, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/ipv6_labeled_unicast/prefix_limit/state/restart_timer (decimal64)
If this variable is read-only (config: false) in the
source YANG file, then _set_restart_timer is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_restart_timer() directly.
YANG Description: Time interval in seconds after which the BGP session
is re-established after being torn down due to exceeding
the max-prefix limit.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="restart-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='decimal64', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """restart_timer must be of a type compatible with decimal64""",
'defined-type': "decimal64",
'generated-type': """YANGDynClass(base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="restart-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='decimal64', is_config=False)""",
})
self.__restart_timer = t
if hasattr(self, '_set'):
self._set()
def _unset_restart_timer(self):
self.__restart_timer = YANGDynClass(base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="restart-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='decimal64', is_config=False)
max_prefixes = __builtin__.property(_get_max_prefixes)
prevent_teardown = __builtin__.property(_get_prevent_teardown)
warning_threshold_pct = __builtin__.property(_get_warning_threshold_pct)
restart_timer = __builtin__.property(_get_restart_timer)
_pyangbind_elements = OrderedDict([('max_prefixes', max_prefixes), ('prevent_teardown', prevent_teardown), ('warning_threshold_pct', warning_threshold_pct), ('restart_timer', restart_timer), ])
| [
"[email protected]"
]
| |
603c1e8258762426c4676d2615db71ac122ca5f1 | 14ed6c8bf8f735bd08e7d9d3a06ab71b06335a82 | /update.py | 1d00a9614c9b3cef7b10fd6b2a83dd90174807ae | []
| no_license | HackLB/garage_sales | 8e9ef258c1fa5ec55e85e3819ce07f0ad077ae48 | 78b865ccbf40471ee45b4925b18da56968ee2f6c | refs/heads/master | 2021-01-11T06:52:43.886156 | 2017-09-03T01:00:07 | 2017-09-03T01:00:07 | 72,356,303 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,596 | py |
#!/usr/bin/env python
import os, sys
import requests
from bs4 import BeautifulSoup
from pprint import pprint
import simplejson as json
import hashlib
from geopy.geocoders import Nominatim, GoogleV3
from geopy.exc import GeocoderTimedOut
with open('../secrets.json') as f:
secrets = json.load(f)
geolocator = GoogleV3(api_key=secrets['google_api_key'])
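# ../secrets.json is expected to provide the API key, e.g. (value hypothetical):
#   {"google_api_key": "AIza..."}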
url = 'https://wwwbitprod1.longbeach.gov/GarageSalePermit/SearchByDate.aspx'
def getmd5(message):
"""
Returns MD5 hash of string passed to it.
"""
return hashlib.md5(message.encode('utf-8')).hexdigest()
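# Example (address hypothetical): getmd5('4800 E OCEAN BLVD') returns a
# 32-character hex digest, used below as a stable per-address file key.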
def scrape_records():
"""
Extracts garage sale records from the city garage sale Web page,
then puts each record into a dictionary and returns a list of dictionaries.
"""
print('Getting garage sales data...')
r = requests.get(url)
soup = BeautifulSoup(r.content, 'html.parser')
rows = soup.find('table', {'class': 'DataWebControlStyle'}).find_all('tr')
records = []
for row in rows[1:]:
cells = row.find_all('td')
location = cells[0].string.strip()
dates = [cells[1].string.strip()]
record = {'location': location, 'dates': dates}
records.append(record)
pprint(record)
return records
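# Each record is shaped like (values hypothetical):
#   {'location': '123 PINE AVE', 'dates': ['09/02/2017']}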
def get_subdirectory(base_name):
"""
Takes the base filename and returns a path to a subdirectory, creating it if needed.
"""
sub_dir = os.path.join(data_path, base_name[-8:-6], base_name[-6:-4], base_name[-4:-2])
os.makedirs(sub_dir, exist_ok=True)
return sub_dir
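# Mapping sketch: for a hash ending '...ab12cd34', records land under
# _data/ab/12/cd/ -- a three-level fan-out that keeps directories small.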
def geocode(address_stub, attempts=0):
    """
    Geocodes a Long Beach street address, returning a dict with latitude,
    longitude, and the resolved address, or None if the lookup fails.
    Retries a bounded number of times when the geocoder times out.
    """
    address = '{}, LONG BEACH, CA'.format(address_stub)
    try:
        location = geolocator.geocode(address, timeout=2)
        if location:
            return {"latitude": location.latitude, "longitude": location.longitude, "address": location.address}
        else:
            return None
    except GeocoderTimedOut:
        if attempts >= 3:
            # Give up after a few timeouts instead of recursing indefinitely.
            return None
        # Retry with the original stub so ', LONG BEACH, CA' is not appended twice.
        return geocode(address_stub, attempts + 1)
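# Example (hypothetical): geocode('100 W BROADWAY') might return
#   {'latitude': 33.768, 'longitude': -118.193, 'address': '100 W Broadway, Long Beach, CA 90802, USA'}
# or None when the address cannot be resolved.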
def save_records(records):
"""
Saves records to individual JSON files.
Records are per-address. Each new garage sale for
a given address gets appended to its existing file.
Files are named and organized based on an MD5 of
the address.
"""
print('Saving garage sales data...')
for record in records:
location_hash = getmd5(record['location'])
file_name = '{}.json'.format(location_hash)
directory = get_subdirectory(location_hash)
path = os.path.join(directory, file_name)
if os.path.exists(path):
with open(path) as f:
existing_data = json.load(f)
if record['dates'][0] not in existing_data['dates']:
existing_data['dates'].extend(record['dates'])
with open(path, 'w') as f:
json.dump(existing_data, f, indent=4, ensure_ascii=False, sort_keys=True)
else:
geocoded_location = geocode(record['location'])
if geocoded_location:
record['coordinates'] = geocoded_location
with open(path, 'w') as f:
json.dump(record, f, indent=4, ensure_ascii=False, sort_keys=True)
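# Resulting file sketch (values hypothetical): _data/ab/12/cd/<md5>.json holding
#   {"coordinates": {"address": "...", "latitude": 33.77, "longitude": -118.19},
#    "dates": ["09/02/2017"], "location": "123 PINE AVE"}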
if __name__ == "__main__":
repo_path = os.path.dirname(os.path.realpath(sys.argv[0])) # Path to current directory
data_path = os.path.join(repo_path, '_data') # Root path for record data
os.makedirs(data_path, exist_ok=True)
records = scrape_records() # Scrape garage sale records...
save_records(records) # Save the scraped records to JSON files...
| [
"[email protected]"
]
| |
93c5bfc63f145f5c6a7b441e8bc989771e798849 | 7136e5242793b620fa12e9bd15bf4d8aeb0bfe7a | /adspygoogle/dfp/zsi/v201010/LineItemCreativeAssociationService_services.py | a543d1f0478ee699edbcb3c5695e3c57cc42665a | [
"Apache-2.0"
]
| permissive | hockeyprincess/google-api-dfp-python | 534519695ffd26341204eedda7a8b50648f12ea9 | efa82a8d85cbdc90f030db9d168790c55bd8b12a | refs/heads/master | 2021-01-10T10:01:09.445419 | 2011-04-14T18:25:38 | 2011-04-14T18:25:38 | 52,676,942 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,486 | py |
##################################################
# LineItemCreativeAssociationService_services.py
# generated by ZSI.generate.wsdl2python
##################################################
from LineItemCreativeAssociationService_services_types import *
import urlparse, types
from ZSI.TCcompound import ComplexType, Struct
from ZSI import client
import ZSI
# Locator
class LineItemCreativeAssociationServiceLocator:
LineItemCreativeAssociationServiceInterface_address = "https://www.google.com:443/apis/ads/publisher/v201010/LineItemCreativeAssociationService"
def getLineItemCreativeAssociationServiceInterfaceAddress(self):
return LineItemCreativeAssociationServiceLocator.LineItemCreativeAssociationServiceInterface_address
def getLineItemCreativeAssociationServiceInterface(self, url=None, **kw):
return LineItemCreativeAssociationServiceSoapBindingSOAP(url or LineItemCreativeAssociationServiceLocator.LineItemCreativeAssociationServiceInterface_address, **kw)
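# Usage sketch (assumes SOAP auth headers are configured via **kw elsewhere):
#   iface = LineItemCreativeAssociationServiceLocator().getLineItemCreativeAssociationServiceInterface()
#   request = createLineItemCreativeAssociationRequest()
#   response = iface.createLineItemCreativeAssociation(request)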
# Methods
class LineItemCreativeAssociationServiceSoapBindingSOAP:
def __init__(self, url, **kw):
kw.setdefault("readerclass", None)
kw.setdefault("writerclass", None)
# no resource properties
self.binding = client.Binding(url=url, **kw)
# no ws-addressing
# op: createLineItemCreativeAssociation
def createLineItemCreativeAssociation(self, request):
if isinstance(request, createLineItemCreativeAssociationRequest) is False:
raise TypeError, "%s incorrect request type" % (request.__class__)
kw = {}
# no input wsaction
self.binding.Send(None, None, request, soapaction="", **kw)
# no output wsaction
response = self.binding.Receive(createLineItemCreativeAssociationResponse.typecode)
return response
# op: createLineItemCreativeAssociations
def createLineItemCreativeAssociations(self, request):
if isinstance(request, createLineItemCreativeAssociationsRequest) is False:
raise TypeError, "%s incorrect request type" % (request.__class__)
kw = {}
# no input wsaction
self.binding.Send(None, None, request, soapaction="", **kw)
# no output wsaction
response = self.binding.Receive(createLineItemCreativeAssociationsResponse.typecode)
return response
# get: getLineItemCreativeAssociation
def getLineItemCreativeAssociation(self, request):
if isinstance(request, getLineItemCreativeAssociationRequest) is False:
raise TypeError, "%s incorrect request type" % (request.__class__)
kw = {}
# no input wsaction
self.binding.Send(None, None, request, soapaction="", **kw)
# no output wsaction
response = self.binding.Receive(getLineItemCreativeAssociationResponse.typecode)
return response
# get: getLineItemCreativeAssociationsByStatement
def getLineItemCreativeAssociationsByStatement(self, request):
if isinstance(request, getLineItemCreativeAssociationsByStatementRequest) is False:
raise TypeError, "%s incorrect request type" % (request.__class__)
kw = {}
# no input wsaction
self.binding.Send(None, None, request, soapaction="", **kw)
# no output wsaction
response = self.binding.Receive(getLineItemCreativeAssociationsByStatementResponse.typecode)
return response
# op: performLineItemCreativeAssociationAction
def performLineItemCreativeAssociationAction(self, request):
if isinstance(request, performLineItemCreativeAssociationActionRequest) is False:
raise TypeError, "%s incorrect request type" % (request.__class__)
kw = {}
# no input wsaction
self.binding.Send(None, None, request, soapaction="", **kw)
# no output wsaction
response = self.binding.Receive(performLineItemCreativeAssociationActionResponse.typecode)
return response
# op: updateLineItemCreativeAssociation
def updateLineItemCreativeAssociation(self, request):
if isinstance(request, updateLineItemCreativeAssociationRequest) is False:
raise TypeError, "%s incorrect request type" % (request.__class__)
kw = {}
# no input wsaction
self.binding.Send(None, None, request, soapaction="", **kw)
# no output wsaction
response = self.binding.Receive(updateLineItemCreativeAssociationResponse.typecode)
return response
# op: updateLineItemCreativeAssociations
def updateLineItemCreativeAssociations(self, request):
if isinstance(request, updateLineItemCreativeAssociationsRequest) is False:
raise TypeError, "%s incorrect request type" % (request.__class__)
kw = {}
# no input wsaction
self.binding.Send(None, None, request, soapaction="", **kw)
# no output wsaction
response = self.binding.Receive(updateLineItemCreativeAssociationsResponse.typecode)
return response
createLineItemCreativeAssociationRequest = ns0.createLineItemCreativeAssociation_Dec().pyclass
createLineItemCreativeAssociationResponse = ns0.createLineItemCreativeAssociationResponse_Dec().pyclass
createLineItemCreativeAssociationsRequest = ns0.createLineItemCreativeAssociations_Dec().pyclass
createLineItemCreativeAssociationsResponse = ns0.createLineItemCreativeAssociationsResponse_Dec().pyclass
getLineItemCreativeAssociationRequest = ns0.getLineItemCreativeAssociation_Dec().pyclass
getLineItemCreativeAssociationResponse = ns0.getLineItemCreativeAssociationResponse_Dec().pyclass
getLineItemCreativeAssociationsByStatementRequest = ns0.getLineItemCreativeAssociationsByStatement_Dec().pyclass
getLineItemCreativeAssociationsByStatementResponse = ns0.getLineItemCreativeAssociationsByStatementResponse_Dec().pyclass
performLineItemCreativeAssociationActionRequest = ns0.performLineItemCreativeAssociationAction_Dec().pyclass
performLineItemCreativeAssociationActionResponse = ns0.performLineItemCreativeAssociationActionResponse_Dec().pyclass
updateLineItemCreativeAssociationRequest = ns0.updateLineItemCreativeAssociation_Dec().pyclass
updateLineItemCreativeAssociationResponse = ns0.updateLineItemCreativeAssociationResponse_Dec().pyclass
updateLineItemCreativeAssociationsRequest = ns0.updateLineItemCreativeAssociations_Dec().pyclass
updateLineItemCreativeAssociationsResponse = ns0.updateLineItemCreativeAssociationsResponse_Dec().pyclass
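# Hedged usage sketch (not part of the generated bindings; request fields and
# SOAP headers are service-specific and omitted here):
#
#   locator = LineItemCreativeAssociationServiceLocator()
#   port = locator.getLineItemCreativeAssociationServiceInterface()
#   request = getLineItemCreativeAssociationRequest()
#   # ... populate request fields required by the DFP API ...
#   response = port.getLineItemCreativeAssociation(request)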
| [
"api.sgrinberg@7990c6e4-1bfd-11df-85e6-9b4bd7dd5138"
]
| api.sgrinberg@7990c6e4-1bfd-11df-85e6-9b4bd7dd5138 |
dadb74301dd4de7542922d6102444f2aa9f68ad5 | dbeb1e145eba012a200073038d8a8965ae0c6f5d | /Visualize/surface_3d.py | 167ea594250b9f07a7afab12a5d1e0b9efa58eff | []
| no_license | hellJane/Python_DataAnalysis | b7027cb9d8e75a98b5626a58ee85b64f62c54c9c | 966ee5d732e074e9d124333f13d3e3e23ade1edc | refs/heads/master | 2021-05-17T01:57:24.092791 | 2017-12-01T15:32:32 | 2017-12-01T15:32:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,397 | py | import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from matplotlib.ticker import LinearLocator, FormatStrFormatter
'''
plot_surface() is the one used most often; the common parameters are:
X, Y, Z: the sample-point data. As with contour(), these must be 2D arrays,
         i.e. the output of meshgrid
rstride: row stride used when drawing the surface
cstride: column stride used when drawing the surface
rcount: upper limit on the number of rows drawn
ccount: upper limit on the number of columns drawn
color: a single color for the surface
cmap: a colormap for gradient coloring
....the remaining, rarely used parameters are omitted here
'''
fig = plt.figure()
ax = fig.gca(projection='3d') # an alternative to add_subplot(111, projection='3d')
# Generate Data
X = np.arange(-5, 5, 0.25)
Y = np.arange(-5, 5, 0.25)
X, Y = np.meshgrid(X, Y) # must be a meshgrid
R = np.sqrt(X**2 + Y**2)
Z = np.sin(R) # X, Y and Z are all 2D arrays
# surf = ax.plot_surface(X, Y, Z, color='r') # the whole surface is red
# surf = ax.plot_surface(X, Y, Z, cmap='jet') # surface colored by the cmap: roughly, the higher the Z value, the warmer the color
surf = ax.plot_surface(X, Y, Z, cmap='jet', linewidth=0, antialiased=False) # linewidth=0 erases the boundary lines between the facets
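# Optional addition (not in the original script): a colorbar makes the
# Z-to-color mapping of the cmap visible alongside the surface.
fig.colorbar(surf, shrink=0.5, aspect=10)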
ax.set_zlim(-1.01, 1.01)
ax.zaxis.set_major_locator(LinearLocator(10)) # place exactly 10 evenly spaced ticks across the z limits
ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f')) # keep two decimal places on each tick
plt.show()
| [
"[email protected]"
]
| |
2ec6b6118058d57111beec5e02205c2fd1ddff50 | 5cc8c3690f2398698d78800734f7d1ba5dc5a515 | /notebooks/loader.py | d2725ef5eb97333649f1520c70fcefb1cbf8eed4 | [
"MIT"
]
| permissive | kaiyingshan/ode-solver | 0094f06e1bb9f265517b4befec0c04bd3a9d9407 | 30c6798efe9c35a088b2c6043493470701641042 | refs/heads/master | 2020-04-23T00:39:22.148183 | 2019-05-07T17:35:34 | 2019-05-07T17:35:34 | 170,787,367 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 136 | py | import os
import sys
module_path = os.path.abspath(os.path.join('..'))
if module_path not in sys.path:
sys.path.append(module_path)
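# Intended use (assumption about the repo layout): importing this module from a
# notebook inside notebooks/ puts the repository root on sys.path, e.g.:
#   import loader  # noqa: F401  -- side effect only: extends sys.path
#   from solver import something  # 'solver' is a hypothetical package name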
| [
"[email protected]"
]
| |
6bc942a94a76a3549acab8a58b09d349fd303f10 | 8ae07790f074439a329f55f3ed3408e2ba775a74 | /Desktop/packages/rmutil/UnixDriveDetector.py | 6097e90559c3ec61aeda0a70120d58db5927bc00 | [
"Apache-2.0"
]
| permissive | peter9teufel/usb-kiosk | cf06bc7c612b99860f8e1109a293c24e10e7a016 | be931bfe79636e8280748e06d29a4c86af423478 | refs/heads/master | 2020-04-06T04:10:04.711126 | 2015-02-20T11:43:38 | 2015-02-20T11:43:38 | 21,199,497 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,160 | py | import threading, sys, os, time, platform, getpass
import wx
if platform.system() == "Linux":
from wx.lib.pubsub import setupkwargs
from wx.lib.pubsub import pub as Publisher
else:
from wx.lib.pubsub import pub as Publisher
if platform.system() == "Linux":
if 'fedora' in platform.dist():
user = getpass.getuser()
VOLUMES_PATH = "/run/media/" + user
else:
VOLUMES_PATH = "/media"
else:
VOLUMES_PATH = "/Volumes"
bg_thread = None
runFlag = True
volumes = None
def waitForUSBDrive():
# load current list of volumes
global volumes
volumes = os.listdir(VOLUMES_PATH)
global bg_thread
bg_thread = BackgroundUSBDetection()
bg_thread.daemon = True
bg_thread.start()
bg_thread.join()
# RESULT CALL --> wx.CallAfter(Publisher.sendMessage, 'usb_connected', path=drive_path)
### THREAD FOR ASYNC USB DETECTION ###
class BackgroundUSBDetection(threading.Thread):
def __init__(self):
self.run_event = threading.Event()
threading.Thread.__init__(self, name="Mac_Drive_Detector")
def run(self):
print "Thread started..."
global runFlag, volumes
tries = 0
while runFlag and tries < 10:
# check volumes
curVols = os.listdir(VOLUMES_PATH)
newVol = self.NewVolumes(volumes, curVols)
# update list of volumes in case a volume was disconnected (e.g. retry plugging USB)
volumes = curVols
if len(newVol) > 0:
wx.CallAfter(Publisher.sendMessage, 'usb_connected', path=VOLUMES_PATH + '/' + newVol[0])
runFlag = False
time.sleep(2)
tries += 1
if tries == 10:
            # not found --> send timeout message
wx.CallAfter(Publisher.sendMessage, 'usb_search_timeout')
def NewVolumes(self, oldVolumes, curVolumes):
newVol = []
for volume in curVolumes:
if not volume in oldVolumes:
newVol.append(volume)
return newVol
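# Hedged usage sketch (listener names are illustrative, not part of this
# module): a wx consumer subscribes to the topics published above.
#
#   from wx.lib.pubsub import pub as Publisher
#
#   def on_usb_connected(path):
#       print "USB drive mounted at %s" % path   # Python 2, like this module
#
#   def on_timeout():
#       print "No USB drive found within the retry window"
#
#   Publisher.subscribe(on_usb_connected, 'usb_connected')
#   Publisher.subscribe(on_timeout, 'usb_search_timeout')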
if __name__=='__main__':
# load current list of volumes
volumes = os.listdir(VOLUMES_PATH)
waitForUSBDrive()
| [
"[email protected]"
]
| |
9042aa99583f972e7c0f07daa53deb9a89199f8c | f576f0ea3725d54bd2551883901b25b863fe6688 | /sdk/resources/azure-mgmt-resource/azure/mgmt/resource/policy/v2020_07_01_preview/_policy_client.py | 581050251e24acfacfc172e0aca18058b3633016 | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
]
| permissive | Azure/azure-sdk-for-python | 02e3838e53a33d8ba27e9bcc22bd84e790e4ca7c | c2ca191e736bb06bfbbbc9493e8325763ba990bb | refs/heads/main | 2023-09-06T09:30:13.135012 | 2023-09-06T01:08:06 | 2023-09-06T01:08:06 | 4,127,088 | 4,046 | 2,755 | MIT | 2023-09-14T21:48:49 | 2012-04-24T16:46:12 | Python | UTF-8 | Python | false | false | 4,057 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from copy import deepcopy
from typing import Any, TYPE_CHECKING
from azure.core.rest import HttpRequest, HttpResponse
from azure.mgmt.core import ARMPipelineClient
from . import models as _models
from .._serialization import Deserializer, Serializer
from ._configuration import PolicyClientConfiguration
from .operations import PolicyExemptionsOperations
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials import TokenCredential
class PolicyClient: # pylint: disable=client-accepts-api-version-keyword
"""To exempt your resources from policy evaluation and non-compliance state, you can create an
exemption at a scope.
:ivar policy_exemptions: PolicyExemptionsOperations operations
:vartype policy_exemptions:
azure.mgmt.resource.policy.v2020_07_01_preview.operations.PolicyExemptionsOperations
:param credential: Credential needed for the client to connect to Azure. Required.
:type credential: ~azure.core.credentials.TokenCredential
:param subscription_id: The ID of the target subscription. Required.
:type subscription_id: str
:param base_url: Service URL. Default value is "https://management.azure.com".
:type base_url: str
:keyword api_version: Api Version. Default value is "2020-07-01-preview". Note that overriding
this default value may result in unsupported behavior.
:paramtype api_version: str
"""
def __init__(
self,
credential: "TokenCredential",
subscription_id: str,
base_url: str = "https://management.azure.com",
**kwargs: Any
) -> None:
self._config = PolicyClientConfiguration(credential=credential, subscription_id=subscription_id, **kwargs)
self._client: ARMPipelineClient = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
self._serialize.client_side_validation = False
self.policy_exemptions = PolicyExemptionsOperations(
self._client, self._config, self._serialize, self._deserialize
)
def _send_request(self, request: HttpRequest, **kwargs: Any) -> HttpResponse:
"""Runs the network request through the client's chained policies.
>>> from azure.core.rest import HttpRequest
>>> request = HttpRequest("GET", "https://www.example.org/")
<HttpRequest [GET], url: 'https://www.example.org/'>
>>> response = client._send_request(request)
<HttpResponse: 200 OK>
For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request
:param request: The network request you want to make. Required.
:type request: ~azure.core.rest.HttpRequest
:keyword bool stream: Whether the response payload will be streamed. Defaults to False.
:return: The response of your network call. Does not do error handling on your response.
:rtype: ~azure.core.rest.HttpResponse
"""
request_copy = deepcopy(request)
request_copy.url = self._client.format_url(request_copy.url)
return self._client.send_request(request_copy, **kwargs)
def close(self) -> None:
self._client.close()
def __enter__(self) -> "PolicyClient":
self._client.__enter__()
return self
def __exit__(self, *exc_details: Any) -> None:
self._client.__exit__(*exc_details)
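# Hedged usage sketch (subscription id and credential are placeholders; assumes
# the list operation exposed by this API version):
#
#   from azure.identity import DefaultAzureCredential
#
#   client = PolicyClient(DefaultAzureCredential(), "<subscription-id>")
#   for exemption in client.policy_exemptions.list():
#       print(exemption.name)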
| [
"[email protected]"
]
| |
18524e6801d0308155e4a34d799b0733c105b938 | b27e5b8760a719d264a0be81d6e7097bd0708963 | /UserDev/EventDisplay/python/pyqtgraph/widgets/TableWidget.py | 9b9dcc49dc3bb07fd742ca831fb9d2ee3ec33a3d | []
| no_license | davidc1/gallery-framework | 8b11540546912bd79e8f35b34faea4470844203a | 27fd97cbc156d76ec682e5a207a14527c7cce957 | refs/heads/master | 2020-03-29T00:05:22.018380 | 2019-07-30T14:35:20 | 2019-07-30T14:35:20 | 149,324,237 | 3 | 1 | null | 2023-08-10T09:01:16 | 2018-09-18T17:06:23 | Python | UTF-8 | Python | false | false | 18,117 | py | # -*- coding: utf-8 -*-
import numpy as np
from ..Qt import QtGui, QtCore
from ..python2_3 import asUnicode, basestring
from .. import metaarray
__all__ = ['TableWidget']
def _defersort(fn):
def defersort(self, *args, **kwds):
# may be called recursively; only the first call needs to block sorting
setSorting = False
if self._sorting is None:
self._sorting = self.isSortingEnabled()
setSorting = True
self.setSortingEnabled(False)
try:
return fn(self, *args, **kwds)
finally:
if setSorting:
self.setSortingEnabled(self._sorting)
self._sorting = None
return defersort
class TableWidget(QtGui.QTableWidget):
"""Extends QTableWidget with some useful functions for automatic data handling
and copy / export context menu. Can automatically format and display a variety
of data types (see :func:`setData() <pyqtgraph.TableWidget.setData>` for more
    information.)
"""
def __init__(self, *args, **kwds):
"""
All positional arguments are passed to QTableWidget.__init__().
===================== =================================================
**Keyword Arguments**
editable (bool) If True, cells in the table can be edited
by the user. Default is False.
sortable (bool) If True, the table may be soted by
clicking on column headers. Note that this also
causes rows to appear initially shuffled until
a sort column is selected. Default is True.
*(added in version 0.9.9)*
===================== =================================================
"""
QtGui.QTableWidget.__init__(self, *args)
self.itemClass = TableWidgetItem
self.setVerticalScrollMode(self.ScrollPerPixel)
self.setSelectionMode(QtGui.QAbstractItemView.ContiguousSelection)
self.setSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)
self.clear()
kwds.setdefault('sortable', True)
kwds.setdefault('editable', False)
self.setEditable(kwds.pop('editable'))
self.setSortingEnabled(kwds.pop('sortable'))
if len(kwds) > 0:
raise TypeError("Invalid keyword arguments '%s'" % kwds.keys())
self._sorting = None # used when temporarily disabling sorting
self._formats = {None: None} # stores per-column formats and entire table format
self.sortModes = {} # stores per-column sort mode
self.itemChanged.connect(self.handleItemChanged)
self.contextMenu = QtGui.QMenu()
self.contextMenu.addAction('Copy Selection').triggered.connect(self.copySel)
self.contextMenu.addAction('Copy All').triggered.connect(self.copyAll)
self.contextMenu.addAction('Save Selection').triggered.connect(self.saveSel)
self.contextMenu.addAction('Save All').triggered.connect(self.saveAll)
def clear(self):
"""Clear all contents from the table."""
QtGui.QTableWidget.clear(self)
self.verticalHeadersSet = False
self.horizontalHeadersSet = False
self.items = []
self.setRowCount(0)
self.setColumnCount(0)
self.sortModes = {}
def setData(self, data):
"""Set the data displayed in the table.
Allowed formats are:
* numpy arrays
* numpy record arrays
* metaarrays
* list-of-lists [[1,2,3], [4,5,6]]
* dict-of-lists {'x': [1,2,3], 'y': [4,5,6]}
* list-of-dicts [{'x': 1, 'y': 4}, {'x': 2, 'y': 5}, ...]
"""
self.clear()
self.appendData(data)
self.resizeColumnsToContents()
@_defersort
def appendData(self, data):
"""
Add new rows to the table.
See :func:`setData() <pyqtgraph.TableWidget.setData>` for accepted
data types.
"""
startRow = self.rowCount()
fn0, header0 = self.iteratorFn(data)
if fn0 is None:
self.clear()
return
it0 = fn0(data)
try:
first = next(it0)
except StopIteration:
return
fn1, header1 = self.iteratorFn(first)
if fn1 is None:
self.clear()
return
firstVals = [x for x in fn1(first)]
self.setColumnCount(len(firstVals))
if not self.verticalHeadersSet and header0 is not None:
labels = [self.verticalHeaderItem(i).text() for i in range(self.rowCount())]
self.setRowCount(startRow + len(header0))
self.setVerticalHeaderLabels(labels + header0)
self.verticalHeadersSet = True
if not self.horizontalHeadersSet and header1 is not None:
self.setHorizontalHeaderLabels(header1)
self.horizontalHeadersSet = True
i = startRow
self.setRow(i, firstVals)
for row in it0:
i += 1
self.setRow(i, [x for x in fn1(row)])
if self._sorting and self.horizontalHeader().sortIndicatorSection() >= self.columnCount():
self.sortByColumn(0, QtCore.Qt.AscendingOrder)
def setEditable(self, editable=True):
self.editable = editable
for item in self.items:
item.setEditable(editable)
def setFormat(self, format, column=None):
"""
Specify the default text formatting for the entire table, or for a
single column if *column* is specified.
If a string is specified, it is used as a format string for converting
float values (and all other types are converted using str). If a
function is specified, it will be called with the item as its only
argument and must return a string. Setting format = None causes the
default formatter to be used instead.
Added in version 0.9.9.
"""
if format is not None and not isinstance(format, basestring) and not callable(format):
raise ValueError("Format argument must string, callable, or None. (got %s)" % format)
self._formats[column] = format
if column is None:
# update format of all items that do not have a column format
# specified
for c in range(self.columnCount()):
if self._formats.get(c, None) is None:
for r in range(self.rowCount()):
item = self.item(r, c)
if item is None:
continue
item.setFormat(format)
else:
# set all items in the column to use this format, or the default
# table format if None was specified.
if format is None:
format = self._formats[None]
for r in range(self.rowCount()):
item = self.item(r, column)
if item is None:
continue
item.setFormat(format)
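    # Illustrative calls (column indices are hypothetical):
    #   table.setFormat('%0.6f')                  # six decimals for all floats
    #   table.setFormat('%d', column=0)           # integer formatting in column 0
    #   table.setFormat(lambda item: '%0.1f%%' % (item.value * 100), column=2)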
def iteratorFn(self, data):
## Return 1) a function that will provide an iterator for data and 2) a list of header strings
if isinstance(data, list) or isinstance(data, tuple):
return lambda d: d.__iter__(), None
elif isinstance(data, dict):
return lambda d: iter(d.values()), list(map(asUnicode, data.keys()))
elif (hasattr(data, 'implements') and data.implements('MetaArray')):
if data.axisHasColumns(0):
header = [asUnicode(data.columnName(0, i)) for i in range(data.shape[0])]
elif data.axisHasValues(0):
header = list(map(asUnicode, data.xvals(0)))
else:
header = None
return self.iterFirstAxis, header
elif isinstance(data, np.ndarray):
return self.iterFirstAxis, None
elif isinstance(data, np.void):
return self.iterate, list(map(asUnicode, data.dtype.names))
elif data is None:
return (None,None)
else:
msg = "Don't know how to iterate over data type: {!s}".format(type(data))
raise TypeError(msg)
def iterFirstAxis(self, data):
for i in range(data.shape[0]):
yield data[i]
def iterate(self, data):
# for numpy.void, which can be iterated but mysteriously
# has no __iter__ (??)
for x in data:
yield x
def appendRow(self, data):
self.appendData([data])
@_defersort
def addRow(self, vals):
row = self.rowCount()
self.setRowCount(row + 1)
self.setRow(row, vals)
@_defersort
def setRow(self, row, vals):
if row > self.rowCount() - 1:
self.setRowCount(row + 1)
for col in range(len(vals)):
val = vals[col]
item = self.itemClass(val, row)
item.setEditable(self.editable)
sortMode = self.sortModes.get(col, None)
if sortMode is not None:
item.setSortMode(sortMode)
format = self._formats.get(col, self._formats[None])
item.setFormat(format)
self.items.append(item)
self.setItem(row, col, item)
item.setValue(val) # Required--the text-change callback is invoked
# when we call setItem.
def setSortMode(self, column, mode):
"""
Set the mode used to sort *column*.
============== ========================================================
**Sort Modes**
value Compares item.value if available; falls back to text
comparison.
text Compares item.text()
index Compares by the order in which items were inserted.
============== ========================================================
Added in version 0.9.9
"""
for r in range(self.rowCount()):
item = self.item(r, column)
if hasattr(item, 'setSortMode'):
item.setSortMode(mode)
self.sortModes[column] = mode
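    # Example (hypothetical): sort column 0 by insertion order rather than value:
    #   table.setSortMode(0, 'index')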
def sizeHint(self):
# based on http://stackoverflow.com/a/7195443/54056
width = sum(self.columnWidth(i) for i in range(self.columnCount()))
width += self.verticalHeader().sizeHint().width()
width += self.verticalScrollBar().sizeHint().width()
width += self.frameWidth() * 2
height = sum(self.rowHeight(i) for i in range(self.rowCount()))
height += self.verticalHeader().sizeHint().height()
height += self.horizontalScrollBar().sizeHint().height()
return QtCore.QSize(width, height)
def serialize(self, useSelection=False):
"""Convert entire table (or just selected area) into tab-separated text values"""
if useSelection:
selection = self.selectedRanges()[0]
rows = list(range(selection.topRow(),
selection.bottomRow() + 1))
columns = list(range(selection.leftColumn(),
selection.rightColumn() + 1))
else:
rows = list(range(self.rowCount()))
columns = list(range(self.columnCount()))
data = []
if self.horizontalHeadersSet:
row = []
if self.verticalHeadersSet:
row.append(asUnicode(''))
for c in columns:
row.append(asUnicode(self.horizontalHeaderItem(c).text()))
data.append(row)
for r in rows:
row = []
if self.verticalHeadersSet:
row.append(asUnicode(self.verticalHeaderItem(r).text()))
for c in columns:
item = self.item(r, c)
if item is not None:
row.append(asUnicode(item.value))
else:
row.append(asUnicode(''))
data.append(row)
s = ''
for row in data:
s += ('\t'.join(row) + '\n')
return s
def copySel(self):
"""Copy selected data to clipboard."""
QtGui.QApplication.clipboard().setText(self.serialize(useSelection=True))
def copyAll(self):
"""Copy all data to clipboard."""
QtGui.QApplication.clipboard().setText(self.serialize(useSelection=False))
def saveSel(self):
"""Save selected data to file."""
self.save(self.serialize(useSelection=True))
def saveAll(self):
"""Save all data to file."""
self.save(self.serialize(useSelection=False))
def save(self, data):
fileName = QtGui.QFileDialog.getSaveFileName(self, "Save As..", "", "Tab-separated values (*.tsv)")
if fileName == '':
return
open(fileName, 'w').write(data)
def contextMenuEvent(self, ev):
self.contextMenu.popup(ev.globalPos())
def keyPressEvent(self, ev):
if ev.text() == 'c' and ev.modifiers() == QtCore.Qt.ControlModifier:
ev.accept()
self.copy()
else:
ev.ignore()
def handleItemChanged(self, item):
item.itemChanged()
class TableWidgetItem(QtGui.QTableWidgetItem):
def __init__(self, val, index, format=None):
QtGui.QTableWidgetItem.__init__(self, '')
self._blockValueChange = False
self._format = None
self._defaultFormat = '%0.3g'
self.sortMode = 'value'
self.index = index
flags = QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled
self.setFlags(flags)
self.setValue(val)
self.setFormat(format)
def setEditable(self, editable):
"""
Set whether this item is user-editable.
"""
if editable:
self.setFlags(self.flags() | QtCore.Qt.ItemIsEditable)
else:
self.setFlags(self.flags() & ~QtCore.Qt.ItemIsEditable)
def setSortMode(self, mode):
"""
Set the mode used to sort this item against others in its column.
============== ========================================================
**Sort Modes**
value Compares item.value if available; falls back to text
comparison.
text Compares item.text()
index Compares by the order in which items were inserted.
============== ========================================================
"""
modes = ('value', 'text', 'index', None)
if mode not in modes:
raise ValueError('Sort mode must be one of %s' % str(modes))
self.sortMode = mode
def setFormat(self, fmt):
"""Define the conversion from item value to displayed text.
If a string is specified, it is used as a format string for converting
float values (and all other types are converted using str). If a
function is specified, it will be called with the item as its only
argument and must return a string.
Added in version 0.9.9.
"""
if fmt is not None and not isinstance(fmt, basestring) and not callable(fmt):
raise ValueError("Format argument must string, callable, or None. (got %s)" % fmt)
self._format = fmt
self._updateText()
def _updateText(self):
self._blockValueChange = True
try:
self._text = self.format()
self.setText(self._text)
finally:
self._blockValueChange = False
def setValue(self, value):
self.value = value
self._updateText()
def itemChanged(self):
"""Called when the data of this item has changed."""
if self.text() != self._text:
self.textChanged()
def textChanged(self):
"""Called when this item's text has changed for any reason."""
self._text = self.text()
if self._blockValueChange:
# text change was result of value or format change; do not
# propagate.
return
try:
self.value = type(self.value)(self.text())
except ValueError:
self.value = str(self.text())
def format(self):
if callable(self._format):
return self._format(self)
if isinstance(self.value, (float, np.floating)):
if self._format is None:
return self._defaultFormat % self.value
else:
return self._format % self.value
else:
return asUnicode(self.value)
def __lt__(self, other):
if self.sortMode == 'index' and hasattr(other, 'index'):
return self.index < other.index
if self.sortMode == 'value' and hasattr(other, 'value'):
return self.value < other.value
else:
return self.text() < other.text()
if __name__ == '__main__':
app = QtGui.QApplication([])
win = QtGui.QMainWindow()
t = TableWidget()
win.setCentralWidget(t)
win.resize(800,600)
win.show()
ll = [[1,2,3,4,5]] * 20
ld = [{'x': 1, 'y': 2, 'z': 3}] * 20
dl = {'x': list(range(20)), 'y': list(range(20)), 'z': list(range(20))}
a = np.ones((20, 5))
ra = np.ones((20,), dtype=[('x', int), ('y', int), ('z', int)])
t.setData(ll)
ma = metaarray.MetaArray(np.ones((20, 3)), info=[
{'values': np.linspace(1, 5, 20)},
{'cols': [
{'name': 'x'},
{'name': 'y'},
{'name': 'z'},
]}
])
t.setData(ma)
| [
"[email protected]"
]
| |
6757af7cac4bdda23747b34a7f1f13a843f4ffea | 8ce656578e04369cea75c81b529b977fb1d58d94 | /bank_guarantee/migrations/0037_auto_20200221_1322.py | f40f4f6609c6fc8c2adbea5af7ea5f848ca4d2e0 | []
| no_license | JJvzd/django_exp | f9a08c40a6a7535777a8b5005daafe581d8fe1dc | b1df4681e67aad49a1ce6426682df66b81465cb6 | refs/heads/master | 2023-05-31T13:21:24.178394 | 2021-06-22T10:19:43 | 2021-06-22T10:19:43 | 379,227,324 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 457 | py | # Generated by Django 2.1.7 on 2020-02-21 10:22
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('bank_guarantee', '0036_auto_20200131_0005'),
]
operations = [
migrations.AlterField(
model_name='requeststatus',
name='code',
            field=models.CharField(blank=True, max_length=30, null=True, unique=True, verbose_name='Code'),
),
]
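# Note (assumption): RunPython without a reverse_code callable makes this
# migration irreversible; migrations.RunPython(create_site,
# migrations.RunPython.noop) would let `migrate` roll back past it harmlessly.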
| [
"[email protected]"
]
| |
938b1412931f54fefe25078052ee0bc92effebf3 | 9cd180fc7594eb018c41f0bf0b54548741fd33ba | /sdk/python/pulumi_azure_nextgen/storage/v20181101/get_blob_container_immutability_policy.py | ec61eb93d9aab5985bfca2abc2f0d65f0e8c4e2e | [
"Apache-2.0",
"BSD-3-Clause"
]
| permissive | MisinformedDNA/pulumi-azure-nextgen | c71971359450d03f13a53645171f621e200fe82d | f0022686b655c2b0744a9f47915aadaa183eed3b | refs/heads/master | 2022-12-17T22:27:37.916546 | 2020-09-28T16:03:59 | 2020-09-28T16:03:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,604 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
__all__ = [
'GetBlobContainerImmutabilityPolicyResult',
'AwaitableGetBlobContainerImmutabilityPolicyResult',
'get_blob_container_immutability_policy',
]
@pulumi.output_type
class GetBlobContainerImmutabilityPolicyResult:
"""
The ImmutabilityPolicy property of a blob container, including Id, resource name, resource type, Etag.
"""
def __init__(__self__, etag=None, immutability_period_since_creation_in_days=None, name=None, state=None, type=None):
if etag and not isinstance(etag, str):
raise TypeError("Expected argument 'etag' to be a str")
pulumi.set(__self__, "etag", etag)
if immutability_period_since_creation_in_days and not isinstance(immutability_period_since_creation_in_days, int):
raise TypeError("Expected argument 'immutability_period_since_creation_in_days' to be a int")
pulumi.set(__self__, "immutability_period_since_creation_in_days", immutability_period_since_creation_in_days)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if state and not isinstance(state, str):
raise TypeError("Expected argument 'state' to be a str")
pulumi.set(__self__, "state", state)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def etag(self) -> str:
"""
Resource Etag.
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter(name="immutabilityPeriodSinceCreationInDays")
def immutability_period_since_creation_in_days(self) -> int:
"""
The immutability period for the blobs in the container since the policy creation, in days.
"""
return pulumi.get(self, "immutability_period_since_creation_in_days")
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def state(self) -> str:
"""
The ImmutabilityPolicy state of a blob container, possible values include: Locked and Unlocked.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter
def type(self) -> str:
"""
The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts.
"""
return pulumi.get(self, "type")
class AwaitableGetBlobContainerImmutabilityPolicyResult(GetBlobContainerImmutabilityPolicyResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetBlobContainerImmutabilityPolicyResult(
etag=self.etag,
immutability_period_since_creation_in_days=self.immutability_period_since_creation_in_days,
name=self.name,
state=self.state,
type=self.type)
def get_blob_container_immutability_policy(account_name: Optional[str] = None,
container_name: Optional[str] = None,
immutability_policy_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetBlobContainerImmutabilityPolicyResult:
"""
Use this data source to access information about an existing resource.
:param str account_name: The name of the storage account within the specified resource group. Storage account names must be between 3 and 24 characters in length and use numbers and lower-case letters only.
:param str container_name: The name of the blob container within the specified storage account. Blob container names must be between 3 and 63 characters in length and use numbers, lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter or number.
:param str immutability_policy_name: The name of the blob container immutabilityPolicy within the specified storage account. ImmutabilityPolicy Name must be 'default'
:param str resource_group_name: The name of the resource group within the user's subscription. The name is case insensitive.
"""
__args__ = dict()
__args__['accountName'] = account_name
__args__['containerName'] = container_name
__args__['immutabilityPolicyName'] = immutability_policy_name
__args__['resourceGroupName'] = resource_group_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-nextgen:storage/v20181101:getBlobContainerImmutabilityPolicy', __args__, opts=opts, typ=GetBlobContainerImmutabilityPolicyResult).value
return AwaitableGetBlobContainerImmutabilityPolicyResult(
etag=__ret__.etag,
immutability_period_since_creation_in_days=__ret__.immutability_period_since_creation_in_days,
name=__ret__.name,
state=__ret__.state,
type=__ret__.type)
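# Hedged usage sketch (all resource names are placeholders):
#
#   policy = get_blob_container_immutability_policy(
#       account_name="mystorageacct",
#       container_name="mycontainer",
#       immutability_policy_name="default",
#       resource_group_name="my-rg")
#   pulumi.export("immutabilityDays",
#                 policy.immutability_period_since_creation_in_days)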
| [
"[email protected]"
]
| |
29063ff6540ddec2aa06c9e35f1bc7a3b64d8b2e | 8a3e7b779676e396853dc1fb22525e501050cffb | /geoist/vis/gui.py | 118bd9e8fa0d14501a4eb9f0a7269d00960258ab | [
"MIT"
]
| permissive | CHEN-Zhaohui/geoist | 3a8218105b8bd21d23f3e15e3d20397adf8f571d | 06a00db3e0ed3d92abf3e45b7b3bfbef6a858a5b | refs/heads/master | 2021-03-31T19:19:04.472355 | 2020-03-18T03:18:04 | 2020-03-18T03:18:04 | 248,126,521 | 0 | 0 | MIT | 2020-03-18T03:07:54 | 2020-03-18T03:07:53 | null | UTF-8 | Python | false | false | 4,325 | py | # -*- coding: utf-8 -*-
"""
Created on Sat Aug 10 18:25:58 2019
@author: chens
"""
from tkinter import *
import os
#from simpledialog import simpledialog
import gimodule
gimodule.maxwidth = 140
# Since the interface now has to columns of buttons this must be wider
## Constants
programname = "MagTools APIs - Geomagnetic reference field models"
version = "0.1"
## Starting the program and creating classes
class App:
def __init__(self, master):
frame = Frame(master)
frame.grid()
        mainLabel = gimodule.mainline(frame, "Directional drilling (NWD) geomagnetic reference field model API front end", version)
        gimodule.seperator_line(frame,10, "Geomagnetic field information")
        covfit = gimodule.LauncherButton(frame,11,18,0,"Nearest station",
            lambda:[covfit.run("covfit.py")], "API: http://0.0.0.0/magv1/nearestMagSta?")
        empcov = gimodule.LauncherButton(frame,11,18,1,"Stations in region", lambda:
            [empcov.run("empcov.py")], "API: http://0.0.0.0/magv1/selMagSta?")
        geocol = gimodule.LauncherButton(frame,12,18,0,"Component conversion", lambda:
            [geocol.run("geocol.py")], "API: xyz2hdi/hdi2xyz")
        tc = gimodule.LauncherButton(frame,12,18,1,"Optimal time variation", lambda:
            [tc.run("tc.py")], "API: magts")
        gimodule.seperator_line(frame,40, "Main field + lithospheric field: EMM2015 model API")
        geogrid = gimodule.LauncherButton(frame,41,18,0,"Single point",
            lambda: [geogrid.run("geogrid.py")], "API: emmpnt ")
        geoip = gimodule.LauncherButton(frame,41,18,1,"Grid", lambda:
            [geoip.run("geoip.py")], "API: emmgrd")
        geoegm = gimodule.LauncherButton(frame,42,18,0,"Time series",
            lambda:[geoegm.run("geoegm.py")], "API: emmts")
        stokes = gimodule.LauncherButton(frame,42,18,1,"Multiple points", lambda:
[stokes.run("stokes.py")], "API: emmpnts")
        gimodule.seperator_line(frame,70, "Main field 1: IGRF12 model API")
        geogrid = gimodule.LauncherButton(frame,71,18,0,"Single point",
            lambda: [geogrid.run("geogrid.py")], "API: igrfpnt ")
        geoip = gimodule.LauncherButton(frame,71,18,1,"Grid", lambda:
            [geoip.run("geoip.py")], "API: igrfgrd")
        geoegm = gimodule.LauncherButton(frame,72,18,0,"Time series",
            lambda:[geoegm.run("geoegm.py")], "API: igrfts")
        stokes = gimodule.LauncherButton(frame,72,18,1,"Multiple points", lambda:
            [stokes.run("stokes.py")], "API: igrfpnts")
        gimodule.seperator_line(frame,100, "Main field 2: WMM2015 model API")
        geogrid = gimodule.LauncherButton(frame,101,18,0,"Single point",
            lambda: [geogrid.run("geogrid.py")], "API: wmmpnt ")
        geoip = gimodule.LauncherButton(frame,101,18,1,"Grid", lambda:
            [geoip.run("geoip.py")], "API: wmmgrd")
        geoegm = gimodule.LauncherButton(frame,102,18,0,"Time series",
            lambda:[geoegm.run("geoegm.py")], "API: wmmts")
        stokes = gimodule.LauncherButton(frame,102,18,1,"Multiple points", lambda:
            [stokes.run("stokes.py")], "API: wmmpnts")
        gimodule.seperator_line(frame,130, "Ionospheric field: DIFI-4 model API")
        geogrid = gimodule.LauncherButton(frame,131,18,0,"Single point",
            lambda: [geogrid.run("geogrid.py")], "API: difipnt ")
        geoip = gimodule.LauncherButton(frame,131,18,1,"Grid", lambda:
            [geoip.run("geoip.py")], "API: difigrd")
        geoegm = gimodule.LauncherButton(frame,132,18,0,"Time series",
            lambda:[geoegm.run("geoegm.py")], "API: dififts")
        stokes = gimodule.LauncherButton(frame,132,18,1,"Multiple points", lambda:
[stokes.run("stokes.py")], "API: difipnts")
gimodule.seperator_line(frame,gimodule.maxrow-2)
        button = Button(frame, text="Exit", width=8, command=frame.quit)
button.grid(row=gimodule.maxrow, column=0, sticky=W)
######################################################
## Initiate the program and start program loop
######################################################
root = Tk()
app = App(root)
root.title(programname)
root.mainloop()
| [
"[email protected]"
]
|