blob_id (string, length 40 to 40) | directory_id (string, length 40 to 40) | path (string, length 3 to 616) | content_id (string, length 40 to 40) | detected_licenses (sequence, length 0 to 112) | license_type (string, 2 classes) | repo_name (string, length 5 to 115) | snapshot_id (string, length 40 to 40) | revision_id (string, length 40 to 40) | branch_name (string, 777 classes) | visit_date (timestamp[us], 2015-08-06 10:31:46 to 2023-09-06 10:44:38) | revision_date (timestamp[us], 1970-01-01 02:38:32 to 2037-05-03 13:00:00) | committer_date (timestamp[us], 1970-01-01 02:38:32 to 2023-09-06 01:08:06) | github_id (int64, 4.92k to 681M, nullable ⌀) | star_events_count (int64, 0 to 209k) | fork_events_count (int64, 0 to 110k) | gha_license_id (string, 22 classes) | gha_event_created_at (timestamp[us], 2012-06-04 01:52:49 to 2023-09-14 21:59:50, nullable ⌀) | gha_created_at (timestamp[us], 2008-05-22 07:58:19 to 2023-08-21 12:35:19, nullable ⌀) | gha_language (string, 149 classes) | src_encoding (string, 26 classes) | language (string, 1 class) | is_vendor (bool, 2 classes) | is_generated (bool, 2 classes) | length_bytes (int64, 3 to 10.2M) | extension (string, 188 classes) | content (string, length 3 to 10.2M) | authors (sequence, length 1 to 1) | author_id (string, length 1 to 132) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
a12343947c99a0584b18996596487918113884d1 | 255e19ddc1bcde0d3d4fe70e01cec9bb724979c9 | /all-gists/1360455/snippet.py | ff62eb5f770ed285b9b8fdc6e6f331c6b6e4e651 | [
"MIT"
] | permissive | gistable/gistable | 26c1e909928ec463026811f69b61619b62f14721 | 665d39a2bd82543d5196555f0801ef8fd4a3ee48 | refs/heads/master | 2023-02-17T21:33:55.558398 | 2023-02-11T18:20:10 | 2023-02-11T18:20:10 | 119,861,038 | 76 | 19 | null | 2020-07-26T03:14:55 | 2018-02-01T16:19:24 | Python | UTF-8 | Python | false | false | 3,727 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Done under Visual Studio 2010 using the excellent Python Tools for Visual Studio
# http://pytools.codeplex.com/
#
# Article on ideas vs execution at: http://blog.databigbang.com/ideas-and-execution-magic-chart/
import urllib2
import json
from datetime import datetime
from time import mktime
import csv
import codecs
import cStringIO
class CSVUnicodeWriter: # http://docs.python.org/library/csv.html
"""
A CSV writer which will write rows to CSV file "f",
which is encoded in the given encoding.
"""
def __init__(self, f, dialect=csv.excel, encoding="utf-8", **kwds):
# Redirect output to a queue
self.queue = cStringIO.StringIO()
self.writer = csv.writer(self.queue, dialect=dialect, **kwds)
self.stream = f
self.encoder = codecs.getincrementalencoder(encoding)()
def writerow(self, row):
self.writer.writerow([s.encode("utf-8") for s in row])
# Fetch UTF-8 output from the queue ...
data = self.queue.getvalue()
data = data.decode("utf-8")
# ... and reencode it into the target encoding
data = self.encoder.encode(data)
# write to the target stream
self.stream.write(data)
# empty queue
self.queue.truncate(0)
def writerows(self, rows):
for row in rows:
self.writerow(row)
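    # Illustrative usage (added sketch; the file name below is hypothetical, not from the original
    # gist): each row is written into the in-memory queue as UTF-8, fetched back, decoded, and
    # re-encoded through the incremental encoder before being written to the real stream.
    #
    #   with open('example.csv', 'wb') as f:
    #       writer = CSVUnicodeWriter(f)
    #       writer.writerow([u'ID', u'Caf\xe9'])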
def get_hackernews_articles_with_idea_in_the_title():
endpoint = 'http://api.thriftdb.com/api.hnsearch.com/items/_search?filter[fields][title]=idea&start={0}&limit={1}&sortby=map(ms(create_ts),{2},{3},4294967295000)%20asc'
incomplete_iso_8601_format = '%Y-%m-%dT%H:%M:%SZ'
items = {}
start = 0
limit = 100
begin_range = 0
end_range = 0
url = endpoint.format(start, limit, begin_range, str(int(end_range)))
response = urllib2.urlopen(url).read()
data = json.loads(response)
prev_timestamp = datetime.fromtimestamp(0)
results = data['results']
while results:
for e in data['results']:
_id = e['item']['id']
title = e['item']['title']
points = e['item']['points']
num_comments = e['item']['num_comments']
timestamp = datetime.strptime(e['item']['create_ts'], incomplete_iso_8601_format)
            #if timestamp < prev_timestamp: # The results are not correctly sorted. We can't rely on this one.
            if _id in items: # If the circle is complete.
                return items
            prev_timestamp = timestamp
            items[_id] = {'id':_id, 'title':title, 'points':points, 'num_comments':num_comments, 'timestamp':timestamp}
            title_utf8 = title.encode('utf-8')
            print title_utf8, timestamp, _id, points, num_comments
        start += len(results)
        if start + limit > 1000:
start = 0
end_range = mktime(timestamp.timetuple())*1000
url = endpoint.format(start, limit, begin_range, str(int(end_range))) # if not str(int(x)) then a float gives in the sci math form: '1.24267528e+12'
response = urllib2.urlopen(url).read()
data = json.loads(response)
results = data['results']
return items
if __name__ == '__main__':
items = get_hackernews_articles_with_idea_in_the_title()
with open('hn-articles.csv', 'wb') as f:
hn_articles = CSVUnicodeWriter(f)
hn_articles.writerow(['ID', 'Timestamp', 'Title', 'Points', '# Comments'])
for k,e in items.items():
hn_articles.writerow([str(e['id']), str(e['timestamp']), e['title'], str(e['points']), str(e['num_comments'])])
# It returns 3706 articles where the query says that they are 3711... find the bug... | [
"[email protected]"
] | |
9e427939fee2e4d3f52f2a70e6743b49bcc4d34e | cc2fcc1a0c5ea9789f98ec97614d7b25b03ba101 | /st2tests/integration/mistral/test_errors.py | 3280859646406164d582cf4022c8c414ea41ca1f | [
"Apache-2.0"
] | permissive | Junsheng-Wu/st2 | 6451808da7de84798641882ca202c3d1688f8ba8 | c3cdf657f7008095f3c68b4132b9fe76d2f52d81 | refs/heads/master | 2022-04-30T21:32:44.039258 | 2020-03-03T07:03:57 | 2020-03-03T07:03:57 | 244,301,363 | 0 | 0 | Apache-2.0 | 2022-03-29T22:04:26 | 2020-03-02T06:53:58 | Python | UTF-8 | Python | false | false | 6,469 | py | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from integration.mistral import base
class ExceptionHandlingTest(base.TestWorkflowExecution):
def test_bad_workflow(self):
with self.assertRaises(Exception) as t:
self._execute_workflow('examples.mistral-foobar', {})
self.assertIn('Action "examples.mistral-foobar" cannot be found', t.exception.message)
def test_bad_action(self):
execution = self._execute_workflow('examples.mistral-error-bad-action', {})
execution = self._wait_for_completion(execution)
self._assert_failure(execution)
self.assertIn('Failed to find action', execution.result['extra']['state_info'])
def test_bad_wf_arg(self):
execution = self._execute_workflow('examples.mistral-error-bad-wf-arg', {})
execution = self._wait_for_completion(
execution,
expect_tasks=False,
expect_tasks_completed=False
)
self._assert_failure(execution, expect_tasks_failure=False)
self.assertIn('Invalid input', execution.result['extra']['state_info'])
def test_bad_task_transition(self):
execution = self._execute_workflow('examples.mistral-error-bad-task-transition', {})
execution = self._wait_for_completion(
execution,
expect_tasks=False,
expect_tasks_completed=False
)
self._assert_failure(execution, expect_tasks_failure=False)
self.assertIn("Task 'task3' not found", execution.result['error'])
def test_bad_with_items(self):
execution = self._execute_workflow('examples.mistral-error-bad-with-items', {})
execution = self._wait_for_completion(execution, expect_tasks=False)
self._assert_failure(execution, expect_tasks_failure=False)
self.assertIn('Wrong input format', execution.result['extra']['state_info'])
def test_bad_expr_yaql(self):
execution = self._execute_workflow('examples.mistral-test-yaql-bad-expr', {})
execution = self._wait_for_completion(execution)
self._assert_failure(execution, expect_tasks_failure=False)
self.assertIn('Can not evaluate YAQL expression', execution.result['extra']['state_info'])
def test_bad_publish_yaql(self):
execution = self._execute_workflow('examples.mistral-test-yaql-bad-publish', {})
execution = self._wait_for_completion(execution)
self._assert_failure(execution, expect_tasks_failure=False)
self.assertIn('Can not evaluate YAQL expression', execution.result['extra']['state_info'])
def test_bad_subworkflow_input_yaql(self):
execution = self._execute_workflow('examples.mistral-test-yaql-bad-subworkflow-input', {})
execution = self._wait_for_completion(execution)
self._assert_failure(execution, expect_tasks_failure=False)
self.assertIn('Can not evaluate YAQL expression', execution.result['extra']['state_info'])
def test_bad_task_transition_yaql(self):
execution = self._execute_workflow('examples.mistral-test-yaql-bad-task-transition', {})
execution = self._wait_for_completion(execution)
self._assert_failure(execution, expect_tasks_failure=False)
self.assertIn('Can not evaluate YAQL expression', execution.result['extra']['state_info'])
def test_bad_with_items_yaql(self):
execution = self._execute_workflow('examples.mistral-test-yaql-bad-with-items', {})
execution = self._wait_for_completion(execution, expect_tasks=False)
self._assert_failure(execution, expect_tasks_failure=False)
self.assertIn('Can not evaluate YAQL expression', execution.result['extra']['state_info'])
def test_bad_expr_jinja(self):
execution = self._execute_workflow('examples.mistral-test-jinja-bad-expr', {})
execution = self._wait_for_completion(execution, expect_tasks=False)
self._assert_failure(execution, expect_tasks_failure=False)
# TODO: Currently, Mistral returns "UndefinedError ContextView object has no attribute".
        # Need to fix Mistral to return "Cannot evaluate Jinja expression."
# self.assertIn('Can not evaluate Jinja expression',
# execution.result['extra']['state_info'])
def test_bad_publish_jinja(self):
execution = self._execute_workflow('examples.mistral-test-jinja-bad-publish', {})
execution = self._wait_for_completion(execution)
self._assert_failure(execution, expect_tasks_failure=False)
self.assertIn('Can not evaluate Jinja expression', execution.result['extra']['state_info'])
def test_bad_subworkflow_input_jinja(self):
execution = self._execute_workflow('examples.mistral-test-jinja-bad-subworkflow-input', {})
execution = self._wait_for_completion(execution)
self._assert_failure(execution, expect_tasks_failure=False)
self.assertIn('Can not evaluate Jinja expression', execution.result['extra']['state_info'])
def test_bad_task_transition_jinja(self):
execution = self._execute_workflow('examples.mistral-test-jinja-bad-task-transition', {})
execution = self._wait_for_completion(execution)
self._assert_failure(execution, expect_tasks_failure=False)
self.assertIn('Can not evaluate Jinja expression', execution.result['extra']['state_info'])
def test_bad_with_items_jinja(self):
execution = self._execute_workflow('examples.mistral-test-jinja-bad-with-items', {})
execution = self._wait_for_completion(execution, expect_tasks=False)
self._assert_failure(execution, expect_tasks_failure=False)
self.assertIn('Can not evaluate Jinja expression', execution.result['extra']['state_info'])
| [
"[email protected]"
] | |
23c0dd25543411644e979a4ed4368b85c6f49098 | 4dbaea97b6b6ba4f94f8996b60734888b163f69a | /LeetCode/8.py | 15f59ed2df0e448995c3a574ba4fa386c04f4725 | [] | no_license | Ph0en1xGSeek/ACM | 099954dedfccd6e87767acb5d39780d04932fc63 | b6730843ab0455ac72b857c0dff1094df0ae40f5 | refs/heads/master | 2022-10-25T09:15:41.614817 | 2022-10-04T12:17:11 | 2022-10-04T12:17:11 | 63,936,497 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 671 | py | class Solution(object):
def myAtoi(self, str):
"""
:type str: str
:rtype: int
"""
import re
minus = 1
if len(str) == 0:
return 0
str = str.strip()
i = 0
while i < len(str):
if i == 0 and (str[i] == '-' or str[i] == '+') and minus == 1:
minus = -1
elif str[i] not in ['0','1','2','3','4','5','6','7','8','9']:
break
i += 1
if i == 0 or (i == 1 and minus == -1):
return 0
res = int(str[0:i])
res = min(res, 2147483647)
res = max(res, -2147483648)
return res | [
"[email protected]"
] | |
1f29a592c39022e79242a176b8638f31728d0fba | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_207/190.py | 4ea85e66ef60f663dfa02f1f700dbd13bd15454c | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,505 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from heapq import *
def read_ints():
return list(map(int, input().split()))
def solve(t):
N, r, o, y, g, b, v = read_ints()
if r == g != 0:
if o or y or b or v:
print('Case #{}: IMPOSSIBLE'.format(t))
else:
print('Case #{}: {}'.format(t, 'RG'*r))
return
if y == v != 0:
if r or o or g or b:
print('Case #{}: IMPOSSIBLE'.format(t))
else:
print('Case #{}: {}'.format(t, 'VY'*y))
return
if b == o != 0:
if r or y or g or v:
print('Case #{}: IMPOSSIBLE'.format(t))
else:
print('Case #{}: {}'.format(t, 'OB'*b))
return
r -= g
y -= v
b -= o
if r < 0 or y < 0 or b < 0:
print('Case #{}: IMPOSSIBLE'.format(t))
return
M = max(r, y, b)
h = [(-r, r != M, 'R'), (-y, y != M, 'Y'), (-b, b != M, 'B')]
heapify(h)
res = ''
count, _prio, ch = heappop(h)
while count < 0:
res += ch
count, _prio, ch = heapreplace(h, (count + 1, _prio, ch))
if res[-1] != res[0] and all(count == 0 for count, *_ in h):
res = res.replace('R', 'RG'*g + 'R', 1)
res = res.replace('Y', 'YV'*v + 'Y', 1)
res = res.replace('B', 'BO'*o + 'B', 1)
print('Case #{}: {}'.format(t, res))
else:
print('Case #{}: IMPOSSIBLE'.format(t))
if __name__ == "__main__":
for t in range(1, int(input())+1):
solve(t)
| [
"[email protected]"
] | |
fd95d5fbefacb5b37e09b549986f43d521ae44a2 | 21fec19cb8f74885cf8b59e7b07d1cd659735f6c | /chapter_8/dlg-custom.py | b1338fb1bb4b149b6737cc31b65a691d7ecc67ba | [
"MIT"
] | permissive | bimri/programming_python | ec77e875b9393179fdfb6cbc792b3babbdf7efbe | ba52ccd18b9b4e6c5387bf4032f381ae816b5e77 | refs/heads/master | 2023-09-02T12:21:11.898011 | 2021-10-26T22:32:34 | 2021-10-26T22:32:34 | 394,783,307 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,298 | py | "Custom Dialogs"
'''
Custom dialogs support arbitrary interfaces, but they are also the most complicated to
program. Even so, there’s not much to it—simply create a pop-up window as a
Toplevel with attached widgets, and arrange a callback handler to fetch user inputs
entered in the dialog (if any) and to destroy the window.
'''
import sys
from tkinter import *
makemodal = (len(sys.argv) > 1)
def dialog():
win = Toplevel() # make a new window
Label(win, text='Hard drive reformatted!').pack() # add a few widgets
Button(win, text='OK', command=win.destroy).pack() # set destroy callback
if makemodal:
win.focus_set() # take over input focus,
win.grab_set() # disable other windows while I'm open,
        win.wait_window() # and wait here until win destroyed
print('dialog exit') # else returns right away
root = Tk()
Button(root, text='popup', command=dialog).pack()
root.mainloop()
'''
Because dialogs are nonmodal in this mode, the
root window remains active after a dialog is popped up. In fact, nonmodal dialogs never
block other windows, so you can keep pressing the root’s button to generate as many
copies of the pop-up window as will fit on your screen.
'''
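# Note (added for clarity): makemodal is read from the command line above, so running the script
# with any extra argument (e.g. "python dlg-custom.py 1") switches the pop-up to modal behaviour,
# while running it with no arguments reproduces the nonmodal behaviour described here.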
| [
"[email protected]"
] | |
1ced0778202d32bf5b35354803964d6939afc6ea | 9ac35a2327ca9fddcf55077be58a1babffd23bdd | /cadence/tests/test_errors.py | 6921b0a8d11e06f2d032e6cc1b4e6d0ef653cd7c | [
"MIT"
] | permissive | meetchandan/cadence-python | f1eb987c135f620607a62495096a89494216d847 | cfd7a48e6da7c289c9ae0c29c94d12d2b05986e4 | refs/heads/master | 2022-12-14T12:46:32.364375 | 2020-09-16T15:50:55 | 2020-09-16T15:50:55 | 260,763,097 | 1 | 0 | MIT | 2020-09-16T15:48:14 | 2020-05-02T19:47:56 | Python | UTF-8 | Python | false | false | 1,347 | py | from unittest import TestCase
from cadence.errors import find_error, InternalServiceError, WorkflowExecutionAlreadyStartedError
from cadence.thrift import cadence_thrift
class TestError(TestCase):
def setUp(self) -> None:
self.internalServiceError = cadence_thrift.shared.InternalServiceError("ERROR")
self.sessionAlreadyExistError = cadence_thrift.shared.WorkflowExecutionAlreadyStartedError("ERROR", "REQUEST-ID",
"RUN-ID")
def test_internal_server_error(self):
response = cadence_thrift.WorkflowService.StartWorkflowExecution.response(
internalServiceError=self.internalServiceError)
error = find_error(response)
self.assertIsInstance(error, InternalServiceError)
self.assertEqual("ERROR", error.message)
def test_session_already_exists_error(self):
response = cadence_thrift.WorkflowService.StartWorkflowExecution.response(
sessionAlreadyExistError=self.sessionAlreadyExistError)
error = find_error(response)
self.assertIsInstance(error, WorkflowExecutionAlreadyStartedError)
self.assertEqual("ERROR", error.message)
self.assertEqual("REQUEST-ID", error.start_request_id)
self.assertEqual("RUN-ID", error.run_id)
| [
"[email protected]"
] | |
bf15a0134c6b3e379d9901b3901eb79bfb8cefa4 | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/9/xm5.py | 38c66cb97e0801b2ac4684ce92a27b4b9fd0b4e8 | [] | no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
if len(lineRemaining) > 2:
#data to print
lineRemaining = lineRemaining[1:-1]
print ' '.join(lineRemaining)
else:
print
def main(fileName):
with open(fileName) as f:
for line in f:
data = line.split()
if data[0] == 'xm5':
printFunction(data[1:])
else:
print 'ERROR'
return
if __name__ == '__main__':
main(sys.argv[1]) | [
"[email protected]"
] | |
47dd4b0d0b97967cfa1f6829d045d33383c9b932 | 96796bca1f00c5af89c695ff51691e977fda262c | /myEnvironments/multipleApps/multipleApps/urls.py | 1e1daa59868f00d10f30a34bb8adb6c29c2d563a | [] | no_license | LexiPearl/Python-Projects | 5be7ecb11ff7e332daf7b92d23e183511b67444c | c76ce5611d8abd8dfcdea24051cbdfe705a98ffd | refs/heads/master | 2021-01-19T11:35:50.624237 | 2017-04-28T04:13:13 | 2017-04-28T04:13:13 | 87,978,405 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 913 | py | """multipleApps URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
url(r'^', include('apps.loginregistration.urls')),
url(r'^courses/users_courses/', include('apps.courses_users.urls')),
url(r'^courses/', include('apps.courses.urls')),
]
| [
"[email protected]"
] | |
6f93021be2e728eb052b23276ba667565f0f0bb7 | 872ea32f551c803ac497a38667dc272965246561 | /tensorflow_transform/gaussianization.py | 320acb6f67fcda13b616b72cb43fb36c878774ab | [
"Apache-2.0"
] | permissive | tensorflow/transform | 5c4d74c15e7a13ef0901816dfe35b0901d6cb1da | d2bfc2640137324dcad7f7be365e6c851c01f4e9 | refs/heads/master | 2023-08-31T21:54:54.222760 | 2023-08-15T22:45:45 | 2023-08-15T22:46:20 | 81,509,390 | 1,030 | 267 | Apache-2.0 | 2023-08-11T22:57:56 | 2017-02-10T00:36:53 | Python | UTF-8 | Python | false | false | 13,792 | py | # Copyright 2020 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities used to compute parameters for gaussianization."""
import numpy as np
import tensorflow as tf
# The expressions to compute the first L-moments from the parameters of the
# Tukey HH distribution are taken from:
# Todd C. Headrick, and Mohan D. Pant. "Characterizing Tukey h and
# hh-Distributions through L-Moments and the L-Correlation," ISRN Applied
# Mathematics, vol. 2012, 2012. doi:10.5402/2012/980153
def tukey_hh_l_mean_and_scale(h_params):
"""Computes L-mean and L-scale for a Tukey HH distribution.
Args:
h_params: An np.array with dimension 2 on the first axis. The slice
h_params[0, ...] contains the left parameter of the distribution and
      h_params[1, ...] the right parameter. Each entry h must be in 0 <= h < 1.
Returns:
The tuple (L_mean, L_scale) containing the first two L-moments for the
given parameters. Each entry has the same shape as h_params, except for
the first axis, which is removed.
"""
one_div_sqrt2pi = 1.0 / np.sqrt(2.0 * np.pi)
hl = h_params[0, ...]
hr = h_params[1, ...]
dtype = h_params.dtype
l_1 = one_div_sqrt2pi * (1.0 / (hl - 1.0) + 1.0 / (1.0 - hr))
l_2 = one_div_sqrt2pi * (
(np.sqrt(2.0 - hl) + np.sqrt(2.0 - hr) - hl * np.sqrt(2.0 - hl) -
hr * np.sqrt(2 - hr)) /
((hl - 1.0) * (hr - 1.0) * np.sqrt((hl - 2.0) * (hr - 2.0))))
return (l_1.astype(dtype), l_2.astype(dtype))
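# Worked special case (added sketch, not part of the original module): for hl = hr = 0 the
# distribution reduces to a standard normal, and the expressions above give
#   l_1 = (1 / sqrt(2 * pi)) * (-1 + 1) = 0
#   l_2 = (1 / sqrt(2 * pi)) * (2 * sqrt(2) / 2) = 1 / sqrt(pi) ~= 0.5642
# which are the L-mean and L-scale of N(0, 1), a quick sanity check on the formulas.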
def _tukey_hh_l_skewness_and_kurtosis(h_params):
"""Computes L-skewness and L-kurtosis for a Tukey HH distribution.
Args:
h_params: An np.array with dimension 2 on the first axis. The slice
h_params[0, ...] contains the left parameter of the distribution and
h_params[1, ...] the right parameter.
Returns:
The tuple (L_skewness, L_kurtosis) for the given parameters. Each entry
has the same shape as h_params, except for the first axis, which is
removed.
"""
def skewness_num(h1, h2):
return (12 * np.sqrt(2.0 - h1) * (h2 - 2.0) * (h2 - 1.0) *
np.arctan(1.0 / np.sqrt(2.0 - h1)))
def skewness_den(h):
return h * np.sqrt(2 - h) - np.sqrt(2 - h)
def kurtosis_den_part(h):
return h * np.sqrt(2.0 - h) - np.sqrt(2.0 - h)
hl = h_params[0, ...]
hr = h_params[1, ...]
dtype = h_params.dtype
skewness = (skewness_num(hl, hr) -
np.pi * (hl - hr) * (hl - 2.0) * (hr - 2.0) -
skewness_num(hr, hl)) / (
2 * np.pi * np.sqrt((hl - 2.0) * (hr - 2.0)) *
(skewness_den(hl) + skewness_den(hr)))
kurtosis_num_1 = (
hr * np.sqrt((hl - 4.0) * (hl - 2.0) * (hl - 1.0) * (hr - 2.0)) -
2.0 * np.sqrt((hl - 4.0) * (hl - 1.0)))
kurtosis_num_2 = (hl * (hl - 3.0) * np.sqrt((hl - 4.0) * (hl - 1.0)) +
np.sqrt((hl - 4.0) * (hl - 2.0) * (hl - 1.0) * (hr - 2.0)))
kurtosis_num_3 = (30.0 * (hl - 1.0) *
np.sqrt((hl - 4.0) * (hl - 2.0) * (hr - 2.0) / (hl - 1.0)) *
(hr - 1.0) * np.arctan(np.sqrt(1.0 + 2.0 / (hl - 4.0))))
kurtosis_num_4 = (30.0 * (hl - 2) *
np.sqrt((hl - 4.0) * (hl - 1.0)) * (hl - 1.0) *
np.arctan(np.sqrt(1.0 + 2.0 / (hr - 4.0))))
kurtosis_den = (np.pi * np.sqrt((4.0 - hl) * (2.0 - hl) * (1.0 - hl)) *
(kurtosis_den_part(hl) + kurtosis_den_part(hr)))
kurtosis = (6.0 * np.pi * (kurtosis_num_1 - kurtosis_num_2) +
kurtosis_num_3 + kurtosis_num_4) / kurtosis_den
return (skewness.astype(dtype), kurtosis.astype(dtype))
def _binary_search(error_fn, low_value, high_value):
"""Binary search for a function given start and end interval.
This is a simple binary search over the values of the function error_fn given
the interval [low_value, high_value]. We expect that the starting condition is
error_fn(low_value) < 0 and error_fn(high_value) > 0 and we bisect the
interval until the exit conditions are met. The result is the final interval
[low_value, high_value] that is normally much smaller than the initial one,
but still satisfying the starting condition.
Args:
error_fn: Function mapping values to errors.
low_value: Lower interval endpoint. We expect f(low_value) < 0.
high_value: Higher interval endpoint. We expect f(high_value) > 0.
Returns:
The final interval endpoints (low_value, high_value) after the sequence of
bisections.
"""
# Exit conditions.
stop_iter_step = 10 # Max number of iterations.
stop_error_step = 1e-6 # Minimum function variation.
stop_value_step = 1e-6 # Minimum variable variation.
current_iter = 0
while True:
current_value = (low_value + high_value) / 2.0
current_error = error_fn(current_value)
if current_error < 0.0:
low_value = current_value
else:
high_value = current_value
current_iter += 1
if (current_iter > stop_iter_step or
np.abs(current_error) < stop_error_step or
high_value - low_value < stop_value_step):
break
return low_value, high_value
def _params_to_errors(h, delta_h, l_skewness_and_kurtosis):
"""Maps parameters to errors on L-skewness and L-kurtosis.
Args:
h: Value of right parameter of the Tukey HH distribution.
delta_h: Different between right and left parameter of the Tukey HH
distribution.
l_skewness_and_kurtosis: np.array containing the target values of
L-skewness and L-kurtosis.
Returns:
An np.array containing the difference between the values of L-skewness and
L-kurtosis corresponding to the parameters hl = h - delta_h, hr =h and the
target values.
"""
dtype = l_skewness_and_kurtosis.dtype
h_params = np.array([h - delta_h, h], dtype=dtype)
current_l_skewness_and_kurtosis = np.array(
_tukey_hh_l_skewness_and_kurtosis(h_params), dtype=dtype)
return current_l_skewness_and_kurtosis - l_skewness_and_kurtosis
def compute_tukey_hh_params(l_skewness_and_kurtosis):
"""Computes the H paramesters of a Tukey HH distribution.
Given the L-skewness and L-kurtosis of a Tukey HH distribution we compute
the H parameters of the distribution.
Args:
l_skewness_and_kurtosis: A np.array with shape (2,) containing L-skewness
and L-kurtosis.
Returns:
An np.array with the same type and shape of the argument containing the
left and right H parameters of the distribution.
"""
# Exit conditions for the search loop.
stop_iter_step = 20 # Max number of iteration for the search loop.
stop_error_step = 1e-6 # Minimum function variation.
stop_value_step = 1e-6 # Minimum variable variation.
dtype = l_skewness_and_kurtosis.dtype
# Returns zero parameters (i.e. treat as gaussian) if L-kurtosis is smaller
# than for a gaussian.
result = np.zeros_like(l_skewness_and_kurtosis)
if l_skewness_and_kurtosis[1] < 0.1226017:
return result
# If L-skewness is negative, swap the parameters.
swap_params = False
if l_skewness_and_kurtosis[0] < 0.0:
l_skewness_and_kurtosis[0] = -l_skewness_and_kurtosis[0]
swap_params = True
l_skewness_and_kurtosis[1] = np.minimum(
l_skewness_and_kurtosis[1], 1.0 - 1.0e-5)
  # If L-skewness is zero, left and right parameters are equal and there is
  # a closed form to compute them from L-kurtosis. We start from this value
  # and then change them to match simultaneously L-skewness and L-kurtosis.
  # For that, we parametrize the search space with the array
  # [h_right, h_right - h_left], i.e. the value of the right parameter and the
  # difference between the right and left parameters. In the search iteration, we
# alternate between updates on the first and the second entry of the search
# parameters.
initial_h = 3.0 - 1.0 / np.cos(
np.pi / 15.0 * (l_skewness_and_kurtosis[1] - 6.0))
search_params = np.array([initial_h, 0.0], dtype=dtype)
# Current lower and upper bounds for the search parameters.
min_search_params = np.array([initial_h, 0.0], dtype=dtype)
max_search_params = np.array([1.0 - 1.0e-7, initial_h], dtype=dtype)
current_iter = 0
previous_search_params = np.zeros_like(search_params)
while current_iter < stop_iter_step:
# Search for L-skewness at constant h. Increase delta_h.
error_skewness = lambda x: _params_to_errors( # pylint: disable=g-long-lambda
search_params[0], x, l_skewness_and_kurtosis)[0]
if error_skewness(max_search_params[1]) > 0.0:
low_delta_h, high_delta_h = _binary_search(
error_skewness, min_search_params[1], max_search_params[1])
search_params[1] = high_delta_h
max_search_params[1] = high_delta_h # The new delta is an upperbound.
upperbound_delta_found = True
else:
search_params[1] = max_search_params[1]
min_search_params[1] = max_search_params[1] # No solution: lowerbound.
upperbound_delta_found = False
# Search for L-kurtosis at constant possibly overestimated delta.
error_kurtosis = lambda x: _params_to_errors( # pylint: disable=g-long-lambda
x, search_params[1], l_skewness_and_kurtosis)[1]
low_h, high_h = _binary_search(
error_kurtosis, min_search_params[0], max_search_params[0])
if upperbound_delta_found:
search_params[0] = high_h
max_search_params[0] = high_h # Delta overestimated: upperbound for h.
else:
search_params[0] = low_h
min_search_params[0] = low_h # Delta underestimated: lowerbound for h.
max_search_params[1] = low_h # Delta not found, search on full range.
if upperbound_delta_found: # If not found, we repeat the first 2 steps.
# Otherwise, Search for delta at constant overestimated h.
error_skewness = lambda x: _params_to_errors( # pylint: disable=g-long-lambda
search_params[0], x, l_skewness_and_kurtosis)[0]
low_delta_h, high_delta_h = _binary_search(
error_skewness, min_search_params[1], max_search_params[1])
search_params[1] = low_delta_h
min_search_params[1] = low_delta_h
# Search for h at constant delta.
error_kurtosis = lambda x: _params_to_errors( # pylint: disable=g-long-lambda
x, search_params[1], l_skewness_and_kurtosis)[1]
low_h, high_h = _binary_search(
error_kurtosis, min_search_params[0], max_search_params[0])
search_params[0] = low_h
min_search_params[0] = low_h
current_error = _params_to_errors(
search_params[0], search_params[1], l_skewness_and_kurtosis)
delta_search_params = search_params - previous_search_params
current_iter += 1
previous_search_params = search_params.copy()
if (np.all(np.abs(current_error) < stop_error_step) or
np.all(np.abs(delta_search_params) < stop_value_step)):
break
result[0] = search_params[0] - search_params[1]
result[1] = search_params[0]
if swap_params:
result = result[::-1]
return result
def lambert_w(x):
"""Computes the Lambert W function of a `Tensor`.
Computes the principal branch of the Lambert W function, i.e. the value w such
  that w * exp(w) = x for a given x. For the principal branch, x must be real with
x >= -1 / e, and w >= -1.
Args:
x: A `Tensor` containing the values for which the principal branch of
the Lambert W function is computed.
Returns:
A `Tensor` with the same shape and dtype as x containing the value of the
Lambert W function.
"""
dtype = x.dtype
e = tf.constant(np.exp(1.0), dtype)
inv_e = tf.constant(np.exp(-1.0), dtype)
s = (np.exp(1) - 1.0) / (np.exp(2) - 1.0)
slope = tf.constant(s, dtype)
c = tf.constant(1 / np.exp(1) * (1 - s), dtype)
log_s = tf.math.log(x)
w_init = tf.where(
x < inv_e,
x,
tf.where(x < e,
slope * x + c,
(log_s + (1.0 / log_s - 1.0) * tf.math.log(log_s))))
def newton_update(count, w):
expw = tf.math.exp(w)
wexpw = w * expw
return count + 1, w - (wexpw - x) / (expw + wexpw)
count = tf.constant(0, tf.int32)
num_iter = tf.constant(8)
(unused_final_count, w) = tf.while_loop(
lambda count, w: tf.less(count, num_iter),
newton_update,
[count, w_init])
return w
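# Quick check of the defining identity (added sketch; the tensor values are illustrative):
#   x = tf.constant([0.5, 1.0, 5.0])
#   w = lambert_w(x)
#   # w * tf.exp(w) should reproduce x to within the accuracy of the 8 Newton iterations.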
def inverse_tukey_hh(x, hl, hr):
"""Compute the inverse of the Tukey HH function.
The Tukey HH function transforms a standard Gaussian distribution into the
Tukey HH distribution and it's defined as:
x = u * exp(hl * u ^ 2) for u < 0 and x = u * exp(hr * u ^ 2) for u >= 0.
Given the values of x, this function computes the corresponding values of u.
Args:
x: The input `Tensor`.
hl: The "left" parameter of the distribution. It must have the same dtype
and shape of x (or a broadcastable shape) or be a scalar.
hr: The "right" parameter of the distribution. It must have the same dtype
and shape of x (or a broadcastable shape) or be a scalar.
Returns:
The inverse of the Tukey HH function.
"""
def one_side(x, h):
h_x_square = tf.multiply(h, tf.square(x))
return tf.where(
# Prevents the 0 / 0 form for small values of x..
tf.less(h_x_square, 1.0e-7),
x, # The error is < 1e-14 for this case.
tf.sqrt(tf.divide(lambert_w(h_x_square), h)))
return tf.where(tf.less(x, 0.0), -one_side(-x, hl), one_side(x, hr))
| [
"[email protected]"
] | |
f9c962a39baa75c624eed77ea4bb3ed83b1d85ba | 4851d160a423b4a65e81a75d5b4de5218de958ee | /Number Format.py | 63d6f6c7b330bb5a08ff5f80773c51da98bf8514 | [] | no_license | LarisaOvchinnikova/python_codewars | 519508e5626303dcead5ecb839c6d9b53cb3c764 | 5399f4be17e4972e61be74831703a82ce9badffd | refs/heads/master | 2023-05-05T14:52:02.100435 | 2021-05-25T18:36:51 | 2021-05-25T18:36:51 | 319,399,343 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 273 | py | # https://www.codewars.com/kata/565c4e1303a0a006d7000127
def number_format(n):
sign = "" if n >=0 else "-"
n = str(abs(n))
if len(n) <= 3: return sign+n
s = []
while len(n)>0:
s.append(n[-3:])
n = n[:-3]
return sign+",".join(s[::-1]) | [
"[email protected]"
] | |
eaeecb735041bbbe5891d953465fba1e4783f1c7 | 43b9eb11e90dbf984f950e4885085c83daa719b2 | /migrations/versions/339a6b145e56_user_status.py | f81e899bdc79017e6803c52bc8c09c0dbee04e15 | [
"Apache-2.0"
] | permissive | dpdi-unifor/thorn | 8ec7982812fe07906567514ad6628154ea99f620 | 37695c66607f60b29afd25ac512c0242079e1342 | refs/heads/master | 2023-01-02T19:48:27.409446 | 2020-09-09T14:31:51 | 2020-09-09T14:31:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 711 | py | """User status
Revision ID: 339a6b145e56
Revises: 9f52309f0d44
Create Date: 2020-03-26 11:53:32.044767
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '339a6b145e56'
down_revision = '9f52309f0d44'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('user', sa.Column('status', sa.Enum('ENABLED', 'DELETED', 'PENDING_APPROVAL', name='UserStatusEnumType'), nullable=False))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('user', 'status')
# ### end Alembic commands ###
| [
"[email protected]"
] | |
57260f6b5d40f289986b5d8fb601c421eafeae75 | 35c1a591ce5ea045b72a1d9f89fc0d8f46cdd78b | /rice/deps/prompt_toolkit/application/dummy.py | ebe2f334fe11c7f8340bb99e392d2af6fc46a457 | [
"MIT"
] | permissive | jimhester/rice | 0a0aef48ccab3d6b2d7f700cc311977e8c4a3740 | 61cafc717d9398a57ecd2afb2a086afe1c676e30 | refs/heads/master | 2021-07-07T21:37:00.826756 | 2017-09-27T14:02:49 | 2017-09-27T14:02:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 998 | py | from __future__ import unicode_literals
from .application import Application
from prompt_toolkit.input import DummyInput
from prompt_toolkit.output import DummyOutput
__all__ = (
'DummyApplication',
)
class DummyApplication(Application):
"""
When no `Application` is running,
`prompt_toolkit.application.current.get_app` will run an instance of this
`Application`.
"""
def __init__(self):
super(DummyApplication, self).__init__(output=DummyOutput(), input=DummyInput())
def run(self):
raise NotImplementedError('A DummyApplication is not supposed to run.')
def run_async(self):
raise NotImplementedError('A DummyApplication is not supposed to run.')
def run_in_terminal(self):
raise NotImplementedError
def run_coroutine_in_terminal(self):
raise NotImplementedError
def run_system_command(self):
raise NotImplementedError
def suspend_to_background(self):
raise NotImplementedError
| [
"[email protected]"
] | |
9fa6656765c702e8f6936b48d78042e07de88013 | 3856dbedcf846f9845290e9b2efa4d18e300623d | /test/test_user_report.py | 25a240a1bb120206353d2d97b51104617f0cbe91 | [] | no_license | Valandur/webapi-client-python | 5b314da41803f5b55a5c6cce62d2384b86d0fa37 | 8502726bf3facb17c6fa681faf0f600207eb61ae | refs/heads/master | 2022-02-04T21:45:37.686703 | 2019-07-23T12:11:47 | 2019-07-23T12:11:47 | 113,748,693 | 2 | 0 | null | 2019-01-09T16:07:31 | 2017-12-10T12:38:14 | Python | UTF-8 | Python | false | false | 3,145 | py | # coding: utf-8
"""
Web-API
Access Sponge powered Minecraft servers through a WebAPI # Introduction This is the documentation of the various API routes offered by the WebAPI plugin. This documentation assumes that you are familiar with the basic concepts of Web API's, such as `GET`, `PUT`, `POST` and `DELETE` methods, request `HEADERS` and `RESPONSE CODES` and `JSON` data. By default this documentation can be found at http:/localhost:8080 (while your minecraft server is running) and the various routes start with http:/localhost:8080/api/v5... As a quick test try reaching the route http:/localhost:8080/api/v5/info (remember that you can only access \\\"localhost\\\" routes on the server on which you are running minecraft). This route should show you basic information about your server, like the motd and player count. # List endpoints Lots of objects offer an endpoint to list all objects (e.g. `GET: /world` to get all worlds). These endpoints return only the properties marked 'required' by default, because the list might be quite large. If you want to return ALL data for a list endpoint add the query parameter `details`, (e.g. `GET: /world?details`). > Remember that in this case the data returned by the endpoint might be quite large. # Debugging endpoints Apart from the `?details` flag you can also pass some other flags for debugging purposes. Remember that you must include the first query parameter with `?`, and further ones with `&`: `details`: Includes details for list endpoints `accept=[json/xml]`: Manually set the accept content type. This is good for browser testing, **BUT DON'T USE THIS IN PRODUCTION, YOU CAN SUPPLY THE `Accepts` HEADER FOR THAT** `pretty`: Pretty prints the data, also good for debugging in the browser. An example request might look like this: `http://localhost:8080/api/v5/world?details&accpet=json&pretty&key=MY-API-KEY` # Additional data Certain endpoints (such as `/player`, `/entity` and `/tile-entity` have additional properties which are not documented here, because the data depends on the concrete object type (eg. `Sheep` have a wool color, others do not) and on the other plugins/mods that are running on your server which might add additional data. You can also find more information in the github docs (https:/github.com/Valandur/Web-API/tree/master/docs/DATA.md) # noqa: E501
OpenAPI spec version: 5.4.2-S7.1.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.models.user_report import UserReport # noqa: E501
from swagger_client.rest import ApiException
class TestUserReport(unittest.TestCase):
"""UserReport unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testUserReport(self):
"""Test UserReport"""
# FIXME: construct object with mandatory attributes with example values
# model = swagger_client.models.user_report.UserReport() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
3014d12db736cc4a036c7172f8e025c97af34d2f | aee7a6cca6a2674f044d7a1cacf7c72d7438b8b1 | /cup_skills/stats/average_rewardgp_reset_good_7.py | 2458b18354d006e66a6d8060bf125fd8d3f27459 | [] | no_license | lagrassa/rl-erase | efd302526504c1157fa5810e886caccba8570f1b | 0df5c8ce4835c4641a2303d11095e9c27307f754 | refs/heads/master | 2021-05-13T13:36:12.901945 | 2019-08-01T02:13:15 | 2019-08-01T02:13:15 | 116,709,555 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 780 | py | 52.72727272727272,45.45454545454545,35.45454545454545,38.18181818181819,19.090909090909093,44.54545454545455,20.909090909090907,52.72727272727272,51.81818181818182,50.0,-100.0,28.18181818181818,-100.0,28.18181818181818,39.09090909090909,49.09090909090909,35.45454545454545,38.18181818181819,31.818181818181817,52.72727272727272,22.727272727272727,31.818181818181817,26.36363636363636,18.181818181818183,40.909090909090914,23.636363636363637,48.18181818181818,29.09090909090909,15.454545454545453,52.72727272727272,47.27272727272727,37.27272727272727,30.0,26.36363636363636,31.818181818181817,50.90909090909091,36.36363636363637,33.63636363636363,51.81818181818182,45.45454545454545,26.36363636363636,19.090909090909093,20.0,32.72727272727273,20.909090909090907,24.545454545454547, | [
"[email protected]"
] | |
2bd40a80b828137202059058e88f7504df2e6470 | 8613ec7f381a6683ae24b54fb2fb2ac24556ad0b | /boot/hard/2017.py | 36601afabce20178c45edae2db36c8014b9864eb | [] | no_license | Forest-Y/AtCoder | 787aa3c7dc4d999a71661465349428ba60eb2f16 | f97209da3743026920fb4a89fc0e4d42b3d5e277 | refs/heads/master | 2023-08-25T13:31:46.062197 | 2021-10-29T12:54:24 | 2021-10-29T12:54:24 | 301,642,072 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 715 | py | q = int(input())
l, r = [0] * q, [0] * q
for i in range(q):
l[i], r[i] = map(int, input().split())
mini = min(min(l), min(r))
maxi = max(max(l), max(r))
ans = [0] * (maxi + 1)
prime = [0] * (maxi + 1)
def judge_prime(n):
for i in range(2, int(n ** 0.5) + 1):
if n % i == 0:
return False
return True if n != 1 else False
for i in range((mini + 1) // 2, maxi + 1):
prime[i] = judge_prime(i)
for i in range(mini, maxi + 1, 2):
ans[i] = ans[i - 2] + 1 if prime[i] and prime[(i + 1) // 2] else ans[i - 2]
#print(i, ans[i], ans[i - 2])
#print(ans[1:])
for i in range(q):
#print(ans[r[i]], ans[l[i] - 2], ans[l[i] - 1])
print(ans[r[i]] - ans[max(0, l[i] - 2)])
| [
"[email protected]"
] | |
6018b78f698286b8dcc5c68df4f3473b415eb318 | bf8870d923adca9877d4b4dacef67f0a454727a8 | /_other/decorator.py | 23fb94a26518ba636f1e500656ad6d7d4a7a468e | [] | no_license | artkpv/code-dojo | 6f35a785ee5ef826e0c2188b752134fb197b3082 | 0c9d37841e7fc206a2481e4640e1a024977c04c4 | refs/heads/master | 2023-02-08T22:55:07.393522 | 2023-01-26T16:43:33 | 2023-01-26T16:43:33 | 158,388,327 | 1 | 0 | null | 2023-01-26T08:39:46 | 2018-11-20T12:45:44 | C# | UTF-8 | Python | false | false | 922 | py | import threading
def persistant_caller(max_calls=None, timeout_ms=None):
def actual(function):
def persistant_function(*args, **kwargs):
count = 0
while True:
try:
count += 1
return function(*args, **kwargs)
except Exception as e:
if count > max_calls:
# report exception
raise e
# report exception
if timeout_ms:
                        import time  # local import: the threading module has no sleep()
                        time.sleep(timeout_ms / 1000.0)  # timeout_ms is given in milliseconds
return persistant_function
return actual
count = 0
@persistant_caller(max_calls=2, timeout_ms=100)
def printer(arg1, key1=None, key2=None):
global count
if count < 0:
count += 1
raise Exception('first exception')
print('printer', arg1, key1, key2)
printer(1, key1='key1val', key2='key2val')
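# Note (added for clarity): with the module-level count starting at 0, the "count < 0" guard in
# printer() never fires, so the call above succeeds on the first attempt; raising the guard
# (e.g. to "count < 1") would make the first call fail and exercise the retry loop.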
| [
"[email protected]"
] | |
eacb9522092aa5e0ceb98aa10b3504cb2ba0ef10 | aceaf99df06081676f33145ff104009fcf30e922 | /core/permissions.py | 709d90b2de6a29e2b5eb15e460ad2a721747fd68 | [] | no_license | jonatasoli/process_monitoring | 2222d692b5255cbdf1e982940bf4f8a749257295 | 6ba2b3cf68b8bf8be6e1a4547b98c09e08d91794 | refs/heads/master | 2021-01-24T01:59:50.231667 | 2018-02-27T20:00:52 | 2018-02-27T20:00:52 | 122,831,848 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 744 | py | from rest_framework import permissions
class UpdateOwnProfile(permissions.BasePermission):
"""Allow users to edit their own profile"""
def has_object_permission(self, request, view, obj):
"""Check user is trying to edit their own profile"""
if request.method in permissions.SAFE_METHODS:
return True
return obj.id == request.user.id
class PostOwnStatus(permissions.BasePermission):
"""Allow users to update their own profile."""
def has_object_permission(self, request, view, obj):
"""Check the user is trying to update their own status."""
if request.method in permissions.SAFE_METHODS:
return True
return obj.user_profile.id == request.user.id
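# Typical wiring (added sketch; the view class below is hypothetical): these permission classes
# are meant to be listed in a DRF view's permission_classes, e.g.
#   class UserProfileViewSet(viewsets.ModelViewSet):
#       permission_classes = (permissions.IsAuthenticated, UpdateOwnProfile)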
| [
"[email protected]"
] | |
c1bcd65d34b7a3e59e2d47a48b25316f3ee6c058 | 2e682fd72e3feaa70e3f7bf2a3b83c50d783ec02 | /PyTorch/contrib/cv/detection/SOLOv1/mmdet/models/mask_heads/fcn_mask_head.py | 26cb3c0ff0c362870863dc2fddb5f9a2379cb87e | [
"GPL-1.0-or-later",
"LicenseRef-scancode-proprietary-license",
"BSD-2-Clause",
"Apache-2.0",
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | Ascend/ModelZoo-PyTorch | 4c89414b9e2582cef9926d4670108a090c839d2d | 92acc188d3a0f634de58463b6676e70df83ef808 | refs/heads/master | 2023-07-19T12:40:00.512853 | 2023-07-17T02:48:18 | 2023-07-17T02:48:18 | 483,502,469 | 23 | 6 | Apache-2.0 | 2022-10-15T09:29:12 | 2022-04-20T04:11:18 | Python | UTF-8 | Python | false | false | 8,012 | py | # Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the BSD 3-Clause License (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://opensource.org/licenses/BSD-3-Clause
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mmcv
import numpy as np
import pycocotools.mask as mask_util
import torch
import torch.nn as nn
from torch.nn.modules.utils import _pair
from mmdet.core import auto_fp16, force_fp32, mask_target
from ..builder import build_loss
from ..registry import HEADS
from ..utils import ConvModule
@HEADS.register_module
class FCNMaskHead(nn.Module):
def __init__(self,
num_convs=4,
roi_feat_size=14,
in_channels=256,
conv_kernel_size=3,
conv_out_channels=256,
upsample_method='deconv',
upsample_ratio=2,
num_classes=81,
class_agnostic=False,
conv_cfg=None,
norm_cfg=None,
loss_mask=dict(
type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)):
super(FCNMaskHead, self).__init__()
if upsample_method not in [None, 'deconv', 'nearest', 'bilinear']:
raise ValueError(
'Invalid upsample method {}, accepted methods '
'are "deconv", "nearest", "bilinear"'.format(upsample_method))
self.num_convs = num_convs
# WARN: roi_feat_size is reserved and not used
self.roi_feat_size = _pair(roi_feat_size)
self.in_channels = in_channels
self.conv_kernel_size = conv_kernel_size
self.conv_out_channels = conv_out_channels
self.upsample_method = upsample_method
self.upsample_ratio = upsample_ratio
self.num_classes = num_classes
self.class_agnostic = class_agnostic
self.conv_cfg = conv_cfg
self.norm_cfg = norm_cfg
self.fp16_enabled = False
self.loss_mask = build_loss(loss_mask)
self.convs = nn.ModuleList()
for i in range(self.num_convs):
in_channels = (
self.in_channels if i == 0 else self.conv_out_channels)
padding = (self.conv_kernel_size - 1) // 2
self.convs.append(
ConvModule(
in_channels,
self.conv_out_channels,
self.conv_kernel_size,
padding=padding,
conv_cfg=conv_cfg,
norm_cfg=norm_cfg))
upsample_in_channels = (
self.conv_out_channels if self.num_convs > 0 else in_channels)
if self.upsample_method is None:
self.upsample = None
elif self.upsample_method == 'deconv':
self.upsample = nn.ConvTranspose2d(
upsample_in_channels,
self.conv_out_channels,
self.upsample_ratio,
stride=self.upsample_ratio)
else:
self.upsample = nn.Upsample(
scale_factor=self.upsample_ratio, mode=self.upsample_method)
out_channels = 1 if self.class_agnostic else self.num_classes
logits_in_channel = (
self.conv_out_channels
if self.upsample_method == 'deconv' else upsample_in_channels)
self.conv_logits = nn.Conv2d(logits_in_channel, out_channels, 1)
self.relu = nn.ReLU(inplace=True)
self.debug_imgs = None
def init_weights(self):
for m in [self.upsample, self.conv_logits]:
if m is None:
continue
nn.init.kaiming_normal_(
m.weight, mode='fan_out', nonlinearity='relu')
nn.init.constant_(m.bias, 0)
@auto_fp16()
def forward(self, x):
for conv in self.convs:
x = conv(x)
if self.upsample is not None:
x = self.upsample(x)
if self.upsample_method == 'deconv':
x = self.relu(x)
mask_pred = self.conv_logits(x)
return mask_pred
def get_target(self, sampling_results, gt_masks, rcnn_train_cfg):
pos_proposals = [res.pos_bboxes for res in sampling_results]
pos_assigned_gt_inds = [
res.pos_assigned_gt_inds for res in sampling_results
]
mask_targets = mask_target(pos_proposals, pos_assigned_gt_inds,
gt_masks, rcnn_train_cfg)
return mask_targets
@force_fp32(apply_to=('mask_pred', ))
def loss(self, mask_pred, mask_targets, labels):
loss = dict()
if self.class_agnostic:
loss_mask = self.loss_mask(mask_pred, mask_targets,
torch.zeros_like(labels))
else:
loss_mask = self.loss_mask(mask_pred, mask_targets, labels)
loss['loss_mask'] = loss_mask
return loss
def get_seg_masks(self, mask_pred, det_bboxes, det_labels, rcnn_test_cfg,
ori_shape, scale_factor, rescale):
"""Get segmentation masks from mask_pred and bboxes.
Args:
mask_pred (Tensor or ndarray): shape (n, #class+1, h, w).
For single-scale testing, mask_pred is the direct output of
model, whose type is Tensor, while for multi-scale testing,
it will be converted to numpy array outside of this method.
det_bboxes (Tensor): shape (n, 4/5)
det_labels (Tensor): shape (n, )
img_shape (Tensor): shape (3, )
rcnn_test_cfg (dict): rcnn testing config
ori_shape: original image size
Returns:
list[list]: encoded masks
"""
if isinstance(mask_pred, torch.Tensor):
mask_pred = mask_pred.sigmoid().cpu().numpy()
assert isinstance(mask_pred, np.ndarray)
# when enabling mixed precision training, mask_pred may be float16
# numpy array
mask_pred = mask_pred.astype(np.float32)
cls_segms = [[] for _ in range(self.num_classes - 1)]
bboxes = det_bboxes.cpu().numpy()[:, :4]
labels = det_labels.cpu().numpy() + 1
if rescale:
img_h, img_w = ori_shape[:2]
else:
img_h = np.round(ori_shape[0] * scale_factor).astype(np.int32)
img_w = np.round(ori_shape[1] * scale_factor).astype(np.int32)
scale_factor = 1.0
for i in range(bboxes.shape[0]):
if not isinstance(scale_factor, (float, np.ndarray)):
scale_factor = scale_factor.cpu().numpy()
bbox = (bboxes[i, :] / scale_factor).astype(np.int32)
label = labels[i]
w = max(bbox[2] - bbox[0] + 1, 1)
h = max(bbox[3] - bbox[1] + 1, 1)
if not self.class_agnostic:
mask_pred_ = mask_pred[i, label, :, :]
else:
mask_pred_ = mask_pred[i, 0, :, :]
bbox_mask = mmcv.imresize(mask_pred_, (w, h))
bbox_mask = (bbox_mask > rcnn_test_cfg.mask_thr_binary).astype(
np.uint8)
if rcnn_test_cfg.get('crop_mask', False):
im_mask = bbox_mask
else:
im_mask = np.zeros((img_h, img_w), dtype=np.uint8)
im_mask[bbox[1]:bbox[1] + h, bbox[0]:bbox[0] + w] = bbox_mask
if rcnn_test_cfg.get('rle_mask_encode', True):
rle = mask_util.encode(
np.array(im_mask[:, :, np.newaxis], order='F'))[0]
cls_segms[label - 1].append(rle)
else:
cls_segms[label - 1].append(im_mask)
return cls_segms
| [
"[email protected]"
] | |
f488fb1b598893609ff4510a7ee334fda84ad105 | 212724dd876c15ef801fb781e907b1c7dd08f4ae | /skyline/webapp/gunicorn.py | 50d0f6817e3af53960712284da80d1324840b628 | [
"MIT"
] | permissive | wfloutier/skyline | b9e769cddccdefeeb7c7cc258524bbf489f9d5eb | b12758dc11564de93c7ad76c1f8ed3327db78aa4 | refs/heads/master | 2020-08-08T03:19:40.283298 | 2019-10-09T11:05:13 | 2019-10-09T11:05:13 | 213,693,601 | 0 | 0 | NOASSERTION | 2019-10-08T16:20:15 | 2019-10-08T16:20:15 | null | UTF-8 | Python | false | false | 1,378 | py | import sys
import os.path
import logging
# import multiprocessing
# import traceback
from logging.handlers import TimedRotatingFileHandler, MemoryHandler
sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir))
sys.path.insert(0, os.path.dirname(__file__))
import settings
bind = '%s:%s' % (settings.WEBAPP_IP, str(settings.WEBAPP_PORT))
# workers = multiprocessing.cpu_count() * 2 + 1
workers = 2
backlog = 10
skyline_app = 'webapp'
skyline_app_logger = '%sLog' % skyline_app
logfile = '%s/%s.log' % (settings.LOG_PATH, skyline_app)
logger = logging.getLogger(skyline_app_logger)
pidfile = '%s/%s.pid' % (settings.PID_PATH, skyline_app)
accesslog = '%s/webapp.access.log' % (settings.LOG_PATH)
access_log_format = '%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s"'
errorlog = '%s/webapp.log' % (settings.LOG_PATH)
logger.setLevel(logging.DEBUG)
formatter = logging.Formatter("%(asctime)s :: %(process)s :: %(message)s", datefmt="%Y-%m-%d %H:%M:%S")
handler = logging.handlers.TimedRotatingFileHandler(
logfile,
when="midnight",
interval=1,
backupCount=5)
memory_handler = logging.handlers.MemoryHandler(100,
flushLevel=logging.DEBUG,
target=handler)
handler.setFormatter(formatter)
logger.addHandler(memory_handler)
| [
"[email protected]"
] | |
f96b6739d30de98f438bfc15e544eb95f5523574 | 4a7a6f629e4dd16b5ba3db23a6b6369dbb19c10d | /a038- 數字翻轉.py | f4df8c96273b40ed14b01a7ac42e057c5349a0a8 | [] | no_license | jlhung/ZeroJudge-Python | 1170fc70ffc6a1a577d035cd70289529d2bbc07e | 356381363891ba05302736746c698ea85668af50 | refs/heads/master | 2022-12-08T06:42:12.160731 | 2020-09-03T12:04:49 | 2020-09-03T12:04:49 | 282,219,015 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 325 | py | '''
20200723 v1.0 jlhung "0"也要輸出 要消除尾數的0
'''
while True:
try:
n = int(input())
if n == 0:
print(0)
break
while n % 10 == 0:
n //= 10
print(str(n)[::-1])
except(EOFError):
break | [
"[email protected]"
] | |
3bdbd3cfdd89c89ececba6bd06fdd7af1e184e39 | 6e68584f2819351abe628b659c01184f51fec976 | /Centre_College/CSC_339_SP2015/vindiniumAI/pybrain/optimization/memetic/inversememetic.py | f38eec205b10fd0408828968956d69f6cf8c206b | [
"WTFPL"
] | permissive | DanSGraham/code | 0a16a2bfe51cebb62819cd510c7717ae24b12d1b | fc54b6d50360ae12f207385b5d25adf72bfa8121 | refs/heads/master | 2020-03-29T21:09:18.974467 | 2017-06-14T04:04:48 | 2017-06-14T04:04:48 | 36,774,542 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 347 | py | __author__ = 'Tom Schaul, [email protected]'
from memetic import MemeticSearch
class InverseMemeticSearch(MemeticSearch):
""" Interleaving local search with topology search (inverse of memetic search) """
def _learnStep(self):
self.switchMutations()
MemeticSearch._learnStep(self)
self.switchMutations()
| [
"[email protected]"
] | |
9866fcf46bab6408ee2d067adcfed3f1ed0287ad | fcdfb4231b64e38a5f6611057097def815a6a987 | /baidumap/tests/autopilot/baidumap/tests/__init__.py | 0bf71b2ca06c109fd48386845c2031198789f74a | [] | no_license | liu-xiao-guo/baidumap | f2967efc845347bb40769ea7202bb8d4b4c6e66d | e6ba8ba6bb3df4e2956af55414e5e8a1a34ac06a | refs/heads/master | 2021-01-10T08:45:01.423685 | 2016-03-23T04:47:49 | 2016-03-23T04:47:49 | 54,531,442 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,687 | py | # -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*-
"""Ubuntu Touch App Autopilot tests."""
import os
import logging
import baidumap
from autopilot.testcase import AutopilotTestCase
from autopilot import logging as autopilot_logging
import ubuntuuitoolkit
from ubuntuuitoolkit import base
logger = logging.getLogger(__name__)
class BaseTestCase(AutopilotTestCase):
"""A common test case class
"""
local_location = os.path.dirname(os.path.dirname(os.getcwd()))
local_location_qml = os.path.join(local_location, 'Main.qml')
click_package = '{0}.{1}'.format('baidumap', 'liu-xiao-guo')
def setUp(self):
super(BaseTestCase, self).setUp()
self.launcher, self.test_type = self.get_launcher_and_type()
self.app = baidumap.TouchApp(self.launcher(), self.test_type)
def get_launcher_and_type(self):
if os.path.exists(self.local_location_qml):
launcher = self.launch_test_local
test_type = 'local'
else:
launcher = self.launch_test_click
test_type = 'click'
return launcher, test_type
@autopilot_logging.log_action(logger.info)
def launch_test_local(self):
return self.launch_test_application(
base.get_qmlscene_launch_command(),
self.local_location_qml,
app_type='qt',
emulator_base=ubuntuuitoolkit.UbuntuUIToolkitCustomProxyObjectBase)
@autopilot_logging.log_action(logger.info)
def launch_test_click(self):
return self.launch_click_package(
self.click_package,
emulator_base=ubuntuuitoolkit.UbuntuUIToolkitCustomProxyObjectBase)
| [
"[email protected]"
] | |
e29850d4bc107cdd9a707c816fea75d159dd1ae1 | 4cae2a0808d0f200a5f91a9724419a081b8c3eb0 | /create_biometric_users/models/ecube_bio_machine.py | 57453c1b50ef85bdab1500893f326346dbd611f0 | [] | no_license | khyasir/Latitude_Custom_Modules | 7392ba47da8c172f46902d32454e13614b5d5e8b | 6758fc2a97073609dc305e71571f9ea42916f71b | refs/heads/master | 2021-05-02T12:04:37.953490 | 2018-02-08T08:52:24 | 2018-02-08T08:52:24 | 120,735,490 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 257 | py | from openerp import models, fields, api
class EcubeMachine(models.Model):
_name = 'ecube.machine'
_description = 'EcubeMachine'
name = fields.Char('Machine Name')
machine_ip = fields.Char('Machine IP')
machine_status = fields.Boolean('Machine Status') | [
"[email protected]"
] | |
8258490a8523ca5ddcc472087885ef1dc25aa68b | f2cc1dc87486833613fb83543c68784849fd7319 | /subtests/test_search_item.py | e063b884b30225a9c67e4b1ebc4d511584d3914c | [] | no_license | EduardoUrzuaBo/platziChallenge | cc953e2615653d575cf079bceea4fdcad75a4da0 | a8f06c98f14ee58db47848ec287dcd105b685dcb | refs/heads/master | 2023-07-29T10:44:16.469765 | 2021-09-05T20:14:42 | 2021-09-05T20:14:42 | 403,379,274 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,034 | py | from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from src.testproject.decorator import report_assertion_errors
"""
This pytest test was automatically generated by TestProject
Project: Test Projects
Package: TestProject.Generated.Tests.TestProjects
Test: Search Item
Generated by: Eduardo Andres Urzuas ([email protected])
Generated on 09/02/2021, 03:56:35
"""
@report_assertion_errors
def test_main(driver):
"""This test was auto generated from steps of the 'CreateAccount' test."""
# Test Parameters
SearchItem = "Books"
# 1. Click 'q'
q = driver.find_element(By.CSS_SELECTOR,
"#search")
q.click()
# 2. Type '{SearchItem}' in 'q'
q = driver.find_element(By.CSS_SELECTOR,
"#search")
q.send_keys(f'{SearchItem}')
# 3. Send 'ENTER' key(s)
ActionChains(driver).send_keys(Keys.ENTER).perform()
| [
"[email protected]"
] | |
92aa7a25070d981b4443680ae1a1621f0f40d582 | ce4d1c3a1522f382d9b3f73b7f126e7a3616bfb5 | /projects/DensePose/densepose/data/datasets/coco.py | ddd03c25b6956e8afa7d78ac0a259d255fb51541 | [
"Apache-2.0"
] | permissive | davidnvq/detectron2 | 6c01512326687e86ab50c0f89af4e926c0007ae6 | eaca19840e5db014c3dd37dee9920d780b3b6165 | refs/heads/master | 2022-04-26T03:29:08.080258 | 2020-04-24T09:05:07 | 2020-04-24T09:05:07 | 258,421,912 | 1 | 0 | Apache-2.0 | 2020-04-24T06:08:26 | 2020-04-24T06:08:25 | null | UTF-8 | Python | false | false | 4,143 | py | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import os
from dataclasses import dataclass
from typing import Any, Dict, Iterable, Optional
from detectron2.data import DatasetCatalog, MetadataCatalog
from detectron2.data.datasets import load_coco_json
DENSEPOSE_KEYS = ["dp_x", "dp_y", "dp_I", "dp_U", "dp_V", "dp_masks"]
DENSEPOSE_METADATA_URL_PREFIX = "https://dl.fbaipublicfiles.com/densepose/data/"
@dataclass
class CocoDatasetInfo:
name: str
images_root: str
annotations_fpath: str
DATASETS = [
CocoDatasetInfo(
name="densepose_coco_2014_train",
images_root="coco/train2014",
annotations_fpath="coco/annotations/densepose_train2014.json",
),
CocoDatasetInfo(
name="densepose_coco_2014_minival",
images_root="coco/val2014",
annotations_fpath="coco/annotations/densepose_minival2014.json",
),
CocoDatasetInfo(
name="densepose_coco_2014_minival_100",
images_root="coco/val2014",
annotations_fpath="coco/annotations/densepose_minival2014_100.json",
),
CocoDatasetInfo(
name="densepose_coco_2014_valminusminival",
images_root="coco/val2014",
annotations_fpath="coco/annotations/densepose_valminusminival2014.json",
),
CocoDatasetInfo(
name="densepose_chimps",
images_root="densepose_evolution/densepose_chimps",
annotations_fpath="densepose_evolution/annotations/densepose_chimps_densepose.json",
),
]
def _is_relative_local_path(path: os.PathLike):
path_str = os.fsdecode(path)
return ("://" not in path_str) and not os.path.isabs(path)
def _maybe_prepend_base_path(base_path: Optional[os.PathLike], path: os.PathLike):
"""
Prepends the provided path with a base path prefix if:
1) base path is not None;
2) path is a local path
"""
if base_path is None:
return path
if _is_relative_local_path(path):
return os.path.join(base_path, path)
return path
def get_metadata(base_path: Optional[os.PathLike]) -> Dict[str, Any]:
"""
Returns metadata associated with COCO DensePose datasets
Args:
base_path: Optional[os.PathLike]
Base path used to load metadata from
Returns:
Dict[str, Any]
Metadata in the form of a dictionary
"""
meta = {
"densepose_transform_src": _maybe_prepend_base_path(
base_path, "UV_symmetry_transforms.mat"
),
"densepose_smpl_subdiv": _maybe_prepend_base_path(base_path, "SMPL_subdiv.mat"),
"densepose_smpl_subdiv_transform": _maybe_prepend_base_path(
base_path, "SMPL_SUBDIV_TRANSFORM.mat"
),
}
return meta
def register_dataset(dataset_data: CocoDatasetInfo, datasets_root: Optional[os.PathLike] = None):
"""
Registers provided COCO DensePose dataset
Args:
dataset_data: CocoDatasetInfo
Dataset data
datasets_root: Optional[os.PathLike]
Datasets root folder (default: None)
"""
annotations_fpath = _maybe_prepend_base_path(datasets_root, dataset_data.annotations_fpath)
images_root = _maybe_prepend_base_path(datasets_root, dataset_data.images_root)
def load_annotations():
return load_coco_json(
json_file=annotations_fpath,
image_root=images_root,
dataset_name=dataset_data.name,
extra_annotation_keys=DENSEPOSE_KEYS,
)
DatasetCatalog.register(dataset_data.name, load_annotations)
MetadataCatalog.get(dataset_data.name).set(
json_file=annotations_fpath,
image_root=images_root,
**get_metadata(DENSEPOSE_METADATA_URL_PREFIX)
)
def register_datasets(
datasets_data: Iterable[CocoDatasetInfo], datasets_root: Optional[os.PathLike] = None
):
"""
Registers provided COCO DensePose datasets
Args:
datasets_data: Iterable[CocoDatasetInfo]
An iterable of dataset datas
datasets_root: Optional[os.PathLike]
Datasets root folder (default: None)
"""
for dataset_data in datasets_data:
register_dataset(dataset_data, datasets_root)
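# NOTE: added usage sketch, not from the original module; the datasets root below
# is an assumed local path, shown only to illustrate how the helpers above combine:
#     from densepose.data.datasets.coco import DATASETS, register_datasets
#     register_datasets(DATASETS, datasets_root="datasets")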
| [
"[email protected]"
] | |
a071d653f678661efe5f76d8153b380eb2aa9da1 | 5456320f03ed956ff7b1ad6a9539d65a602c71d4 | /mozCingi/fuzzers/mutagen/mutagenExecutor.py | 03e99823055ea5ebf8b00cf285f353720184fc6d | [] | no_license | ShakoHo/mozCingi | 9020cbb4aa65308ca5fd5bf9c074230f1fddb751 | 39239411abc840cd58a05f1fa41a24ae7cf9695f | refs/heads/master | 2016-08-12T12:51:16.331671 | 2016-03-25T09:35:54 | 2016-03-25T09:35:54 | 49,626,247 | 0 | 4 | null | null | null | null | UTF-8 | Python | false | false | 857 | py | __author__ = 'shako'
import os
from mozCingi.util.mozITPWrapper import MozITPWrapper
from mozCingi.steps.executor import AbsExecutor
class MutagenExecutor(AbsExecutor):
DEFAULT_EXEC_LOG_NAME = "exec.log"
def launch_execute_file(self):
mozitp_obj = MozITPWrapper()
pack_file_name = self.fuzzer_name + "_" + str(self.obj_index) + ".zip"
pack_file_path = os.path.join(self.working_dir, self.DEFAULT_ROOT_TMP_DIR, pack_file_name)
execution_log_dir = os.path.join(self.working_dir, self.DEFAULT_ROOT_LOG_DIR, self.fuzzer_name)
if os.path.exists(execution_log_dir) is False:
os.makedirs(execution_log_dir)
execution_log_path = os.path.join(execution_log_dir, self.DEFAULT_EXEC_LOG_NAME)
mozitp_obj.launch_itp_for_fuzz(pack_file_path, execution_log_path)
mozitp_obj.stop_itp()
| [
"[email protected]"
] | |
a3dbfd07afbe0391b734bd981eafc9d8ac28b53b | 64fe4fcaeb71b5e4d448abed92e03bebd838b1a2 | /Models/Form_Factors.py | b5559332237915e69187a8b4ae18f8282d5fa4c1 | [] | no_license | Caster89/Scattering_Analysis | ea2ddbd9311f0beebae6f083a37d843f44d3d00b | 2bd14efb2d1bb6c1af5173a8ed98b668dbfc4673 | refs/heads/master | 2021-01-19T16:04:41.322316 | 2017-12-17T16:22:46 | 2017-12-17T16:22:46 | 88,247,591 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 10,797 | py | from config import _AvlbUnits, _UnitsSymbols,_UnitsConv, _AvlbSASFit, _AvlbSASFitDic, _AvlbSASFitDicInv, _lmfitModels, _lmfitModelFunctions, _lmfitDistFunctions
import numpy as np
import scipy as sp
from scipy import signal
from scipy import interpolate
from scipy.integrate import dblquad, tplquad
import logging
import lmfit
from Distributions import *
from lmfit import minimize, Parameter, report_fit
try:
import gmpy2
from gmpy2 import mpz,mpq,mpfr,mpc
except:
gmpy2 = None
from decimal import Decimal
print 'The Schultz fitting function is optimized by using the GMPY2 module to\
deal with the large numbers required. The module was not found, so Decimal will\
be used instead, but the calculations will be slower.'
"""
The distributions presented here can be found in Polydispersity analysis of scattering
data from self-assembled systems. Phys. Rev. A, 45, 2428-2438.
DOI: 10.1103/PhysRevA.45.2428
"""
def single_gauss_spheres(q,R_av = 1,sigma = 1,I0 = 1,bckg=0):
"""sing_gauss_spheres: calculates the scattering pattern of an assembly of
spheres which have a Gaussian number density size distribution.
Args
        q (numpy.array): the array containing the list of q-values for which to
calculate the scattering
R_av (int): the mean of the size distribution. Defaults to 1
sigma (int): the dispersion of the distribution. Defaults to 1
I0 (int): the prefactor which includes information on the scattering
length density (SLD) and the concentration of particles. Defaults
to 1
bckg (int): the background value to use in case the background is
not perfectly subtracted. Defaults to 0.
Returns
the scattering curve which has the same size as q
"""
P_q=(4.*np.pi/q**3.)**2.*((1.+q**2.*(R_av**2.+sigma**2.))/2.+\
(((-1.+q**2*R_av**2)/2.-3./2.*q**2.*sigma**2.-2.*q**4.*sigma**4.)*\
np.cos(2.*q*R_av)-q*R_av*(1.+2.*q**2.*sigma**2.)*\
np.sin(2.*q*R_av))*np.exp(-2.*q**2.*sigma**2))
return np.array(10**(I0)*P_q+bckg)
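# NOTE: added usage sketch, not from the original module; parameter values are
# illustrative assumptions and q must be in units reciprocal to those of R_av:
#     q = np.linspace(0.05, 2.0, 500)
#     I_gauss = single_gauss_spheres(q, R_av=5.0, sigma=0.5, I0=0.0, bckg=0.0)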
def double_gauss_spheres(q,R1_av = 1,sigma1 = 1, R2_av = 1, sigma2 = 1, I0 = 1,ratio=0.5, bckg = 0):
"""double_gauss_spheres: calculates the scattering pattern of an assembly of
spheres which have a bimodal Gaussian size distribution.
Args
        q (numpy.array): the array containing the list of q-values for which to
calculate the scattering
R_av1 (int): the mean of the size distribution of the first
peak. Defaults to 1
sigma1 (int): the dispersion of the first peak. Defaults to 1
R_av2 (int): the mean of the size distribution of the second
peak. Defaults to 1
sigma2 (int): the dispersion of the second peak. Defaults to 1
I0 (int): the prefactor which includes information on the scattering
length density (SLD) and the concentration of particles. Defaults
to 1
ratio (int): the ratio between the first and the second peak. Defaults
to 0.5
bckg (int): the background value to use in case the background is
not perfectly subtracted. Defaults to 0.
Returns
the scattering curve which has the same size as q
"""
return np.array(ratio*single_gauss_spheres(q,R1_av, sigma1,I0,0)+(1-ratio)*single_gauss_spheres(q,R2_av, sigma2,I0,0)+bckg)
def single_schultz_spheres(q, R_av = 1, Z = 50, I0 = 1, bckg = 0 ):
"""sing_schultz_spheres: calculates the scattering pattern of an assembly of
spheres which have a Schultz-Zimm size distribution. Devimal is used to
ensure that the for vey monodisperse distributions (Z>171) the values are not
rounded off to inf. The integrated function is taken from 'Analysis of small
angle neutron scattering spectra from pplydisperse interacting colloids',
DOI: 10.1063/1.446055
sigma = R_av/(Z+1)^0.5
?The Z parameter is defined as z = 1 / sigma^2.?
The definition was taken from:
    http://sasfit.ingobressler.net/manual/Schultz-Zimm
"""
if gmpy2 is None:
aD = np.array([Decimal((Z+1.)/(qq*R_av)) for qq in q])
"""
numpy trigonometric functions do not support Decimal, therefore the
numpy array is created on the spot using float numbers and transforming
them to Decimal after the calculation
"""
a = (Z+1.)/(q*R_av)
p1 = Decimal(8. * np.pi**2 * R_av**6 * (Z+1)**(-6)) * aD**Decimal(Z+7.)
G11 = aD**Decimal(-(Z+1.)) - (Decimal(4.)+aD**2)**(Decimal(-(Z+1.)/2.)) *\
np.array([Decimal(np.cos((Z+1) * np.arctan(2./aa))) for aa in a])
G12 = Decimal((Z+2.)*(Z+1.)) * (aD**Decimal(-(Z+3.)) + (Decimal(4.) + aD**Decimal(2.))**Decimal(-(Z+3.)/2.) *\
np.array([Decimal(np.cos((Z+3)*np.arctan(2./aa))) for aa in a]))
G13 = Decimal(2.*(Z+1.)) * (Decimal(4.) + aD**2.)**Decimal(-(Z+2.)/2) *\
np.array([Decimal(np.sin((Z+2.)*np.arctan(2./aa))) for aa in a])
G1 = G11+G12-G13
returnVal = Decimal(10**I0)*p1*G1+Decimal(bckg)
else:
a = np.array([mpfr((Z+1.)/(qq*R_av)) for qq in q])
a2 = a**2
a2_1 = (mpfr(4.)+a2)
R_av = mpfr(R_av)
Z = mpfr(Z)
I0 = mpfr(I0)
bckg = mpfr(bckg)
"""
numpy trigonometric functions do not support Decimal, therefore the
numpy array is created on the spot using float numbers and transforming
them to Decimal after the calculation
"""
p1 = 8. * np.pi**2 * R_av**6 * (Z+1)**(-6) * a**(Z+7.)
#G11 = a**-(Z+1.) - (4.+a**2)**(-(Z+1.)/2.) *\
#np.array([gmpy2.cos((Z+1) * gmpy2.atan(2./aa)) for aa in a])
G11 = a**-(Z+1.) - a2_1**(-(Z+1.)/2.) *\
np.array([gmpy2.cos((Z+1) * gmpy2.atan(2./aa)) for aa in a])
G12 = (Z+2.)*(Z+1.) * (a**-(Z+3.) + a2_1**(-(Z+3.)/2.) *\
np.array([gmpy2.cos((Z+3)*gmpy2.atan(2./aa)) for aa in a]))
G13 = 2.*(Z+1.) * a2_1**(-(Z+2.)/2) *\
np.array([gmpy2.sin((Z+2.)*gmpy2.atan(2./aa)) for aa in a])
G1 = G11+G12-G13
returnVal = 10**I0*p1*G1+bckg
returnVal = np.array(returnVal.astype(np.float64))
#print 'Single_schultz calculated with:\nR_av:{} Z:{} I0:{}'.format(R_av, Z, I0)
#print 'length is:{}, of which nan: {}'.format(len(returnVal), np.sum(np.isnan(returnVal)))
return returnVal
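# NOTE: added sketch, not from the original module. Using the relation quoted in
# the docstring above, sigma = R_av/(Z+1)**0.5, a 10% relative width corresponds
# to Z = (1/0.1)**2 - 1 = 99; illustrative values only:
#     q = np.linspace(0.05, 2.0, 500)
#     I_schultz = single_schultz_spheres(q, R_av=5.0, Z=99, I0=0.0, bckg=0.0)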
def single_schultz_spheres_old(q,R_av = 1,Z = 1, I0 = 1, bckg = 0):
"""sing_schultz_spheres: calculates the scattering pattern of an assembly of
spheres which have a Flory schultz size distribution. the Z parameter is
defined as z = 1 / sigma^2. THe definistion was taken forom:
ttp://sasfit.ingobressler.net/manual/Schultz-Zimm
Args
        q (numpy.array): the array containing the list of q-values for which to
calculate the scattering
R_av (int): the mean of the size distribution. Defaults to 1
Z (int): the dispersion of the distribution. For a Flory-Schultz
distribution the Z parameter is defined as Z = 1/sigma^2.
Defaults to 1
I0 (int): the prefactor which includes information on the scattering
length density (SLD) and the concentration of particles. Defaults
to 1
bckg (int): the background value to use in case the background is
not perfectly subtracted. Defaults to 0.
Returns
the scattering curve which has the same size as q
"""
a = (Z+1.)/(q*R_av)
P_q = 8.*np.pi**2*R_av**6*(Z-1.)**(-6.)*a**(Z+7.)*(a**(-(Z+1.))- \
(4.+a**2)**(-(Z+1.)/2)*np.cos((Z+1.)*np.arctan(2/a)) + \
(Z+2.)*(Z+1.)*(a**(-Z-3.)+(4+a**2)**((-Z-3.)/2.)*np.cos((Z+3.)*np.arctan(2./a))) - \
2.*(Z+1.)*(4.+a**2.)**(-(Z+2.)/2.)*np.sin((Z+2.)*np.arctan(2./a)))
return np.nan_to_num(10**I0*P_q+bckg)
def double_schultz_spheres(q, R1_av = 1, Z1 = 1, R2_av = 1,Z2 = 1, I0 = 1, ratio = 0.5, bckg = 0):
"""double_schultz_spheres: calculates the scattering pattern of an assembly of
spheres which have a bimodal Flory Schultz distribution.
Args
        q (numpy.array): the array containing the list of q-values for which to
calculate the scattering
R_av1 (int): the mean of the size distribution of the first
peak. Defaults to 1
Z1 (int): the dispersion of the first distribution. For a Flory-Schultz
distribution the Z parameter is defined as Z = 1/sigma^2.
Defaults to 1
R_av2 (int): the mean of the size distribution of the second
peak. Defaults to 1
Z2 (int): the dispersion of the second distribution. For a Flory-Schultz
distribution the Z parameter is defined as Z = 1/sigma^2.
Defaults to 1
I0 (int): the pre-factor which includes information on the scattering
length density (SLD) and the concentration of particles. Defaults
to 1
ratio (int): the ratio between the first and the second peak. Defaults
to 0.5
bckg (int): the background value to use in case the background is
not perfectly subtracted. Defaults to 0.
Returns
the scattering curve which has the same size as q
"""
return np.nan_to_num(ratio*single_schultz_spheres(q,R1_av,Z1,I0,0)+(1-ratio)*single_schultz_spheres(q,R2_av,Z2,I0,0)+bckg)
def monodisperse_cube(q, L=1, I0=1, bckg = 0):
"""
http://www.sasview.org/sasview/user/models/model_functions.html#rectangularprismmodel
    :param q: the wavevector; can be either a number or a numpy array
:param L: The side of the cube
:param I0: The prefactor in front of the form factor
:param bckg: The constant background to sum
:return: The complete, integrated form factor for a cube
"""
def FF(theta, phi):
A = q*L/2.*np.cos(theta)
B = q*L/2.*np.sin(theta)*np.sin(phi)
C = q*L/2.*np.sin(theta)*np.cos(phi)
return np.sinc(A)*np.sinc(B)+np.sinc(C)
return 10**I0*dblquad(FF, 0, np.pi/2., lambda x: 0, lambda x: np.pi/2.0)[0]+bckg
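# NOTE: added usage note, not from the original module. Because FF closes over q and
# dblquad integrates a scalar-valued function, this routine is meant to be called
# with a single scalar q at a time; illustrative values only:
#     I_cube = monodisperse_cube(0.1, L=10.0, I0=0.0, bckg=0.0)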
def single_gaussian_cube(q, L_av=1, sigma=1, I0=1, bckg = 0):
def FF(theta,phi,L):
A = q*L/2.*np.cos(theta)
B = q*L/2.*np.sin(theta)*np.sin(phi)
C = q*L/2.*np.sin(theta)*np.cos(phi)
return single_gauss_distribution(L,L_av,sigma,1)*np.sinc(A)*np.sinc(B)+np.sinc(C)
l_min = max(0,L_av-4*(L_av*sigma))
l_max = L_av+4*(L_av*sigma)
    # tplquad needs limits for all three variables of FF(theta, phi, L): here the
    # particle size L runs over [l_min, l_max] computed above, while the orientation
    # angles phi and theta each run over [0, pi/2].
    return 10**I0*tplquad(FF, l_min, l_max,
                          lambda L: 0, lambda L: np.pi/2.0,
                          lambda L, phi: 0, lambda L, phi: np.pi/2.0)[0]+bckg
| [
"="
] | = |
0bea0ecced4c778b22f949d0bfa1c3a5954fc139 | e0519908caa23bef1873ff69ebd17c5d81f741e1 | /calabiyau/views/sessions.py | 23c24167a6ff049d9af607d405a9047b9d2be499 | [
"BSD-3-Clause"
] | permissive | TachyonicProject/calabiyau | 2fb7af37bd656a686a5f741cadd082b2500718ff | 415a8ada4a93ee84c4776e89c9442af328dcfdd6 | refs/heads/latest | 2020-05-02T04:14:43.953841 | 2019-12-06T04:12:39 | 2019-12-06T04:12:39 | 177,745,608 | 0 | 3 | NOASSERTION | 2019-12-06T04:12:40 | 2019-03-26T08:31:25 | Python | UTF-8 | Python | false | false | 4,448 | py | # -*- coding: utf-8 -*-
# Copyright (c) 2018-2020 Christiaan Frans Rademan <[email protected]>.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE.
from luxon import register
from luxon import router
from luxon.utils import sql
from luxon.helpers.api import sql_list
from luxon import MBClient
from calabiyau.lib.ctx import ctx
@register.resources()
class Sessions(object):
def __init__(self):
# Services Users
router.add('GET', '/v1/sessions', self.sessions,
tag='services:view')
router.add('PUT', '/v1/disconnect/{session_id}', self.disconnect,
tag='services:admin')
router.add('PUT', '/v1/clear/{nas_id}', self.clear,
tag='services:admin')
def sessions(self, req, resp):
def ctx_val(ctx_id):
try:
return {'ctx': ctx[ctx_id]}
except IndexError:
return {'ctx': ctx_id}
f_session_id = sql.Field('calabiyau_session.id')
f_session_ctx = sql.Field('calabiyau_session.ctx')
f_session_accttype = sql.Field('calabiyau_session.accttype')
f_session_start = sql.Field('calabiyau_session.acctstarttime')
f_session_updated = sql.Field('calabiyau_session.acctupdated')
f_session_unique_id = sql.Field('calabiyau_session.acctuniqueid')
f_session_ip = sql.Field(
'INET6_NTOA(calabiyau_session.framedipaddress)')
f_nas_ip = sql.Field(
'INET6_NTOA(calabiyau_session.nasipaddress)')
f_session_username = sql.Field('calabiyau_session.username')
f_session_user_id = sql.Field('calabiyau_session.id')
select = sql.Select('calabiyau_session')
select.fields = (f_session_id,
f_session_unique_id,
f_session_start,
f_session_updated,
f_session_user_id,
f_session_username,
f_session_ip,
f_nas_ip,
f_session_ctx,
)
select.where = f_session_accttype != sql.Value('stop')
return sql_list(
req,
select,
search={
'calabiyau_session.acctstarttime': 'datetime',
'calabiyau_session.acctupdated': 'datetime',
'calabiyau_session.user_id': str,
'calabiyau_session.username': str,
'calabiyau_session.acctuniqueid': str,
'calabiyau_session.framedipaddress': 'ip',
'calabiyau_session.nasipaddress': 'ip'},
callbacks={'ctx': ctx_val})
def disconnect(self, req, resp, session_id):
with MBClient('subscriber') as mb:
mb.send('disconnect_session', {'session_id': session_id})
def clear(self, req, resp, nas_id):
with MBClient('subscriber') as mb:
mb.send('clear_nas_sessions', {'nas_id': nas_id})
| [
"[email protected]"
] | |
8bcd9aa863af02fbda6ca89f80c595b263e35e8a | 49a167d942f19fc084da2da68fc3881d44cacdd7 | /kubernetes_asyncio/client/api/authorization_api.py | 420284fe2740cbf841d0d542f50e1b597a20ae81 | [
"Apache-2.0"
] | permissive | olitheolix/kubernetes_asyncio | fdb61323dc7fc1bade5e26e907de0fe6e0e42396 | 344426793e4e4b653bcd8e4a29c6fa4766e1fff7 | refs/heads/master | 2020-03-19T12:52:27.025399 | 2018-06-24T23:34:03 | 2018-06-24T23:34:03 | 136,546,270 | 1 | 0 | Apache-2.0 | 2018-06-24T23:52:47 | 2018-06-08T00:39:52 | Python | UTF-8 | Python | false | false | 4,144 | py | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: v1.10.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from kubernetes_asyncio.client.api_client import ApiClient
class AuthorizationApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def get_api_group(self, **kwargs): # noqa: E501
"""get_api_group # noqa: E501
get information of a group # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_api_group(async=True)
>>> result = thread.get()
:param async bool
:return: V1APIGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_api_group_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_api_group_with_http_info(**kwargs) # noqa: E501
return data
def get_api_group_with_http_info(self, **kwargs): # noqa: E501
"""get_api_group # noqa: E501
get information of a group # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_api_group_with_http_info(async=True)
>>> result = thread.get()
:param async bool
:return: V1APIGroup
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_api_group" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/authorization.k8s.io/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1APIGroup', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| [
"[email protected]"
] | |
dc765d76018ce6cd8317283edeebe0aa3727ded8 | 86b293ef3df8a276c97db49f25e5a9c36822776e | /0x08-python-more_classes/1-rectangle.py | a89c2a3ea1af8ed99a9cdb92d6e584860d74097b | [] | no_license | tayloradam1999/holbertonschool-higher_level_programming | 3c6ceab832ad85448df320a437ddf6c39130f0dd | 70068c87f3058324dca58fc5ef988af124a9a965 | refs/heads/main | 2023-08-19T16:13:04.240756 | 2021-09-28T00:37:03 | 2021-09-28T00:37:03 | 361,856,354 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,179 | py | #!/usr/bin/python3
"""
This module defines a class 'Rectangle' that
defines a rectangle based on '0-rectangle.py'
"""
class Rectangle:
"""Defines a rectangle with a private instance attribute 'width'
and 'height'"""
def __init__(self, width=0, height=0):
self.width = width
self.height = height
@property
def width(self):
"""Property getter for width"""
return self.__width
@width.setter
def width(self, value):
"""Property setter for width that raises Type and Value errors"""
if not isinstance(value, int):
raise TypeError("width must be an integer")
if value < 0:
raise ValueError("width must be >= 0")
self.__width = value
@property
def height(self):
"""Property getter for height"""
return self.__height
@height.setter
def height(self, value):
"""Property setter for height that raises Type and Value errors"""
if not isinstance(value, int):
raise TypeError("height must be an integer")
if value < 0:
raise ValueError("height must be >= 0")
self.__height = value
| [
"[email protected]"
] | |
0d155686d2b7d638897fc2d02dc556dd3da8babb | ce76b3ef70b885d7c354b6ddb8447d111548e0f1 | /other_time/last_part_and_thing/problem_or_world.py | e42537b5fd907d85a61cab4911bd521a6bc81f4a | [] | no_license | JingkaiTang/github-play | 9bdca4115eee94a7b5e4ae9d3d6052514729ff21 | 51b550425a91a97480714fe9bc63cb5112f6f729 | refs/heads/master | 2021-01-20T20:18:21.249162 | 2016-08-19T07:20:12 | 2016-08-19T07:20:12 | 60,834,519 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 236 | py |
#! /usr/bin/env python
def feel_eye(str_arg):
early_life(str_arg)
print('find_next_part_about_small_person')
def early_life(str_arg):
print(str_arg)
if __name__ == '__main__':
feel_eye('take_company_at_little_case')
| [
"[email protected]"
] | |
0dac5829221d058f43409e95e5d6afb11cbbcefd | 2e2c9cf0bf1f6218f82e7ecddbec17da49756114 | /day14ddt_opnpyxl/day14_封装Hand_excel/demo5列表推导式.py | 450b10ac6ae56ab647b20b0e36b9b6e0c8cf6283 | [] | no_license | guoyunfei0603/py31 | c3cc946cd9efddb58dad0b51b72402a77e9d7592 | 734a049ecd84bfddc607ef852366eb5b7d16c6cb | refs/heads/master | 2023-03-02T20:50:02.052878 | 2021-02-05T06:17:24 | 2021-02-05T06:17:24 | 279,454,793 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 513 | py | """
============================
Author:小白31
Time:2020/8/2 22:03
E-mail:[email protected]
============================
"""
# Generate a list of the numbers 1-100
# li = []
# for i in range(1,101):
# li.append(i)
#
# print(li)
#
# print(list(range(1,101)))
# 生成 ["学号101","学号102","学号103"..."学号150"]
li = []
for i in range(101,151):
li.append("学号{}".format(i))
print(li)
print("-----------------列表推导式------------------")
li2 = ["学号{}".format(i) for i in range(101,151)]
print(li2) | [
"[email protected]"
] | |
013ad4f8eb3ba02e9770aed25cb228d75475289b | 6f05f7d5a67b6bb87956a22b988067ec772ba966 | /data/test/python/030d0c5ebc377ba768e6bdbbc82d64a6cfcbb7d4__main__.py | 030d0c5ebc377ba768e6bdbbc82d64a6cfcbb7d4 | [
"MIT"
] | permissive | harshp8l/deep-learning-lang-detection | 93b6d24a38081597c610ecf9b1f3b92c7d669be5 | 2a54293181c1c2b1a2b840ddee4d4d80177efb33 | refs/heads/master | 2020-04-07T18:07:00.697994 | 2018-11-29T23:21:23 | 2018-11-29T23:21:23 | 158,597,498 | 0 | 0 | MIT | 2018-11-21T19:36:42 | 2018-11-21T19:36:41 | null | UTF-8 | Python | false | false | 580 | py | import gi
from ghue.controller import Controller
from ghue.device.hue import HueDeviceManager
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk, GLib
import phue
from .application import GHueApplication
if __name__ == '__main__':
GLib.set_application_name("Philips Hue")
controller = Controller()
hue_device_manager = HueDeviceManager(bridge=phue.Bridge('philips-hue.local'),
controller=controller)
controller.add_device_manager(hue_device_manager)
app = GHueApplication(controller)
app.run(None)
| [
"[email protected]"
] | |
8817c6128119450c0fb5105ac6f32c7a7f78a753 | c57ce2b6aab3350dedf10942f893b86951076fbc | /link/mongo/__init__.py | 092c06d8331b86e49a60f2566d0767944c5d7e11 | [] | no_license | linkdd/link.mongo | b1c5e66b0519aecdb9d5832d1f74ea1f4e7e8bd4 | 3cd9d264a4e0e44726767385add871c67af2928c | refs/heads/master | 2020-04-10T20:34:23.847508 | 2016-09-02T21:48:22 | 2016-09-02T21:48:22 | 60,538,453 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 77 | py | # -*- coding: utf-8 -*-
__version__ = '0.10'
CONF_BASE_PATH = 'link/mongo'
| [
"[email protected]"
] | |
92bda0387d9d98eb70e6077ff9a9ac8da8450137 | dd65e3cbfa3fb5ef378e83f810d67822db396269 | /oracle_mon/views.py | 6abc95f240b3de063267206dfd9120e30846c67d | [] | no_license | ampere1984/dbmon | a385ffa43377df9a203cee443c648f846782f035 | 60a39bb311008e33e08c9df7547be6142bbef6b0 | refs/heads/master | 2020-04-22T02:45:08.430898 | 2019-01-29T09:45:37 | 2019-01-29T09:45:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 24,217 | py | #! /usr/bin/python
# encoding:utf-8
from django.shortcuts import render
from django.shortcuts import render,render_to_response
from django.http import HttpResponse, HttpResponseRedirect
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
# Create your views here.
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse
import datetime
from frame import tools
# Configuration file
import ConfigParser
import base64
import frame.models as models_frame
import oracle_mon.models as models_oracle
import frame.oracle_do as ora_do
# Create your views here.
@login_required(login_url='/login')
def oracle_monitor(request):
messageinfo_list = models_frame.TabAlarmInfo.objects.all()
tagsinfo = models_oracle.TabOracleServers.objects.all().order_by('tags')
tagsdefault = request.GET.get('tagsdefault')
if not tagsdefault:
tagsdefault = models_oracle.TabOracleServers.objects.order_by('tags')[0].tags
conn_range_default = request.GET.get('conn_range_default')
if not conn_range_default:
conn_range_default = '1小时'.decode("utf-8")
undo_range_default = request.GET.get('undo_range_default')
if not undo_range_default:
undo_range_default = '1小时'.decode("utf-8")
tmp_range_default = request.GET.get('tmp_range_default')
if not tmp_range_default:
tmp_range_default = '1小时'.decode("utf-8")
ps_range_default = request.GET.get('ps_range_default')
if not ps_range_default:
ps_range_default = '1小时'.decode("utf-8")
conn_begin_time = tools.range(conn_range_default)
undo_begin_time = tools.range(undo_range_default)
tmp_begin_time = tools.range(tmp_range_default)
ps_begin_time = tools.range(ps_range_default)
end_time = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    # Get the current database status
try:
oracle_curr = models_oracle.OracleDb.objects.filter(tags=tagsdefault).get(
tags=tagsdefault, )
except models_oracle.OracleDb.DoesNotExist:
oracle_curr = \
models_oracle.OracleDbHis.objects.filter(tags=tagsdefault).order_by(
'-chk_time')[0]
# 取上一次有效采集的数据
try:
try:
oracleinfo = models_oracle.OracleDb.objects.filter(tags=tagsdefault, percent_process__isnull=False).get(tags=tagsdefault,)
except models_oracle.OracleDb.DoesNotExist:
oracleinfo = \
models_oracle.OracleDbHis.objects.filter(tags=tagsdefault, percent_process__isnull=False).order_by(
'-chk_time')[0]
except Exception, e:
oracleinfo = \
models_oracle.OracleDbHis.objects.filter(tags=tagsdefault).order_by(
'-chk_time')[0]
if oracle_curr.mon_status == 'connected':
check_status = 'success'
oracle_status = '在线'
else:
check_status = 'danger'
oracle_status = '离线'
eventinfo = models_oracle.OracleDbEvent.objects.filter(tags=tagsdefault)
lockinfo = models_oracle.OracleLock.objects.filter(tags=tagsdefault)
conngrow = models_oracle.OracleDbHis.objects.filter(tags=tagsdefault, percent_process__isnull=False).filter(
chk_time__gt=conn_begin_time, chk_time__lt=end_time).order_by('-chk_time')
conngrow_list = list(conngrow)
conngrow_list.reverse()
try:
undoinfo = models_oracle.OracleUndoTbs.objects.get(tags=tagsdefault)
except models_oracle.OracleUndoTbs.DoesNotExist:
undoinfo = models_oracle.OracleUndoTbsHis.objects.filter(tags=tagsdefault, pct_used__isnull=False).order_by('-chk_time')
if not undoinfo:
models_oracle.OracleUndoTbsHis.objects.create(tags=tagsdefault,undo_tbs_name='UNDOTBS1', total_mb=0, used_mb=0,
pct_used=0,rate_level='green')
undoinfo = models_oracle.OracleUndoTbsHis.objects.filter(tags=tagsdefault, pct_used__isnull=False).order_by('-chk_time')[0]
undogrow = models_oracle.OracleUndoTbsHis.objects.filter(tags=tagsdefault, pct_used__isnull=False).filter(
chk_time__gt=undo_begin_time, chk_time__lt=end_time).order_by('-chk_time')
undogrow_list = list(undogrow)
undogrow_list.reverse()
try:
tmpinfo = models_oracle.OracleTmpTbs.objects.get(tags=tagsdefault,tmp_tbs_name='TEMP')
except models_oracle.OracleTmpTbs.DoesNotExist:
tmpinfo = models_oracle.OracleTmpTbsHis.objects.filter(tags=tagsdefault, pct_used__isnull=False).order_by('-chk_time')
if not tmpinfo:
models_oracle.OracleTmpTbsHis.objects.create(tags=tagsdefault, tmp_tbs_name='UNDOTBS1', total_mb=0,
used_mb=0,
pct_used=0, rate_level='green')
tmpinfo = models_oracle.OracleTmpTbsHis.objects.filter(tags=tagsdefault, pct_used__isnull=False).order_by('-chk_time')[0]
tmpgrow = models_oracle.OracleTmpTbsHis.objects.filter(tags=tagsdefault, pct_used__isnull=False).filter(
chk_time__gt=tmp_begin_time, chk_time__lt=end_time).order_by('-chk_time')
tmpgrow_list = list(tmpgrow)
tmpgrow_list.reverse()
psgrow = models_oracle.OracleDbHis.objects.filter(tags=tagsdefault, qps__isnull=False).filter(
chk_time__gt=ps_begin_time, chk_time__lt=end_time).order_by('-chk_time')
psgrow_list = list(psgrow)
psgrow_list.reverse()
    # Connection info
sql = "select host,port,service_name,user,password,user_os,password_os from tab_oracle_servers where tags= '%s' " % tagsdefault
oracle = tools.mysql_query(sql)
host = oracle[0][0]
port = oracle[0][1]
service_name = oracle[0][2]
user = oracle[0][3]
password = oracle[0][4]
password = base64.decodestring(password)
url = host + ':' + port + '/' + service_name
if request.method == 'POST':
if request.POST.has_key('select_tags') or request.POST.has_key('select_conn') or request.POST.has_key('select_undo') or request.POST.has_key('select_tmp') or request.POST.has_key('select_ps'):
if request.POST.has_key('select_tags'):
tagsdefault = request.POST.get('select_tags', None).encode("utf-8")
elif request.POST.has_key('select_conn'):
conn_range_default = request.POST.get('select_conn',None)
elif request.POST.has_key('select_undo'):
undo_range_default = request.POST.get('select_undo', None)
elif request.POST.has_key('select_tmp'):
tmp_range_default = request.POST.get('select_tmp', None)
elif request.POST.has_key('select_ps'):
ps_range_default = request.POST.get('select_ps', None)
return HttpResponseRedirect('/oracle_monitor?tagsdefault=%s&conn_range_default=%s&undo_range_default=%s&tmp_range_default=%s&ps_range_default=%s' %(tagsdefault,conn_range_default,undo_range_default,tmp_range_default,ps_range_default))
else:
logout(request)
return HttpResponseRedirect('/login/')
if messageinfo_list:
msg_num = len(messageinfo_list)
msg_last = models_frame.TabAlarmInfo.objects.latest('id')
msg_last_content = msg_last.alarm_content
tim_last = (datetime.datetime.now() - msg_last.alarm_time).seconds / 60
else:
msg_num = 0
msg_last_content = ''
tim_last = ''
return render_to_response('oracle_monitor.html',
{'conngrow_list': conngrow_list, 'undogrow_list': undogrow_list, 'tmpinfo': tmpinfo,
'tmpgrow_list': tmpgrow_list,'psgrow_list': psgrow_list, 'tagsdefault': tagsdefault, 'tagsinfo': tagsinfo,
'oracleinfo': oracleinfo, 'undoinfo': undoinfo, 'eventinfo': eventinfo,
'lockinfo': lockinfo, 'messageinfo_list': messageinfo_list,
'msg_num': msg_num, 'conn_range_default': conn_range_default,
'undo_range_default': undo_range_default, 'tmp_range_default': tmp_range_default,'ps_range_default': ps_range_default,
'msg_last_content': msg_last_content, 'tim_last': tim_last,'check_status':check_status,'oracle_status':oracle_status})
@login_required(login_url='/login')
def show_oracle(request):
messageinfo_list = models_frame.TabAlarmInfo.objects.all()
dbinfo_list = models_oracle.OracleDb.objects.order_by("rate_level")
paginator = Paginator(dbinfo_list, 10)
page = request.GET.get('page')
try:
dbinfos = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
dbinfos = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
dbinfos = paginator.page(paginator.num_pages)
if request.method == 'POST':
logout(request)
return HttpResponseRedirect('/login/')
if messageinfo_list:
msg_num = len(messageinfo_list)
msg_last = models_frame.TabAlarmInfo.objects.latest('id')
msg_last_content = msg_last.alarm_content
tim_last = (datetime.datetime.now() - msg_last.alarm_time).seconds / 60
else:
msg_num = 0
msg_last_content = ''
tim_last = ''
return render_to_response('show_oracle.html',
{'dbinfos': dbinfos, 'messageinfo_list': messageinfo_list, 'msg_num': msg_num,
'msg_last_content': msg_last_content, 'tim_last': tim_last})
@login_required(login_url='/login')
def show_oracle_resource(request):
messageinfo_list = models_frame.TabAlarmInfo.objects.all()
tagsinfo = models_oracle.OracleDb.objects.filter(mon_status='connected')
tagsdefault = request.GET.get('tagsdefault')
if not tagsdefault:
tagsdefault = models_oracle.OracleDb.objects.filter(mon_status='connected').order_by('tags')[0].tags
typedefault = request.GET.get('typedefault')
redo_range_default = request.GET.get('redo_range_default')
if not redo_range_default:
redo_range_default = 7
tbsinfo_list = models_oracle.OracleTbs.objects.filter(tags=tagsdefault).order_by('-pct_used')
    # Pagination
paginator_tbs = Paginator(tbsinfo_list, 5)
undotbsinfo_list = models_oracle.OracleUndoTbs.objects.filter(tags=tagsdefault).order_by('-pct_used')
paginator_undo = Paginator(undotbsinfo_list, 5)
tmptbsinfo_list = models_oracle.OracleTmpTbs.objects.filter(tags=tagsdefault).order_by('-pct_used')
paginator_tmp = Paginator(tmptbsinfo_list, 5)
page_tbs = request.GET.get('page_tbs')
try:
tbsinfos = paginator_tbs.page(page_tbs)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
tbsinfos = paginator_tbs.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
tbsinfos = paginator_tbs.page(paginator_tbs.num_pages)
page_undo = request.GET.get('page_undo')
try:
undotbsinfos = paginator_undo.page(page_undo)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
undotbsinfos = paginator_undo.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
undotbsinfos = paginator_undo.page(paginator_undo.num_pages)
page_tmp = request.GET.get('page_tmp')
try:
        tmptbsinfos = paginator_tmp.page(page_tmp)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
tmptbsinfos = paginator_tmp.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
tmptbsinfos = paginator_tmp.page(paginator_tmp.num_pages)
    # Get control file info
sql = "select host,port,service_name,user,password,user_os,password_os from tab_oracle_servers where tags= '%s' " % tagsdefault
oracle = tools.mysql_query(sql)
host = oracle[0][0]
port = oracle[0][1]
service_name = oracle[0][2]
user = oracle[0][3]
password = oracle[0][4]
password = base64.decodestring(password)
url = host + ':' + port + '/' + service_name
sql = """
select name,round(block_size*file_size_blks/1024/1024,2) size_M,'controlfile' tpye from v$controlfile
"""
oracle_controlfiles = tools.oracle_django_query(user, password, url, sql)
    # Get online redo logs
sql = """
select a.GROUP# group_no,b.THREAD# thread_no,a.TYPE,b.SEQUENCE# sequence_no,b.BYTES/1024/1024 SIZE_M,b.ARCHIVED,b.STATUS,a.MEMBER from v$logfile a,v$log b where a.GROUP#=b.GROUP#(+)
"""
oracle_redo_files = tools.oracle_django_query(user, password, url, sql)
    # Online redo log statistics
if redo_range_default == '1':
sql = """
select 'hh'||to_char(first_time, 'hh24') stat_date,
count(1) log_count,
(select bytes / 1024 / 1024 sizem from v$log where rownum < 2) log_size
from v$log_history
where to_char(first_time, 'yyyymmdd') < to_char(sysdate, 'yyyymmdd')
and to_char(first_time, 'yyyymmdd') >=
to_char(sysdate - 1, 'yyyymmdd')
group by to_char(first_time, 'hh24'),to_char(first_time, 'dy')
order by to_char(first_time, 'hh24')
"""
oracle_redo_cnts = tools.oracle_django_query(user, password, url, sql)
else:
sql = """ select to_char(first_time, 'yyyy-mm-dd') stat_date,
count(1) log_count,
(select bytes / 1024 / 1024 sizem from v$log where rownum < 2) log_size
from v$log_history
where to_char(first_time, 'yyyymmdd') < to_char(sysdate, 'yyyymmdd')
and to_char(first_time, 'yyyymmdd') >= to_char(sysdate-%s, 'yyyymmdd')
group by to_char(first_time, 'yyyy-mm-dd'), to_char(first_time, 'dy') order by to_char(first_time, 'yyyy-mm-dd')""" % redo_range_default
oracle_redo_cnts = tools.oracle_django_query(user, password, url, sql)
    # Table change records
sql = """
select table_owner,table_name,inss,upds,dels,
to_char(inss + upds + dels) dmls,to_char(sysdate , 'yyyy-mm-dd') get_date,truncated,num_rows,
to_char(last_analyzed ,'yyyy-mm-dd hh24:mi:ss') last_analyzed
from (select m.table_owner, m.table_name, inserts as inss, updates as upds, deletes as dels, truncated, t.num_rows,t.last_analyzed
from sys.dba_tab_modifications m, dba_tables t
where m.table_name = t.table_name
and t.owner not in ('SYS','SYSTEM','OUTLN','DIP','ORACLE_OCM','DBSNMP','APPQOSSYS','WMSYS','EXFSYS',
'CTXSYS','ANONYMOUS','XDB','XS$NULL','ORDDATA','SI_INFORMTN_SCHEMA','ORDPLUGINS','ORDSYS','MDSYS','OLAPSYS',
'MDDATA','SPATIAL_WFS_ADMIN_USR','SPATIAL_CSW_ADMIN_USR','SYSMAN','MGMT_VIEW','APEX_030200','FLOWS_FILES',
'APEX_PUBLIC_USER','OWBSYS','OWBSYS_AUDIT','SCOTT')
and m.table_owner = t.owner and m.partition_name is null)
"""
oracle_table_changes = tools.oracle_django_query(user, password, url, sql)
    # Sequences
sql = """
select sequence_owner,sequence_name,min_value,max_value,increment_by,cycle_flag,order_flag,
cache_size,last_number,
round((max_value - last_number) / (max_value - min_value), 2) * 100 pct_used,
(case when (round((max_value - last_number) / (max_value - min_value), 2) * 100) > 30
then 'green'
when (round((max_value - last_number) / (max_value - min_value), 2) * 100) <= 30 and (round((max_value - last_number) / (max_value - min_value), 2) * 100) > 10
then 'yellow'
when (round((max_value - last_number) / (max_value - min_value), 2) * 100) <= 10
then 'red'
else ''
end) seq_color,
to_char(sysdate, 'yyyy-mm-dd') last_analyzed
from dba_sequences s
where s.sequence_owner not in ('SYS','SYSTEM','OUTLN','DIP','ORACLE_OCM','DBSNMP','APPQOSSYS','WMSYS','EXFSYS',
'CTXSYS','ANONYMOUS','XDB','XS$NULL','ORDDATA','SI_INFORMTN_SCHEMA','ORDPLUGINS','ORDSYS','MDSYS','OLAPSYS',
'MDDATA','SPATIAL_WFS_ADMIN_USR','SPATIAL_CSW_ADMIN_USR','SYSMAN','MGMT_VIEW','APEX_030200','FLOWS_FILES',
'APEX_PUBLIC_USER','OWBSYS','OWBSYS_AUDIT','SCOTT')
"""
oracle_sequences = tools.oracle_django_query(user, password, url, sql)
    # Accounts
sql = """
select username,profile,to_char(created,'yyyy-mm-dd hh24:mi:ss') created,
account_status,
(case when account_status <> 'OPEN' then 'red' else 'green'end ) account_color,
to_char(lock_date,'yyyy-mm-dd hh24:mi:ss') lock_date,
to_char(expiry_date,'yyyy-mm-dd hh24:mi:ss') expiry_date,
(case when expiry_date - sysdate > 30
then 'green'
when expiry_date - sysdate <= 30 and expiry_date - sysdate > 7
then 'yellow'
when expiry_date - sysdate <= 7
then 'red'
else ''
end) expiry_color,default_tablespace,temporary_tablespace
from dba_users
"""
oracle_users = tools.oracle_django_query(user, password, url, sql)
    # alert log
oracle_alert_logs = models_oracle.AlertLog.objects.filter(server_type='Oracle',tags=tagsdefault).order_by('-log_time')
if request.method == 'POST':
if request.POST.has_key('select_tags') :
tagsdefault = request.POST.get('select_tags', None).encode("utf-8")
return HttpResponseRedirect('/show_oracle_resource?tagsdefault=%s&redo_range_default=%s' %(tagsdefault,redo_range_default))
else:
logout(request)
return HttpResponseRedirect('/login/')
if messageinfo_list:
msg_num = len(messageinfo_list)
msg_last = models_frame.TabAlarmInfo.objects.latest('id')
msg_last_content = msg_last.alarm_content
tim_last = (datetime.datetime.now() - msg_last.alarm_time).seconds / 60
else:
msg_num = 0
msg_last_content = ''
tim_last = ''
return render_to_response('show_oracle_res.html', {'tagsdefault': tagsdefault,'typedefault':typedefault,'tagsinfo': tagsinfo,'msg_num':msg_num,
'msg_last_content': msg_last_content, 'tim_last': tim_last,
'tbsinfos': tbsinfos, 'undotbsinfos': undotbsinfos,'tmptbsinfos': tmptbsinfos,'oracle_controlfiles':oracle_controlfiles,
'oracle_redo_files':oracle_redo_files,'oracle_redo_cnts':oracle_redo_cnts,'oracle_table_changes':oracle_table_changes,
'oracle_sequences':oracle_sequences,'oracle_users':oracle_users,'oracle_alert_logs':oracle_alert_logs})
@login_required(login_url='/login')
def oracle_profile(request):
messageinfo_list = models_frame.TabAlarmInfo.objects.all()
tags = request.GET.get('tags')
profile_name = request.GET.get('profile_name')
sql = "select host,port,service_name,user,password,user_os,password_os from tab_oracle_servers where tags= '%s' " %tags
oracle = tools.mysql_query(sql)
host = oracle[0][0]
port = oracle[0][1]
service_name = oracle[0][2]
user = oracle[0][3]
password = oracle[0][4]
password = base64.decodestring(password)
url = host + ':' + port + '/' + service_name
sql = """
select profile,resource_name,resource_type,limit,to_char(sysdate,'yyyy-mm-dd') get_date
from dba_profiles where profile = '%s'
""" %profile_name
oracle_profiles = tools.oracle_django_query(user,password,url,sql)
now = tools.now()
if request.method == 'POST':
logout(request)
return HttpResponseRedirect('/login/')
if messageinfo_list:
msg_num = len(messageinfo_list)
msg_last = models_frame.TabAlarmInfo.objects.latest('id')
msg_last_content = msg_last.alarm_content
tim_last = (datetime.datetime.now() - msg_last.alarm_time).seconds / 60
else:
msg_num = 0
msg_last_content = ''
tim_last = ''
return render_to_response('oracle_profile.html',
{'messageinfo_list': messageinfo_list,'msg_num':msg_num, 'oracle_profiles': oracle_profiles, 'profile_name':profile_name,'tags': tags,
'now': now,
'msg_last_content': msg_last_content, 'tim_last': tim_last})
@login_required(login_url='/login')
def oracle_grant(request):
messageinfo_list = models_frame.TabAlarmInfo.objects.all()
tags = request.GET.get('tags')
username = request.GET.get('username')
sql = "select host,port,service_name,user,password,user_os,password_os from tab_oracle_servers where tags= '%s' " %tags
oracle = tools.mysql_query(sql)
host = oracle[0][0]
port = oracle[0][1]
service_name = oracle[0][2]
user = oracle[0][3]
password = oracle[0][4]
password = base64.decodestring(password)
url = host + ':' + port + '/' + service_name
    # Role privileges
sql = """
select grantee,
granted_role,
admin_option,
default_role,
to_char(sysdate, 'yyyy-mm-dd') get_date
from dba_role_privs where grantee = '%s'
""" %username
user_roles = tools.oracle_django_query(user,password,url,sql)
    # System privileges
sql = """
select grantee,privilege,admin_option,to_char(sysdate,'yyyy-mm-dd') get_date
from dba_sys_privs where grantee = '%s'
""" %username
sys_privs = tools.oracle_django_query(user,password,url,sql)
    # Object privileges
sql = """
select owner,grantee,grantor,table_name,privilege
,grantable,hierarchy,to_char(sysdate,'yyyy-mm-dd') get_date
from dba_tab_privs
where grantee <> 'PUBLIC' and privilege <> 'EXECUTE' and grantee = '%s'
""" %username
tab_privs = tools.oracle_django_query(user,password,url,sql)
now = tools.now()
if request.method == 'POST':
logout(request)
return HttpResponseRedirect('/login/')
if messageinfo_list:
msg_num = len(messageinfo_list)
msg_last = models_frame.TabAlarmInfo.objects.latest('id')
msg_last_content = msg_last.alarm_content
tim_last = (datetime.datetime.now() - msg_last.alarm_time).seconds / 60
else:
msg_num = 0
msg_last_content = ''
tim_last = ''
return render_to_response('oracle_grant.html',
{'messageinfo_list': messageinfo_list, 'msg_num':msg_num,'user_roles': user_roles,'sys_privs':sys_privs,
'tab_privs':tab_privs,'username':username,'tags': tags,'now': now,
'msg_last_content': msg_last_content, 'tim_last': tim_last})
@login_required(login_url='/login')
def show_oracle_rate(request):
messageinfo_list = models_frame.TabAlarmInfo.objects.all()
oracle_rate_list = models_oracle.OracleDbRate.objects.order_by("db_rate")
if request.method == 'POST':
logout(request)
return HttpResponseRedirect('/login/')
if messageinfo_list:
msg_num = len(messageinfo_list)
msg_last = models_frame.TabAlarmInfo.objects.latest('id')
msg_last_content = msg_last.alarm_content
tim_last = (datetime.datetime.now() - msg_last.alarm_time).seconds / 60
else:
msg_num = 0
msg_last_content = ''
tim_last = ''
return render_to_response('show_oracle_rate.html',
{'oracle_rate_list': oracle_rate_list, 'messageinfo_list': messageinfo_list,
'msg_num': msg_num,
'msg_last_content': msg_last_content, 'tim_last': tim_last})
| [
"[email protected]"
] | |
a6610f3481b08658f371b6904911bc55a74f4c12 | ff481a22256d045e28403f4ee65480dc3d6ce1d9 | /gerencia/views.py | 073e9d8eb381f5ced2498a8713c677de275c3ce7 | [] | no_license | redcliver/inomont | 83fa6501cbca74e79fc1a42509e5bb5c05eea2dd | 0b8d6511618b6509a5208c8a5d9f82903de8da87 | refs/heads/master | 2023-04-27T03:22:57.138482 | 2020-01-14T21:22:19 | 2020-01-14T21:22:19 | 221,098,331 | 1 | 0 | null | 2023-04-21T20:40:39 | 2019-11-12T00:51:22 | CSS | UTF-8 | Python | false | false | 65,237 | py | from django.shortcuts import render
from website.models import funcaoModel, cadastroSite, fornecedorModel, colaboradorModel, clienteModel, contaPagarModel, contaReceberModel
import datetime
from django.contrib.auth.models import User
from django.utils.crypto import get_random_string
# Create your views here.
def home(request):
if request.user.is_authenticated:
if request.user.last_name == "GERENCIA":
now = datetime.datetime.now().strftime('%H')
now = int(now)
msgTelaInicial = "Olá, " + request.user.get_short_name()
if now >= 4 and now <= 11:
msgTelaInicial = "Bom dia, " + request.user.get_short_name()
elif now > 11 and now < 18:
msgTelaInicial = "Boa Tarde, " + request.user.get_short_name()
            elif now >= 18 or now < 4:
                msgTelaInicial = "Boa Noite, " + request.user.get_short_name()
return render (request, 'gerencia/home.html', {'title':'Home',
'msgTelaInicial':msgTelaInicial})
return render (request, 'site/login.html', {'title':'Login'})
return render (request, 'site/login.html', {'title':'Login'})
# Colaboradores (employees)
def colaboradores(request):
if request.user.is_authenticated:
if request.user.last_name == "GERENCIA":
now = datetime.datetime.now().strftime('%H')
now = int(now)
msgTelaInicial = "Olá, " + request.user.get_short_name()
if now >= 4 and now <= 11:
msgTelaInicial = "Bom dia, " + request.user.get_short_name()
elif now > 11 and now < 18:
msgTelaInicial = "Boa Tarde, " + request.user.get_short_name()
            elif now >= 18 or now < 4:
                msgTelaInicial = "Boa Noite, " + request.user.get_short_name()
return render (request, 'gerencia/colaboradores/home.html', {'title':'Home',
'msgTelaInicial':msgTelaInicial})
return render (request, 'site/login.html', {'title':'Login'})
return render (request, 'site/login.html', {'title':'Login'})
def colaboradoresNovo(request):
if request.user.is_authenticated:
if request.user.last_name == "GERENCIA":
now = datetime.datetime.now().strftime('%H')
now = int(now)
msgTelaInicial = "Olá, " + request.user.get_short_name()
if now >= 4 and now <= 11:
msgTelaInicial = "Bom dia, " + request.user.get_short_name()
elif now > 11 and now < 18:
msgTelaInicial = "Boa Tarde, " + request.user.get_short_name()
            elif now >= 18 or now < 4:
                msgTelaInicial = "Boa Noite, " + request.user.get_short_name()
if request.method == "POST" and request.POST.get('nome') != None:
nome = request.POST.get('nome')
sobrenome = request.POST.get('sobrenome')
cpf = request.POST.get('cpf')
rg = request.POST.get('rg')
dataNasc = request.POST.get('dataNasc')
email = request.POST.get('email')
celular = request.POST.get('celular')
telefone = request.POST.get('telefone')
estadoCivil = request.POST.get('estadoCivil')
funcaoID = request.POST.get('funcaoID')
tempoExperiencia = request.POST.get('tempoExperiencia')
escolaridade = request.POST.get('escolaridade')
resideEmTresLagoas = request.POST.get('resideEmTresLagoas')
trabalhouInomont = request.POST.get('trabalhouInomont')
pqInomont = request.POST.get('pqInomont')
empresa1 = request.POST.get('empresa1')
funcao1 = request.POST.get('funcao1')
periodo1 = request.POST.get('periodo1')
empresa2 = request.POST.get('empresa2')
funcao2 = request.POST.get('funcao2')
periodo2 = request.POST.get('periodo2')
empresa3 = request.POST.get('empresa3')
funcao3 = request.POST.get('funcao3')
periodo3 = request.POST.get('periodo3')
funcaoObj = funcaoModel.objects.filter(id=funcaoID).get()
novoColaborador = cadastroSite(nome=nome,
sobrenome=sobrenome,
telefone=telefone,
celular=celular,
cpf=cpf,
rg=rg,
dataNasc=dataNasc,
email=email,
estadoCivil=estadoCivil,
escolaridade=escolaridade,
ehDeTresLagoas=resideEmTresLagoas,
trabalhouInomont=trabalhouInomont,
pqInomont=pqInomont,
experiencia=tempoExperiencia,
funcao=funcaoObj,
empresa1=empresa1,
funcao1=funcao1,
periodo1=periodo1,
empresa2=empresa2,
funcao2=funcao2,
periodo2=periodo2,
empresa3=empresa3,
                                           funcao3=funcao3,
periodo3=periodo3)
novoColaborador.save()
msgConfirmacao = "Colaborador salvo com sucesso!"
return render (request, 'gerencia/colaboradores/colaboradorNovo.html', {'title':'Novo Colaborador',
'msgTelaInicial':msgTelaInicial,
'msgConfirmacao':msgConfirmacao})
if request.method == 'GET' and request.GET.get('colaboradorSiteID') != None:
colaboradorSiteID = request.GET.get('colaboradorSiteID')
colaboradorSiteObj = cadastroSite.objects.filter(id=colaboradorSiteID).get()
todasFuncoes = funcaoModel.objects.all().order_by('nome')
return render (request, 'gerencia/colaboradores/colaboradorNovo.html', {'title':'Novo Colaborador',
'msgTelaInicial':msgTelaInicial,
'colaboradorSiteObj':colaboradorSiteObj,
'todasFuncoes':todasFuncoes})
return render (request, 'site/login.html', {'title':'Login'})
return render (request, 'site/login.html', {'title':'Login'})
def colaboradoresVisualizar(request):
if request.user.is_authenticated:
if request.user.last_name == "GERENCIA":
now = datetime.datetime.now().strftime('%H')
now = int(now)
msgTelaInicial = "Olá, " + request.user.get_short_name()
colaboradoresCadastrados = colaboradorModel.objects.all().order_by('funcao')
if now >= 4 and now <= 11:
msgTelaInicial = "Bom dia, " + request.user.get_short_name()
elif now > 11 and now < 18:
msgTelaInicial = "Boa Tarde, " + request.user.get_short_name()
            elif now >= 18 or now < 4:
                msgTelaInicial = "Boa Noite, " + request.user.get_short_name()
if request.method == "POST" and request.POST.get('colaboradorID') != None:
colaboradorID = request.POST.get('colaboradorID')
colaboradorObj = colaboradorModel.objects.filter(id=colaboradorID).get()
return render (request, 'gerencia/colaboradores/colaboradorVisualizar1.html', {'title':'Visualizar Colaborador',
'msgTelaInicial':msgTelaInicial,
'colaboradorObj':colaboradorObj})
return render (request, 'gerencia/colaboradores/colaboradorVisualizar.html', {'title':'Visualizar Colaborador',
'msgTelaInicial':msgTelaInicial,
'colaboradoresCadastrados':colaboradoresCadastrados})
return render (request, 'site/login.html', {'title':'Login'})
return render (request, 'site/login.html', {'title':'Login'})
def colaboradoresSite(request):
if request.user.is_authenticated:
if request.user.last_name == "GERENCIA":
now = datetime.datetime.now().strftime('%H')
now = int(now)
colaboradoresSite = cadastroSite.objects.filter(estado=1).all().order_by('dataCadastro')
msgTelaInicial = "Olá, " + request.user.get_short_name()
if now >= 4 and now <= 11:
msgTelaInicial = "Bom dia, " + request.user.get_short_name()
elif now > 11 and now < 18:
msgTelaInicial = "Boa Tarde, " + request.user.get_short_name()
elif now >= 18 or now < 4:
msgTelaInicial = "Boa noite, " + request.user.get_short_name()
return render (request, 'gerencia/colaboradores/colaboradorSite.html', {'title':'Colaboradores Site',
'msgTelaInicial':msgTelaInicial,
'colaboradoresSite':colaboradoresSite})
return render (request, 'site/login.html', {'title':'Login'})
return render (request, 'site/login.html', {'title':'Login'})
def colaboradoresSiteVisualizar(request):
if request.user.is_authenticated:
if request.user.last_name == "GERENCIA":
now = datetime.datetime.now().strftime('%H')
now = int(now)
msgTelaInicial = "Olá, " + request.user.get_short_name()
colaboradoresSite = cadastroSite.objects.all().order_by('funcao')
if now >= 4 and now <= 11:
msgTelaInicial = "Bom dia, " + request.user.get_short_name()
elif now > 11 and now < 18:
msgTelaInicial = "Boa Tarde, " + request.user.get_short_name()
elif now >= 18 or now < 4:
msgTelaInicial = "Boa noite, " + request.user.get_short_name()
if request.method == "POST" and request.POST.get('colaboradorID') != None:
colaboradorID = request.POST.get('colaboradorID')
colaboradorObj = cadastroSite.objects.all().filter(id=colaboradorID).get()
return render (request, 'gerencia/colaboradores/colaboradorSiteVisualizar.html', {'title':'Visualizar Colaborador',
'msgTelaInicial':msgTelaInicial,
'colaboradorObj':colaboradorObj})
return render (request, 'gerencia/colaboradores/colaboradorSiteVisualizar.html', {'title':'Visualizar Colaborador',
'msgTelaInicial':msgTelaInicial,
'colaboradoresSite':colaboradoresSite})
return render (request, 'site/login.html', {'title':'Login'})
return render (request, 'site/login.html', {'title':'Login'})
#Fornecedores
def fornecedores(request):
if request.user.is_authenticated:
if request.user.last_name == "GERENCIA":
now = datetime.datetime.now().strftime('%H')
now = int(now)
msgTelaInicial = "Olá, " + request.user.get_short_name()
if now >= 4 and now <= 11:
msgTelaInicial = "Bom dia, " + request.user.get_short_name()
elif now > 11 and now < 18:
msgTelaInicial = "Boa Tarde, " + request.user.get_short_name()
elif now >= 18 or now < 4:
msgTelaInicial = "Boa noite, " + request.user.get_short_name()
return render (request, 'gerencia/fornecedores/home.html', {'title':'Home',
'msgTelaInicial':msgTelaInicial})
return render (request, 'site/login.html', {'title':'Login'})
return render (request, 'site/login.html', {'title':'Login'})
def fornecedoresNovo(request):
if request.user.is_authenticated:
if request.user.last_name == "GERENCIA":
now = datetime.datetime.now().strftime('%H')
now = int(now)
msgTelaInicial = "Olá, " + request.user.get_short_name()
if now >= 4 and now <= 11:
msgTelaInicial = "Bom dia, " + request.user.get_short_name()
elif now > 11 and now < 18:
msgTelaInicial = "Boa Tarde, " + request.user.get_short_name()
elif now >= 18 or now < 4:
msgTelaInicial = "Boa noite, " + request.user.get_short_name()
if request.method == "POST":
nome = request.POST.get('nome')
contatoPrincipal = request.POST.get('contatoPrincipal')
email = request.POST.get('email')
cnpj = request.POST.get('cnpj')
cep = request.POST.get('cep')
telefone = request.POST.get('telefone')
celular = request.POST.get('celular')
endereco = request.POST.get('endereco')
numero = request.POST.get('numero')
bairro = request.POST.get('bairro')
cidade = request.POST.get('cidade')
uf = request.POST.get('uf')
fornecedorObj = fornecedorModel(nome=nome, contatoPrincipal=contatoPrincipal, email=email, cnpj=cnpj, telefone=telefone, celular=celular, cep=cep, endereco=endereco, numero=numero, bairro=bairro, cidade=cidade, uf=uf)
fornecedorObj.save()
msgConfirmacao = "Fornecedor salvo com sucesso!"
return render (request, 'gerencia/fornecedores/fornecedorNovo.html', {'title':'Novo Fornecedor',
'msgTelaInicial':msgTelaInicial,
'msgConfirmacao':msgConfirmacao})
return render (request, 'gerencia/fornecedores/fornecedorNovo.html', {'title':'Novo Fornecedor',
'msgTelaInicial':msgTelaInicial})
return render (request, 'site/login.html', {'title':'Login'})
return render (request, 'site/login.html', {'title':'Login'})
def fornecedoresVisualizar(request):
if request.user.is_authenticated:
if request.user.last_name == "GERENCIA":
now = datetime.datetime.now().strftime('%H')
now = int(now)
fornecedores = fornecedorModel.objects.all().order_by('nome')
msgTelaInicial = "Olá, " + request.user.get_short_name()
if now >= 4 and now <= 11:
msgTelaInicial = "Bom dia, " + request.user.get_short_name()
elif now > 11 and now < 18:
msgTelaInicial = "Boa Tarde, " + request.user.get_short_name()
elif now >= 18 or now < 4:
msgTelaInicial = "Boa noite, " + request.user.get_short_name()
if request.method == "POST" and request.POST.get('fornecedorID') != None:
fornecedorID = request.POST.get('fornecedorID')
fornecedorObj = fornecedorModel.objects.filter(id=fornecedorID).get()
return render (request, 'gerencia/fornecedores/fornecedorVisualizar1.html', {'title':'Visualizar Fornecedor',
'msgTelaInicial':msgTelaInicial,
'fornecedorObj':fornecedorObj})
return render (request, 'gerencia/fornecedores/fornecedorVisualizar.html', {'title':'Visualizar Fornecedor',
'msgTelaInicial':msgTelaInicial,
'fornecedores':fornecedores})
return render (request, 'site/login.html', {'title':'Login'})
return render (request, 'site/login.html', {'title':'Login'})
def fornecedoresEditar(request):
if request.user.is_authenticated:
if request.user.last_name == "GERENCIA":
now = datetime.datetime.now().strftime('%H')
now = int(now)
fornecedores = fornecedorModel.objects.all().order_by('nome')
msgTelaInicial = "Olá, " + request.user.get_short_name()
if now >= 4 and now <= 11:
msgTelaInicial = "Bom dia, " + request.user.get_short_name()
elif now > 11 and now < 18:
msgTelaInicial = "Boa Tarde, " + request.user.get_short_name()
elif now >= 18 or now < 4:
msgTelaInicial = "Boa noite, " + request.user.get_short_name()
if request.method == "POST" and request.POST.get('fornecedorID') != None:
fornecedorID = request.POST.get('fornecedorID')
fornecedorObj = fornecedorModel.objects.filter(id=fornecedorID).get()
return render (request, 'gerencia/fornecedores/fornecedorEditar.html', {'title':'Editar Fornecedor',
'msgTelaInicial':msgTelaInicial,
'fornecedorObj':fornecedorObj})
return render (request, 'gerencia/fornecedores/fornecedorVisualizar.html', {'title':'Visualizar Fornecedor',
'msgTelaInicial':msgTelaInicial,
'fornecedores':fornecedores})
return render (request, 'site/login.html', {'title':'Login'})
return render (request, 'site/login.html', {'title':'Login'})
def fornecedoresSalvar(request):
if request.user.is_authenticated:
if request.user.last_name == "GERENCIA":
now = datetime.datetime.now().strftime('%H')
now = int(now)
msgTelaInicial = "Olá, " + request.user.get_short_name()
if now >= 4 and now <= 11:
msgTelaInicial = "Bom dia, " + request.user.get_short_name()
elif now > 11 and now < 18:
msgTelaInicial = "Boa Tarde, " + request.user.get_short_name()
elif now >= 18 or now < 4:
msgTelaInicial = "Boa noite, " + request.user.get_short_name()
if request.method == "POST":
fornecedorID = request.POST.get('fornecedorID')
fornecedorObj = fornecedorModel.objects.filter(id=fornecedorID).get()
nome = request.POST.get('nome')
contatoPrincipal = request.POST.get('contatoPrincipal')
email = request.POST.get('email')
cnpj = request.POST.get('cnpj')
cep = request.POST.get('cep')
telefone = request.POST.get('telefone')
celular = request.POST.get('celular')
endereco = request.POST.get('endereco')
numero = request.POST.get('numero')
bairro = request.POST.get('bairro')
cidade = request.POST.get('cidade')
uf = request.POST.get('uf')
fornecedorObj.nome = nome
fornecedorObj.contatoPrincipal = contatoPrincipal
fornecedorObj.email = email
fornecedorObj.cnpj = cnpj
fornecedorObj.cep = cep
fornecedorObj.telefone = telefone
fornecedorObj.celular = celular
fornecedorObj.endereco = endereco
fornecedorObj.numero = numero
fornecedorObj.bairro = bairro
fornecedorObj.cidade = cidade
fornecedorObj.uf = uf
fornecedorObj.save()
msgConfirmacao = "Fornecedor editado com sucesso!"
return render (request, 'gerencia/fornecedores/fornecedorEditar.html', {'title':'Editar Fornecedor',
'msgTelaInicial':msgTelaInicial,
'msgConfirmacao':msgConfirmacao,
'fornecedorObj':fornecedorObj})
return render (request, 'gerencia/fornecedores/fornecedorNovo.html', {'title':'Novo Fornecedor',
'msgTelaInicial':msgTelaInicial})
return render (request, 'site/login.html', {'title':'Login'})
return render (request, 'site/login.html', {'title':'Login'})
#Funções
def funcaoHome(request):
if request.user.is_authenticated:
if request.user.last_name == "GERENCIA":
now = datetime.datetime.now().strftime('%H')
now = int(now)
msgTelaInicial = "Olá, " + request.user.get_short_name()
if now >= 4 and now <= 11:
msgTelaInicial = "Bom dia, " + request.user.get_short_name()
elif now > 11 and now < 18:
msgTelaInicial = "Boa Tarde, " + request.user.get_short_name()
elif now >= 18 or now < 4:
msgTelaInicial = "Boa noite, " + request.user.get_short_name()
return render (request, 'gerencia/funcoes/home.html', {'title':'Funções',
'msgTelaInicial':msgTelaInicial})
return render (request, 'site/login.html', {'title':'Login'})
return render (request, 'site/login.html', {'title':'Login'})
def funcaoNovo(request):
if request.user.is_authenticated:
if request.user.last_name == "GERENCIA":
now = datetime.datetime.now().strftime('%H')
now = int(now)
msgTelaInicial = "Olá, " + request.user.get_short_name()
if now >= 4 and now <= 11:
msgTelaInicial = "Bom dia, " + request.user.get_short_name()
elif now > 11 and now < 18:
msgTelaInicial = "Boa Tarde, " + request.user.get_short_name()
elif now >= 18 or now < 4:
msgTelaInicial = "Boa noite, " + request.user.get_short_name()
if request.method == 'POST' and request.POST.get('nome'):
nome = request.POST.get('nome')
observacao = request.POST.get('observacao')
novaFuncao = funcaoModel(nome=nome, observacao=observacao)
novaFuncao.save()
msgConfirmacao = "Nova função cadastrada com sucesso!"
return render (request, 'gerencia/funcoes/funcaoNovo.html', {'title':'Nova Função',
'msgTelaInicial':msgTelaInicial,
'msgConfirmacao':msgConfirmacao})
return render (request, 'gerencia/funcoes/funcaoNovo.html', {'title':'Nova Função',
'msgTelaInicial':msgTelaInicial})
return render (request, 'site/login.html', {'title':'Login'})
return render (request, 'site/login.html', {'title':'Login'})
def funcaoVisualizar(request):
if request.user.is_authenticated:
if request.user.last_name == "GERENCIA":
now = datetime.datetime.now().strftime('%H')
funcoes = funcaoModel.objects.all().order_by('id')
now = int(now)
msgTelaInicial = "Olá, " + request.user.get_short_name()
if now >= 4 and now <= 11:
msgTelaInicial = "Bom dia, " + request.user.get_short_name()
elif now > 11 and now < 18:
msgTelaInicial = "Boa Tarde, " + request.user.get_short_name()
elif now >= 18 or now < 4:
msgTelaInicial = "Boa noite, " + request.user.get_short_name()
if request.method == 'POST' and request.POST.get('funcaoID'):
funcaoID = request.POST.get('funcaoID')
funcaoObj = funcaoModel.objects.filter(id=funcaoID).get()
return render (request, 'gerencia/funcoes/funcaoEditar.html', {'title':'Editar Função',
'msgTelaInicial':msgTelaInicial,
'funcaoObj':funcaoObj})
return render (request, 'gerencia/funcoes/funcaoVisualizar.html', {'title':'Visualizar Função',
'msgTelaInicial':msgTelaInicial,
'funcoes':funcoes})
return render (request, 'site/login.html', {'title':'Login'})
return render (request, 'site/login.html', {'title':'Login'})
def funcaoSalvar(request):
if request.user.is_authenticated:
if request.user.last_name == "GERENCIA":
now = datetime.datetime.now().strftime('%H')
funcoes = funcaoModel.objects.all().order_by('id')
now = int(now)
msgTelaInicial = "Olá, " + request.user.get_short_name()
if now >= 4 and now <= 11:
msgTelaInicial = "Bom dia, " + request.user.get_short_name()
elif now > 11 and now < 18:
msgTelaInicial = "Boa Tarde, " + request.user.get_short_name()
elif now >= 18 or now < 4:
msgTelaInicial = "Boa noite, " + request.user.get_short_name()
if request.method == 'POST' and request.POST.get('funcaoID'):
funcaoID = request.POST.get('funcaoID')
funcaoObj = funcaoModel.objects.filter(id=funcaoID).get()
nome = request.POST.get('nome')
observacao = request.POST.get('observacao')
funcaoObj.nome = nome
funcaoObj.observacao = observacao
funcaoObj.save()
msgConfirmacao = "Função editada com sucesso!"
return render (request, 'gerencia/funcoes/funcaoVisualizar.html', {'title':'Visualizar Função',
'msgTelaInicial':msgTelaInicial,
'funcoes':funcoes,
'msgConfirmacao':msgConfirmacao})
return render (request, 'gerencia/funcoes/funcaoVisualizar.html', {'title':'Visualizar Função',
'msgTelaInicial':msgTelaInicial,
'funcoes':funcoes})
return render (request, 'site/login.html', {'title':'Login'})
return render (request, 'site/login.html', {'title':'Login'})
#Clientes
def clientesHome(request):
if request.user.is_authenticated:
if request.user.last_name == "GERENCIA":
now = datetime.datetime.now().strftime('%H')
now = int(now)
msgTelaInicial = "Olá, " + request.user.get_short_name()
if now >= 4 and now <= 11:
msgTelaInicial = "Bom dia, " + request.user.get_short_name()
elif now > 11 and now < 18:
msgTelaInicial = "Boa Tarde, " + request.user.get_short_name()
elif now >= 18 or now < 4:
msgTelaInicial = "Boa noite, " + request.user.get_short_name()
return render (request, 'gerencia/clientes/home.html', {'title':'Clientes',
'msgTelaInicial':msgTelaInicial})
return render (request, 'site/login.html', {'title':'Login'})
return render (request, 'site/login.html', {'title':'Login'})
def clientesNovo(request):
if request.user.is_authenticated:
if request.user.last_name == "GERENCIA":
now = datetime.datetime.now().strftime('%H')
now = int(now)
msgTelaInicial = "Olá, " + request.user.get_short_name()
if now >= 4 and now <= 11:
msgTelaInicial = "Bom dia, " + request.user.get_short_name()
elif now > 11 and now < 18:
msgTelaInicial = "Boa Tarde, " + request.user.get_short_name()
elif now >= 18 or now < 4:
msgTelaInicial = "Boa noite, " + request.user.get_short_name()
if request.method == 'POST' and request.POST.get('nome'):
nome = request.POST.get('nome')
contato = request.POST.get('contato')
cnpj = request.POST.get('cnpj')
email = request.POST.get('email')
telefone = request.POST.get('telefone')
cep = request.POST.get('cep')
endereco = request.POST.get('endereco')
numero = request.POST.get('numero')
bairro = request.POST.get('bairro')
cidade = request.POST.get('cidade')
uf = request.POST.get('uf')
novoCliente = clienteModel(nome=nome, contato=contato, cnpj=cnpj, email=email, telefone=telefone, cep=cep, endereco=endereco, numero=numero, bairro=bairro, cidade=cidade, uf=uf)
novoCliente.save()
msgConfirmacao = "Novo cliente cadastrado com sucesso!"
return render (request, 'gerencia/clientes/clienteNovo.html', {'title':'Novo Cliente',
'msgTelaInicial':msgTelaInicial,
'msgConfirmacao':msgConfirmacao})
return render (request, 'gerencia/clientes/clienteNovo.html', {'title':'Novo Cliente',
'msgTelaInicial':msgTelaInicial})
return render (request, 'site/login.html', {'title':'Login'})
return render (request, 'site/login.html', {'title':'Login'})
def clientesVisualizar(request):
if request.user.is_authenticated:
if request.user.last_name == "GERENCIA":
now = datetime.datetime.now().strftime('%H')
clientes = clienteModel.objects.all().order_by('id')
now = int(now)
msgTelaInicial = "Olá, " + request.user.get_short_name()
if now >= 4 and now <= 11:
msgTelaInicial = "Bom dia, " + request.user.get_short_name()
elif now > 11 and now < 18:
msgTelaInicial = "Boa Tarde, " + request.user.get_short_name()
elif now >= 18 or now < 4:
msgTelaInicial = "Boa noite, " + request.user.get_short_name()
if request.method == 'POST' and request.POST.get('clienteID'):
clienteID = request.POST.get('clienteID')
clienteObj = clienteModel.objects.filter(id=clienteID).get()
return render (request, 'gerencia/clientes/clienteVisualizar1.html', {'title':'Visualizar Cliente',
'msgTelaInicial':msgTelaInicial,
'clienteObj':clienteObj})
return render (request, 'gerencia/clientes/clienteVisualizar.html', {'title':'Visualizar Cliente',
'msgTelaInicial':msgTelaInicial,
'clientes':clientes})
return render (request, 'site/login.html', {'title':'Login'})
return render (request, 'site/login.html', {'title':'Login'})
def clientesEditar(request):
if request.user.is_authenticated:
if request.user.last_name == "GERENCIA":
now = datetime.datetime.now().strftime('%H')
now = int(now)
msgTelaInicial = "Olá, " + request.user.get_short_name()
if now >= 4 and now <= 11:
msgTelaInicial = "Bom dia, " + request.user.get_short_name()
elif now > 11 and now < 18:
msgTelaInicial = "Boa Tarde, " + request.user.get_short_name()
elif now >= 18 or now < 4:
msgTelaInicial = "Boa noite, " + request.user.get_short_name()
if request.method == 'GET':
clienteID = request.GET.get('clienteID')
clienteObj = clienteModel.objects.filter(id=clienteID).get()
return render (request, 'gerencia/clientes/clienteEditar.html', {'title':'Editar Cliente',
'msgTelaInicial':msgTelaInicial,
'clienteObj':clienteObj})
if request.method == 'POST' and request.POST.get('clienteID'):
clienteID = request.POST.get('clienteID')
clienteObj = clienteModel.objects.filter(id=clienteID).get()
nome = request.POST.get('nome')
contato = request.POST.get('contato')
cnpj = request.POST.get('cnpj')
email = request.POST.get('email')
telefone = request.POST.get('telefone')
cep = request.POST.get('cep')
endereco = request.POST.get('endereco')
numero = request.POST.get('numero')
bairro = request.POST.get('bairro')
cidade = request.POST.get('cidade')
uf = request.POST.get('uf')
clienteObj.nome = nome
clienteObj.contato = contato
clienteObj.cnpj = cnpj
clienteObj.email = email
clienteObj.telefone = telefone
clienteObj.cep = cep
clienteObj.endereco = endereco
clienteObj.numero = numero
clienteObj.bairro = bairro
clienteObj.cidade = cidade
clienteObj.uf = uf
clienteObj.save()
msgConfirmacao = "Cliente editado com sucesso!"
return render (request, 'gerencia/clientes/clienteVisualizar1.html', {'title':'Visualizar Cliente',
'msgTelaInicial':msgTelaInicial,
'clienteObj':clienteObj,
'msgConfirmacao':msgConfirmacao})
return render (request, 'site/login.html', {'title':'Login'})
return render (request, 'site/login.html', {'title':'Login'})
#Orçamentos
def orcamentosHome(request):
if request.user.is_authenticated:
if request.user.last_name == "GERENCIA":
now = datetime.datetime.now().strftime('%H')
now = int(now)
msgTelaInicial = "Olá, " + request.user.get_short_name()
if now >= 4 and now <= 11:
msgTelaInicial = "Bom dia, " + request.user.get_short_name()
elif now > 11 and now < 18:
msgTelaInicial = "Boa Tarde, " + request.user.get_short_name()
elif now >= 18 or now < 4:
msgTelaInicial = "Boa noite, " + request.user.get_short_name()
return render (request, 'gerencia/orcamentos/home.html', {'title':'Orçamentos',
'msgTelaInicial':msgTelaInicial})
return render (request, 'site/login.html', {'title':'Login'})
return render (request, 'site/login.html', {'title':'Login'})
#Estoque
def estoqueHome(request):
if request.user.is_authenticated:
if request.user.last_name == "GERENCIA":
now = datetime.datetime.now().strftime('%H')
now = int(now)
msgTelaInicial = "Olá, " + request.user.get_short_name()
if now >= 4 and now <= 11:
msgTelaInicial = "Bom dia, " + request.user.get_short_name()
elif now > 11 and now < 18:
msgTelaInicial = "Boa Tarde, " + request.user.get_short_name()
elif now >= 18 or now < 4:
msgTelaInicial = "Boa noite, " + request.user.get_short_name()
return render (request, 'gerencia/estoque/home.html', {'title':'Estoque',
'msgTelaInicial':msgTelaInicial})
return render (request, 'site/login.html', {'title':'Login'})
return render (request, 'site/login.html', {'title':'Login'})
#Equipamentos
def equipamentosHome(request):
if request.user.is_authenticated:
if request.user.last_name == "GERENCIA":
now = datetime.datetime.now().strftime('%H')
now = int(now)
msgTelaInicial = "Olá, " + request.user.get_short_name()
if now >= 4 and now <= 11:
msgTelaInicial = "Bom dia, " + request.user.get_short_name()
elif now > 11 and now < 18:
msgTelaInicial = "Boa Tarde, " + request.user.get_short_name()
elif now >= 18 or now < 4:
msgTelaInicial = "Boa noite, " + request.user.get_short_name()
return render (request, 'gerencia/estoque/equipamentos/home.html', {'title':'Serviços',
'msgTelaInicial':msgTelaInicial})
return render (request, 'site/login.html', {'title':'Login'})
return render (request, 'site/login.html', {'title':'Login'})
def equipamentosNovo(request):
if request.user.is_authenticated:
if request.user.last_name == "GERENCIA":
now = datetime.datetime.now().strftime('%H')
now = int(now)
msgTelaInicial = "Olá, " + request.user.get_short_name()
if now >= 4 and now <= 11:
msgTelaInicial = "Bom dia, " + request.user.get_short_name()
elif now > 11 and now < 18:
msgTelaInicial = "Boa Tarde, " + request.user.get_short_name()
elif now >= 18 or now < 4:
msgTelaInicial = "Boa noite, " + request.user.get_short_name()
if request.method == 'POST' and request.POST.get('nome'):
nome = request.POST.get('nome')
observacao = request.POST.get('observacao')
novaFuncao = funcaoModel(nome=nome, observacao=observacao)
novaFuncao.save()
msgConfirmacao = "Novo serviço cadastrado com sucesso!"
return render (request, 'gerencia/estoque/equipamentos/equipamentoNovo.html', {'title':'Novo Serviço',
'msgTelaInicial':msgTelaInicial,
'msgConfirmacao':msgConfirmacao})
return render (request, 'gerencia/estoque/equipamentos/equipamentoNovo.html', {'title':'Novo Serviço',
'msgTelaInicial':msgTelaInicial})
return render (request, 'site/login.html', {'title':'Login'})
return render (request, 'site/login.html', {'title':'Login'})
def equipamentosVisualizar(request):
if request.user.is_authenticated:
if request.user.last_name == "GERENCIA":
now = datetime.datetime.now().strftime('%H')
funcoes = funcaoModel.objects.all().order_by('id')
now = int(now)
msgTelaInicial = "Olá, " + request.user.get_short_name()
if now >= 4 and now <= 11:
msgTelaInicial = "Bom dia, " + request.user.get_short_name()
elif now > 11 and now < 18:
msgTelaInicial = "Boa Tarde, " + request.user.get_short_name()
elif now >= 18 or now < 4:
msgTelaInicial = "Boa noite, " + request.user.get_short_name()
if request.method == 'POST' and request.POST.get('funcaoID'):
funcaoID = request.POST.get('funcaoID')
funcaoObj = funcaoModel.objects.filter(id=funcaoID).get()
return render (request, 'gerencia/estoque/equipamentos/equipamentoEditar.html', {'title':'Editar Serviço',
'msgTelaInicial':msgTelaInicial,
'funcaoObj':funcaoObj})
return render (request, 'gerencia/estoque/equipamentos/equipamentoVisualizar.html', {'title':'Visualizar Serviço',
'msgTelaInicial':msgTelaInicial,
'funcoes':funcoes})
return render (request, 'site/login.html', {'title':'Login'})
return render (request, 'site/login.html', {'title':'Login'})
def equipamentosSalvar(request):
if request.user.is_authenticated:
if request.user.last_name == "GERENCIA":
now = datetime.datetime.now().strftime('%H')
funcoes = funcaoModel.objects.all().order_by('id')
now = int(now)
msgTelaInicial = "Olá, " + request.user.get_short_name()
if now >= 4 and now <= 11:
msgTelaInicial = "Bom dia, " + request.user.get_short_name()
elif now > 11 and now < 18:
msgTelaInicial = "Boa Tarde, " + request.user.get_short_name()
elif now >= 18 or now < 4:
msgTelaInicial = "Boa noite, " + request.user.get_short_name()
if request.method == 'POST' and request.POST.get('funcaoID'):
funcaoID = request.POST.get('funcaoID')
funcaoObj = funcaoModel.objects.filter(id=funcaoID).get()
nome = request.POST.get('nome')
observacao = request.POST.get('observacao')
funcaoObj.nome = nome
funcaoObj.observacao = observacao
funcaoObj.save()
msgConfirmacao = "Função editada com sucesso!"
return render (request, 'gerencia/estoque/equipamentos/equipamentoVisualizar.html', {'title':'Visualizar Serviços',
'msgTelaInicial':msgTelaInicial,
'funcoes':funcoes,
'msgConfirmacao':msgConfirmacao})
return render (request, 'gerencia/estoque/equipamentos/equipamentoVisualizar.html', {'title':'Visualizar Serviços',
'msgTelaInicial':msgTelaInicial,
'funcoes':funcoes})
return render (request, 'site/login.html', {'title':'Login'})
return render (request, 'site/login.html', {'title':'Login'})
#Contas
def contasHome(request):
if request.user.is_authenticated:
if request.user.last_name == "GERENCIA":
now = datetime.datetime.now().strftime('%H')
now = int(now)
msgTelaInicial = "Olá, " + request.user.get_short_name()
if now >= 4 and now <= 11:
msgTelaInicial = "Bom dia, " + request.user.get_short_name()
elif now > 11 and now < 18:
msgTelaInicial = "Boa Tarde, " + request.user.get_short_name()
elif now >= 18 or now < 4:
msgTelaInicial = "Boa noite, " + request.user.get_short_name()
return render (request, 'gerencia/contas/home.html', {'title':'Contas',
'msgTelaInicial':msgTelaInicial})
return render (request, 'site/login.html', {'title':'Login'})
return render (request, 'site/login.html', {'title':'Login'})
def contasRelatorioHome(request):
if request.user.is_authenticated:
if request.user.last_name == "GERENCIA":
now = datetime.datetime.now().strftime('%H')
now = int(now)
msgTelaInicial = "Olá, " + request.user.get_short_name()
if now >= 4 and now <= 11:
msgTelaInicial = "Bom dia, " + request.user.get_short_name()
elif now > 11 and now < 18:
msgTelaInicial = "Boa Tarde, " + request.user.get_short_name()
elif now >= 18 or now < 4:
msgTelaInicial = "Boa noite, " + request.user.get_short_name()
return render (request, 'gerencia/contas/relatorios.html', {'title':'Relatórios',
'msgTelaInicial':msgTelaInicial})
return render (request, 'site/login.html', {'title':'Login'})
return render (request, 'site/login.html', {'title':'Login'})
def contasPagarHome(request):
if request.user.is_authenticated:
if request.user.last_name == "GERENCIA":
now = datetime.datetime.now().strftime('%H')
now = int(now)
msgTelaInicial = "Olá, " + request.user.get_short_name()
if now >= 4 and now <= 11:
msgTelaInicial = "Bom dia, " + request.user.get_short_name()
elif now > 11 and now < 18:
msgTelaInicial = "Boa Tarde, " + request.user.get_short_name()
elif now >= 18 or now < 4:
msgTelaInicial = "Boa noite, " + request.user.get_short_name()
return render (request, 'gerencia/contas/pagar/home.html', {'title':'Contas a pagar',
'msgTelaInicial':msgTelaInicial})
return render (request, 'site/login.html', {'title':'Login'})
return render (request, 'site/login.html', {'title':'Login'})
def contasReceberHome(request):
if request.user.is_authenticated:
if request.user.last_name == "GERENCIA":
now = datetime.datetime.now().strftime('%H')
now = int(now)
msgTelaInicial = "Olá, " + request.user.get_short_name()
if now >= 4 and now <= 11:
msgTelaInicial = "Bom dia, " + request.user.get_short_name()
elif now > 11 and now < 18:
msgTelaInicial = "Boa Tarde, " + request.user.get_short_name()
elif now >= 18 or now < 4:
msgTelaInicial = "Boa noite, " + request.user.get_short_name()
return render (request, 'gerencia/contas/receber/home.html', {'title':'Contas a receber',
'msgTelaInicial':msgTelaInicial})
return render (request, 'site/login.html', {'title':'Login'})
return render (request, 'site/login.html', {'title':'Login'})
def contasPagarNovo(request):
if request.user.is_authenticated:
if request.user.last_name == "GERENCIA":
now = datetime.datetime.now().strftime('%H')
now = int(now)
msgTelaInicial = "Olá, " + request.user.get_short_name()
if now >= 4 and now <= 11:
msgTelaInicial = "Bom dia, " + request.user.get_short_name()
elif now > 11 and now < 18:
msgTelaInicial = "Boa Tarde, " + request.user.get_short_name()
elif now >= 18 or now < 4:
msgTelaInicial = "Boa noite, " + request.user.get_short_name()
if request.method == "POST" and request.POST.get('nome') != None:
nome = request.POST.get('nome')
dataVencimento = request.POST.get('dataVencimento')
if request.POST.get('fixa') != None:
fixa = "1"
else:
fixa = "2"
observacao = request.POST.get('observacao')
valor = request.POST.get('valor')
novaContaPagar = contaPagarModel(nome=nome, observacao=observacao, dataVencimento=dataVencimento, fixa=fixa, valor=valor)
novaContaPagar.save()
msgConfirmacao = "Conta registrada com sucesso!"
return render (request, 'gerencia/contas/pagar/pagarNovo.html', {'title':'Nova conta a pagar',
'msgTelaInicial':msgTelaInicial,
'msgConfirmacao':msgConfirmacao})
return render (request, 'gerencia/contas/pagar/pagarNovo.html', {'title':'Nova conta a pagar',
'msgTelaInicial':msgTelaInicial})
return render (request, 'site/login.html', {'title':'Login'})
return render (request, 'site/login.html', {'title':'Login'})
def contasPagarVisualizar(request):
if request.user.is_authenticated:
if request.user.last_name == "GERENCIA":
now = datetime.datetime.now().strftime('%H')
now = int(now)
contas = contaPagarModel.objects.all()
msgTelaInicial = "Olá, " + request.user.get_short_name()
if now >= 4 and now <= 11:
msgTelaInicial = "Bom dia, " + request.user.get_short_name()
elif now > 11 and now < 18:
msgTelaInicial = "Boa Tarde, " + request.user.get_short_name()
elif now >= 18 or now < 4:
msgTelaInicial = "Boa noite, " + request.user.get_short_name()
if request.method == "POST" and request.POST.get('contaID') != None:
contaID = request.POST.get('contaID')
contaObj = contaPagarModel.objects.filter(id=contaID).get()
return render (request, 'gerencia/contas/pagar/pagarVisualizar.html', {'title':'Visualizar conta a pagar',
'msgTelaInicial':msgTelaInicial,
'contaObj':contaObj})
return render (request, 'gerencia/contas/pagar/pagarBusca.html', {'title':'Visualizar conta a pagar',
'msgTelaInicial':msgTelaInicial,
'contas':contas})
return render (request, 'site/login.html', {'title':'Login'})
return render (request, 'site/login.html', {'title':'Login'})
def contasPagarEditar(request):
if request.user.is_authenticated:
if request.user.last_name == "GERENCIA":
now = datetime.datetime.now().strftime('%H')
now = int(now)
try:
contaID = request.GET.get('contaID')
contaObj = contaPagarModel.objects.filter(id=contaID).get()
except:
print("Erro ao procurar com GET")
msgTelaInicial = "Olá, " + request.user.get_short_name()
if now >= 4 and now <= 11:
msgTelaInicial = "Bom dia, " + request.user.get_short_name()
elif now > 11 and now < 18:
msgTelaInicial = "Boa Tarde, " + request.user.get_short_name()
elif now >= 18 or now < 4:
msgTelaInicial = "Boa noite, " + request.user.get_short_name()
if request.method == "POST" and request.POST.get('contaID') != None:
contaID = request.POST.get('contaID')
contaObj = contaPagarModel.objects.filter(id=contaID).get()
nome = request.POST.get('nome')
dataVencimento = request.POST.get('dataVencimento')
if request.POST.get('fixa') != None:
fixa = "1"
else:
fixa = "2"
observacao = request.POST.get('observacao')
valor = request.POST.get('valor')
contaObj.nome = nome
contaObj.dataVencimento = dataVencimento
contaObj.fixa = fixa
contaObj.observacao = observacao
contaObj.valor = valor
contaObj.save()
msgConfirmacao = "Conta alterada com sucesso!"
contas = contaPagarModel.objects.all()
return render (request, 'gerencia/contas/pagar/pagarBusca.html', {'title':'Editar conta a pagar',
'msgTelaInicial':msgTelaInicial,
'contaObj':contaObj,
'msgConfirmacao':msgConfirmacao,
'contas':contas})
return render (request, 'gerencia/contas/pagar/pagarEditar.html', {'title':'Editar conta a pagar',
'msgTelaInicial':msgTelaInicial,
'contaObj':contaObj})
return render (request, 'site/login.html', {'title':'Login'})
return render (request, 'site/login.html', {'title':'Login'})
def contasReceberNovo(request):
if request.user.is_authenticated:
if request.user.last_name == "GERENCIA":
now = datetime.datetime.now().strftime('%H')
now = int(now)
msgTelaInicial = "Olá, " + request.user.get_short_name()
if now >= 4 and now <= 11:
msgTelaInicial = "Bom dia, " + request.user.get_short_name()
elif now > 11 and now < 18:
msgTelaInicial = "Boa Tarde, " + request.user.get_short_name()
elif now >= 18 or now < 4:
msgTelaInicial = "Boa noite, " + request.user.get_short_name()
if request.method == "POST" and request.POST.get != None:
nome = request.POST.get('nome')
dataVencimento = request.POST.get('dataVencimento')
if request.POST.get('fixa') != None:
fixa = "1"
else:
fixa = "2"
observacao = request.POST.get('observacao')
valor = request.POST.get('valor')
novaContaReceber = contaReceberModel(nome=nome, observacao=observacao, dataVencimento=dataVencimento, fixa=fixa, valor=valor)
novaContaReceber.save()
msgConfirmacao = "Conta registrada com sucesso!"
return render (request, 'gerencia/contas/receber/receberNovo.html', {'title':'Nova conta a receber',
'msgTelaInicial':msgTelaInicial,
'msgConfirmacao':msgConfirmacao})
return render (request, 'gerencia/contas/receber/receberNovo.html', {'title':'Nova conta a receber',
'msgTelaInicial':msgTelaInicial})
return render (request, 'site/login.html', {'title':'Login'})
return render (request, 'site/login.html', {'title':'Login'})
def contasReceberVisualizar(request):
if request.user.is_authenticated:
if request.user.last_name == "GERENCIA":
now = datetime.datetime.now().strftime('%H')
now = int(now)
contas = contaReceberModel.objects.all()
msgTelaInicial = "Olá, " + request.user.get_short_name()
if now >= 4 and now <= 11:
msgTelaInicial = "Bom dia, " + request.user.get_short_name()
elif now > 11 and now < 18:
msgTelaInicial = "Boa Tarde, " + request.user.get_short_name()
elif now >= 18 or now < 4:
msgTelaInicial = "Boa noite, " + request.user.get_short_name()
if request.method == "POST" and request.POST.get('contaID') != None:
contaID = request.POST.get('contaID')
contaObj = contaReceberModel.objects.filter(id=contaID).get()
return render (request, 'gerencia/contas/receber/receberVisualizar.html', {'title':'Visualizar conta a receber',
'msgTelaInicial':msgTelaInicial,
'contaObj':contaObj})
return render (request, 'gerencia/contas/receber/receberBusca.html', {'title':'Visualizar conta a receber',
'msgTelaInicial':msgTelaInicial,
'contas':contas})
return render (request, 'site/login.html', {'title':'Login'})
return render (request, 'site/login.html', {'title':'Login'})
def contasReceberEditar(request):
if request.user.is_authenticated:
if request.user.last_name == "GERENCIA":
now = datetime.datetime.now().strftime('%H')
now = int(now)
try:
contaID = request.GET.get('contaID')
contaObj = contaReceberModel.objects.filter(id=contaID).get()
except:
print("Erro ao procurar com GET")
msgTelaInicial = "Olá, " + request.user.get_short_name()
if now >= 4 and now <= 11:
msgTelaInicial = "Bom dia, " + request.user.get_short_name()
elif now > 11 and now < 18:
msgTelaInicial = "Boa Tarde, " + request.user.get_short_name()
elif now >= 18 or now < 4:
msgTelaInicial = "Boa noite, " + request.user.get_short_name()
if request.method == "POST" and request.POST.get('contaID') != None:
contaID = request.POST.get('contaID')
contaObj = contaReceberModel.objects.filter(id=contaID).get()
nome = request.POST.get('nome')
dataVencimento = request.POST.get('dataVencimento')
if request.POST.get('fixa') != None:
fixa = "1"
else:
fixa = "2"
observacao = request.POST.get('observacao')
valor = request.POST.get('valor')
contaObj.nome = nome
contaObj.dataVencimento = dataVencimento
contaObj.fixa = fixa
contaObj.observacao = observacao
contaObj.valor = valor
contaObj.save()
msgConfirmacao = "Conta alterada com sucesso!"
contas = contaReceberModel.objects.all()
return render (request, 'gerencia/contas/receber/receberBusca.html', {'title':'Editar conta a receber',
'msgTelaInicial':msgTelaInicial,
'contaObj':contaObj,
'msgConfirmacao':msgConfirmacao,
'contas':contas})
return render (request, 'gerencia/contas/receber/receberEditar.html', {'title':'Editar conta a receber',
'msgTelaInicial':msgTelaInicial,
'contaObj':contaObj})
return render (request, 'site/login.html', {'title':'Login'})
return render (request, 'site/login.html', {'title':'Login'})
#Caixa
def caixaHome(request):
if request.user.is_authenticated:
if request.user.last_name == "GERENCIA":
now = datetime.datetime.now().strftime('%H')
now = int(now)
msgTelaInicial = "Olá, " + request.user.get_short_name()
if now >= 4 and now <= 11:
msgTelaInicial = "Bom dia, " + request.user.get_short_name()
elif now > 11 and now < 18:
msgTelaInicial = "Boa Tarde, " + request.user.get_short_name()
elif now >= 18 or now < 4:
msgTelaInicial = "Boa noite, " + request.user.get_short_name()
return render (request, 'gerencia/caixa/home.html', {'title':'Caixa',
'msgTelaInicial':msgTelaInicial})
return render (request, 'site/login.html', {'title':'Login'})
return render (request, 'site/login.html', {'title':'Login'})
def caixaEntradaNovo(request):
if request.user.is_authenticated:
if request.user.last_name == "GERENCIA":
now = datetime.datetime.now().strftime('%H')
now = int(now)
msgTelaInicial = "Olá, " + request.user.get_short_name()
if now >= 4 and now <= 11:
msgTelaInicial = "Bom dia, " + request.user.get_short_name()
elif now > 11 and now < 18:
msgTelaInicial = "Boa Tarde, " + request.user.get_short_name()
elif now >= 18 or now < 4:
msgTelaInicial = "Boa noite, " + request.user.get_short_name()
return render (request, 'gerencia/caixa/entradaNovo.html', {'title':'Nova entrada',
'msgTelaInicial':msgTelaInicial})
return render (request, 'site/login.html', {'title':'Login'})
return render (request, 'site/login.html', {'title':'Login'})
def caixaSaidaNovo(request):
if request.user.is_authenticated:
if request.user.last_name == "GERENCIA":
now = datetime.datetime.now().strftime('%H')
now = int(now)
msgTelaInicial = "Olá, " + request.user.get_short_name()
if now >= 4 and now <= 11:
msgTelaInicial = "Bom dia, " + request.user.get_short_name()
elif now > 11 and now < 18:
msgTelaInicial = "Boa Tarde, " + request.user.get_short_name()
elif now >= 18 or now < 4:
msgTelaInicial = "Boa noite, " + request.user.get_short_name()
return render (request, 'gerencia/caixa/saidaNovo.html', {'title':'Nova saída',
'msgTelaInicial':msgTelaInicial})
return render (request, 'site/login.html', {'title':'Login'})
return render (request, 'site/login.html', {'title':'Login'})
def balancoHome(request):
if request.user.is_authenticated:
if request.user.last_name == "GERENCIA":
now = datetime.datetime.now().strftime('%H')
now = int(now)
msgTelaInicial = "Olá, " + request.user.get_short_name()
if now >= 4 and now <= 11:
msgTelaInicial = "Bom dia, " + request.user.get_short_name()
elif now > 11 and now < 18:
msgTelaInicial = "Boa Tarde, " + request.user.get_short_name()
elif now >= 18 or now < 4:
msgTelaInicial = "Boa noite, " + request.user.get_short_name()
return render (request, 'gerencia/caixa/balanco.html', {'title':'Balanço',
'msgTelaInicial':msgTelaInicial})
return render (request, 'site/login.html', {'title':'Login'})
return render (request, 'site/login.html', {'title':'Login'})

# --- next file: /rapid/__init__.py (repo: linhao1998/rapid, MIT license, Python) ---
from gym.envs.registration import register
register(
id='EpisodeInvertedPendulum-v2',
entry_point='rapid.mujoco_envs:EpisodeInvertedPendulumEnv',
max_episode_steps=1000,
reward_threshold=950.0,
)
register(
id='EpisodeSwimmer-v2',
entry_point='rapid.mujoco_envs:EpisodeSwimmerEnv',
max_episode_steps=1000,
reward_threshold=360.0,
)
register(
id='DensityEpisodeSwimmer-v2',
entry_point='rapid.mujoco_envs:DensityEpisodeSwimmerEnv',
max_episode_steps=1000,
reward_threshold=360.0,
)
register(
id='ViscosityEpisodeSwimmer-v2',
entry_point='rapid.mujoco_envs:ViscosityEpisodeSwimmerEnv',
max_episode_steps=1000,
reward_threshold=360.0,
)
register(
id='EpisodeWalker2d-v2',
max_episode_steps=1000,
entry_point='rapid.mujoco_envs:EpisodeWalker2dEnv',
)
register(
id='EpisodeHopper-v2',
entry_point='rapid.mujoco_envs:EpisodeHopperEnv',
max_episode_steps=1000,
reward_threshold=3800.0,
)
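# Minimal usage sketch for the environments registered above, kept as comments
# so that importing this package stays side-effect free. It assumes gym and the
# MuJoCo dependencies behind rapid.mujoco_envs are installed, and follows the
# classic gym API these register() calls target:
#
#   import gym
#   import rapid  # importing the package runs the register() calls above
#
#   env = gym.make('EpisodeSwimmer-v2')
#   obs = env.reset()
#   obs, reward, done, info = env.step(env.action_space.sample())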

# --- next file: /config.py (repo: paulgowdy/hal_split, no license, Python) ---
BOARD_SIZE = 9
MAX_NB_SHIPS = 2
NB_SHIP_ACTIONS = 5
#TRAIN_EPISODES = 10
STEPS_PER_EP = 200
GAMMA = 0.99
PPO_BATCHES = 10000000
PPO_STEPS = 32
LOSS_CLIPPING = 0.2
ENTROPY_LOSS = 5e-2
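# These constants read as PPO hyperparameters for a Halite-style board game
# (a BOARD_SIZE x BOARD_SIZE grid, up to MAX_NB_SHIPS ships, NB_SHIP_ACTIONS
# discrete moves per ship). As an illustration only (not this project's own
# implementation), LOSS_CLIPPING and ENTROPY_LOSS conventionally enter the
# clipped PPO policy loss like so:
#
#   import numpy as np
#
#   def ppo_policy_loss(ratio, advantage, entropy):
#       clipped = np.clip(ratio, 1.0 - LOSS_CLIPPING, 1.0 + LOSS_CLIPPING) * advantage
#       return -np.mean(np.minimum(ratio * advantage, clipped)) - ENTROPY_LOSS * np.mean(entropy)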

# --- next file: /DAT/ED6_DT01/T0401.帕赛尔农场.py (repo: otoboku/ED6-FC-Steam-CN, Python) ---
from ED6ScenarioHelper import *
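# Decompiled Falcom ED6 (Trails in the Sky FC) scenario script for the farm map
# in the Rolent region (帕赛尔农场): CreateScenaFile/BuildStringList declare map
# metadata and NPC name strings, DeclNpc/DeclEvent place actors and trigger
# volumes, and ScpFunction lists the event functions whose decompiled bodies
# follow.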
def main():
# 帕赛尔农场
CreateScenaFile(
FileName = 'T0401 ._SN',
MapName = 'Rolent',
Location = 'T0401.x',
MapIndex = 13,
MapDefaultBGM = "ed60084",
Flags = 0,
EntryFunctionIndex = 0xFFFF,
Reserved = 0,
IncludedScenario = [
'',
'',
'',
'',
'',
'',
'',
''
],
)
BuildStringList(
'@FileName', # 8
'魔兽', # 9
'魔兽', # 10
'魔兽', # 11
'魔兽', # 12
'魔兽', # 13
'魔兽', # 14
'魔兽', # 15
'缇欧', # 16
'维鲁', # 17
'查儿', # 18
'弗兰兹', # 19
'汉娜', # 20
'艾丝蒂尔', # 21
'牛', # 22
'牛', # 23
'目标用摄像机', # 24
'米尔西街道方向', # 25
)
DeclEntryPoint(
Unknown_00 = 21000,
Unknown_04 = 0,
Unknown_08 = 24000,
Unknown_0C = 4,
Unknown_0E = 0,
Unknown_10 = 0,
Unknown_14 = 8000,
Unknown_18 = -10000,
Unknown_1C = 0,
Unknown_20 = 0,
Unknown_24 = 0,
Unknown_28 = 3000,
Unknown_2C = 262,
Unknown_30 = 45,
Unknown_32 = 0,
Unknown_34 = 360,
Unknown_36 = 0,
Unknown_38 = 0,
Unknown_3A = 13,
InitScenaIndex = 0,
InitFunctionIndex = 0,
EntryScenaIndex = 0,
EntryFunctionIndex = 1,
)
DeclEntryPoint(
Unknown_00 = 21000,
Unknown_04 = 0,
Unknown_08 = 24000,
Unknown_0C = 4,
Unknown_0E = 0,
Unknown_10 = 0,
Unknown_14 = 8000,
Unknown_18 = -10000,
Unknown_1C = 0,
Unknown_20 = 0,
Unknown_24 = 0,
Unknown_28 = 3000,
Unknown_2C = 262,
Unknown_30 = 45,
Unknown_32 = 0,
Unknown_34 = 360,
Unknown_36 = 0,
Unknown_38 = 0,
Unknown_3A = 13,
InitScenaIndex = 0,
InitFunctionIndex = 0,
EntryScenaIndex = 0,
EntryFunctionIndex = 1,
)
AddCharChip(
'ED6_DT09/CH10100 ._CH', # 00
'ED6_DT09/CH10101 ._CH', # 01
'ED6_DT07/CH02480 ._CH', # 02
'ED6_DT07/CH01060 ._CH', # 03
'ED6_DT07/CH01070 ._CH', # 04
'ED6_DT07/CH01020 ._CH', # 05
'ED6_DT07/CH01030 ._CH', # 06
'ED6_DT07/CH00100 ._CH', # 07
'ED6_DT07/CH01710 ._CH', # 08
'ED6_DT07/CH00107 ._CH', # 09
'ED6_DT07/CH00102 ._CH', # 0A
)
AddCharChipPat(
'ED6_DT09/CH10100P._CP', # 00
'ED6_DT09/CH10101P._CP', # 01
'ED6_DT07/CH02480P._CP', # 02
'ED6_DT07/CH01060P._CP', # 03
'ED6_DT07/CH01070P._CP', # 04
'ED6_DT07/CH01020P._CP', # 05
'ED6_DT07/CH01030P._CP', # 06
'ED6_DT07/CH00100P._CP', # 07
'ED6_DT07/CH01710P._CP', # 08
'ED6_DT07/CH00107P._CP', # 09
'ED6_DT07/CH00102P._CP', # 0A
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 0,
Unknown2 = 0,
Unknown3 = 0,
ChipIndex = 0x0,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 0,
Unknown2 = 0,
Unknown3 = 2,
ChipIndex = 0x1,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 0,
Unknown2 = 0,
Unknown3 = 2,
ChipIndex = 0x1,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 0,
Unknown2 = 0,
Unknown3 = 2,
ChipIndex = 0x1,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 0,
Unknown2 = 0,
Unknown3 = 2,
ChipIndex = 0x0,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 0,
Unknown2 = 0,
Unknown3 = 2,
ChipIndex = 0x0,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 0,
Unknown2 = 0,
Unknown3 = 2,
ChipIndex = 0x0,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = -75800,
Z = 0,
Y = 2400,
Direction = 0,
Unknown2 = 0,
Unknown3 = 0,
ChipIndex = 0x2,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 1730,
Z = 0,
Y = 24300,
Direction = 0,
Unknown2 = 0,
Unknown3 = 0,
ChipIndex = 0x3,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 1730,
Z = 0,
Y = 23000,
Direction = 0,
Unknown2 = 0,
Unknown3 = 0,
ChipIndex = 0x4,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = -75800,
Z = 0,
Y = 2400,
Direction = 0,
Unknown2 = 0,
Unknown3 = 0,
ChipIndex = 0x5,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = -75800,
Z = 0,
Y = 2400,
Direction = 0,
Unknown2 = 0,
Unknown3 = 0,
ChipIndex = 0x6,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 0,
Unknown2 = 0,
Unknown3 = 0,
ChipIndex = 0x7,
NpcIndex = 0x101,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 39010,
Z = 600,
Y = 23300,
Direction = 180,
Unknown2 = 0,
Unknown3 = 8,
ChipIndex = 0x8,
NpcIndex = 0x105,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 4,
)
DeclNpc(
X = 40980,
Z = 600,
Y = 23300,
Direction = 180,
Unknown2 = 0,
Unknown3 = 8,
ChipIndex = 0x8,
NpcIndex = 0x105,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 4,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 0,
Unknown2 = 0,
Unknown3 = 0,
ChipIndex = 0x0,
NpcIndex = 0x80,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 23910,
Z = 30,
Y = 66820,
Direction = 0,
Unknown2 = 0,
Unknown3 = 0,
ChipIndex = 0x0,
NpcIndex = 0xFF,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclEvent(
X = 41200,
Y = -500,
Z = 21800,
Range = 48300,
Unknown_10 = 0x3E8,
Unknown_14 = 0x4FB0,
Unknown_18 = 0x0,
Unknown_1C = 11,
)
DeclEvent(
X = 34900,
Y = -1000,
Z = 33900,
Range = 43000,
Unknown_10 = 0x3E8,
Unknown_14 = 0xAD70,
Unknown_18 = 0x0,
Unknown_1C = 12,
)
DeclEvent(
X = 38700,
Y = -500,
Z = 37000,
Range = 35200,
Unknown_10 = 0x3E8,
Unknown_14 = 0xB4DC,
Unknown_18 = 0x0,
Unknown_1C = 29,
)
DeclEvent(
X = 26000,
Y = -500,
Z = 26000,
Range = 19000,
Unknown_10 = 0x3E8,
Unknown_14 = 0x7148,
Unknown_18 = 0x0,
Unknown_1C = 30,
)
DeclEvent(
X = 42360,
Y = -500,
Z = 15300,
Range = 50900,
Unknown_10 = 0x3E8,
Unknown_14 = 0x490C,
Unknown_18 = 0x0,
Unknown_1C = 31,
)
DeclEvent(
X = 35830,
Y = -1000,
Z = 26140,
Range = 34270,
Unknown_10 = 0x3E8,
Unknown_14 = 0x5D2A,
Unknown_18 = 0x0,
Unknown_1C = 13,
)
DeclEvent(
X = 39000,
Y = -500,
Z = 42000,
Range = 1000,
Unknown_10 = 0x3E8,
Unknown_14 = 0x0,
Unknown_18 = 0x40,
Unknown_1C = 18,
)
DeclEvent(
X = 22700,
Y = -500,
Z = 25300,
Range = 1000,
Unknown_10 = 0x3E8,
Unknown_14 = 0x0,
Unknown_18 = 0x40,
Unknown_1C = 19,
)
DeclEvent(
X = 46100,
Y = -500,
Z = 15200,
Range = 1000,
Unknown_10 = 0x3E8,
Unknown_14 = 0x0,
Unknown_18 = 0x40,
Unknown_1C = 20,
)
ScpFunction(
"Function_0_486", # 00, 0
"Function_1_552", # 01, 1
"Function_2_58D", # 02, 2
"Function_3_5A3", # 03, 3
"Function_4_5DE", # 04, 4
"Function_5_5E4", # 05, 5
"Function_6_5EA", # 06, 6
"Function_7_61D", # 07, 7
"Function_8_62C", # 08, 8
"Function_9_643", # 09, 9
"Function_10_857", # 0A, 10
"Function_11_861", # 0B, 11
"Function_12_92E", # 0C, 12
"Function_13_C37", # 0D, 13
"Function_14_F16", # 0E, 14
"Function_15_1178", # 0F, 15
"Function_16_119B", # 10, 16
"Function_17_11A3", # 11, 17
"Function_18_11B8", # 12, 18
"Function_19_11CC", # 13, 19
"Function_20_11E0", # 14, 20
"Function_21_11F4", # 15, 21
"Function_22_225A", # 16, 22
"Function_23_229E", # 17, 23
"Function_24_230E", # 18, 24
"Function_25_2323", # 19, 25
"Function_26_2338", # 1A, 26
"Function_27_234D", # 1B, 27
"Function_28_238A", # 1C, 28
"Function_29_2392", # 1D, 29
"Function_30_2424", # 1E, 30
"Function_31_24A2", # 1F, 31
"Function_32_2534", # 20, 32
"Function_33_2A62", # 21, 33
"Function_34_2A6A", # 22, 34
)
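# ScpFunction above is the scena's function table: the InitFunctionIndex and
# TalkFunctionIndex fields of DeclNpc, and opcodes such as Call()/OP_43(),
# appear to refer to functions by their index in this list. The Function_*
# definitions below are the decompiled bodies, with label()/Jump() mirroring
# the original bytecode control flow.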
def Function_0_486(): pass
label("Function_0_486")
SetChrFlags(0x9, 0x40)
SetChrFlags(0xA, 0x40)
SetChrFlags(0xB, 0x40)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x46, 4)), scpexpr(EXPR_END)), "loc_4C7")
OP_A2(0x0)
OP_A3(0x1)
OP_A3(0x2)
ClearChrFlags(0x9, 0x8)
ClearChrFlags(0x9, 0x80)
SetChrPos(0x9, 39000, 0, 42000, 270)
OP_43(0x9, 0x3, 0x0, 0x2)
label("loc_4C7")
Switch(
(scpexpr(EXPR_PUSH_VALUE_INDEX, 0x0), scpexpr(EXPR_END)),
(1, "loc_4DB"),
(102, "loc_53A"),
(103, "loc_53A"),
(SWITCH_DEFAULT, "loc_551"),
)
label("loc_4DB")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x46, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_52A")
SetMapFlags(0x400000)
ClearMapFlags(0x1)
OP_6D(28000, 0, 41000, 0)
OP_6C(200000, 0)
OP_6B(5000, 0)
SetChrFlags(0x101, 0x80)
SetChrFlags(0x102, 0x80)
FadeToBright(3000, 0)
Event(0, 6)
Jump("loc_537")
label("loc_52A")
FadeToBright(3000, 0)
Event(0, 9)
label("loc_537")
Jump("loc_551")
label("loc_53A")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x46, 1)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x46, 2)), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x46, 4)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_54E")
Event(0, 14)
label("loc_54E")
Jump("loc_551")
label("loc_551")
Return()
# Function_0_486 end
def Function_1_552(): pass
label("Function_1_552")
Jc((scpexpr(EXPR_PUSH_VALUE_INDEX, 0x29), scpexpr(EXPR_PUSH_LONG, 0x393), scpexpr(EXPR_EQU), scpexpr(EXPR_PUSH_VALUE_INDEX, 0x3), scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_EQU), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_570")
OP_4F(0x1, (scpexpr(EXPR_PUSH_LONG, 0xF), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
label("loc_570")
OP_16(0x2, 0xFA0, 0xFFFE8518, 0xFFFE8900, 0x30004)
OP_1B(0x4, 0x0, 0x22)
OP_22(0x1CF, 0x0, 0x64)
Return()
# Function_1_552 end
def Function_2_58D(): pass
label("Function_2_58D")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_5A2")
OP_99(0xFE, 0x0, 0x7, 0x5DC)
Jump("Function_2_58D")
label("loc_5A2")
Return()
# Function_2_58D end
def Function_3_5A3(): pass
label("Function_3_5A3")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_5DD")
OP_95(0xFE, 0xFFFFFC18, 0x0, 0x0, 0x258, 0x640)
OP_95(0xFE, 0x3E8, 0x0, 0x0, 0x258, 0x640)
Jump("Function_3_5A3")
label("loc_5DD")
Return()
# Function_3_5A3 end
def Function_4_5DE(): pass
label("Function_4_5DE")
OP_22(0x190, 0x0, 0x64)
Return()
# Function_4_5DE end
def Function_5_5E4(): pass
label("Function_5_5E4")
OP_22(0x191, 0x0, 0x64)
Return()
# Function_5_5E4 end
def Function_6_5EA(): pass
label("Function_6_5EA")
EventBegin(0x0)
Sleep(500)
OP_43(0x0, 0x2, 0x0, 0x8)
OP_43(0x0, 0x1, 0x0, 0x7)
OP_6C(225000, 9000)
FadeToDark(1000, 0, -1)
OP_0D()
NewScene("ED6_DT01/T0411 ._SN", 1, 0, 0)
IdleLoop()
Return()
# Function_6_5EA end
def Function_7_61D(): pass
label("Function_7_61D")
Sleep(2000)
OP_6B(3000, 9000)
Return()
# Function_7_61D end
def Function_8_62C(): pass
label("Function_8_62C")
Sleep(2000)
OP_6D(16700, 0, 18800, 9000)
Return()
# Function_8_62C end
def Function_9_643(): pass
label("Function_9_643")
SetMapFlags(0x400000)
ClearMapFlags(0x1)
OP_6D(26700, 0, 14500, 0)
OP_6C(225000, 0)
OP_6B(3500, 0)
SetChrPos(0x101, 28000, 0, 14200, 90)
SetChrPos(0x102, 25350, 570, 15020, 90)
EventBegin(0x0)
OP_43(0x0, 0x1, 0x0, 0xA)
OP_8E(0x102, 0x6E28, 0x0, 0x3C28, 0x5DC, 0x0)
Sleep(1000)
ChrTalk(
0x101,
"#004F哇~天色已经这么暗了。\x02",
)
CloseMessageWindow()
TurnDirection(0x0, 0x1, 400)
ChrTalk(
0x101,
(
"#000F喂,约修亚。\x01",
"该怎么巡逻比较好呢?\x02",
)
)
CloseMessageWindow()
TurnDirection(0x1, 0x0, 400)
ChrTalk(
0x102,
(
"#010F是呀……\x02\x03",
"#010F屋子周围、田地里、牲口棚\x01",
"和温室都要巡视一遍吧。\x02\x03",
"#010F这样整个农场都能照顾到了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#006F嗯,知道了。\x02\x03",
"#001F好的~我们出发吧!\x02",
)
)
CloseMessageWindow()
OP_44(0x101, 0xFF)
OP_44(0x102, 0xFF)
EventEnd(0x0)
ClearMapFlags(0x400000)
Return()
# Function_9_643 end
def Function_10_857(): pass
label("Function_10_857")
OP_6B(3000, 4500)
Return()
# Function_10_857 end
def Function_11_861(): pass
label("Function_11_861")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x46, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_92D")
OP_A2(0x231)
OP_28(0x2, 0x1, 0x10)
EventBegin(0x0)
OP_6D(48090, 480, 19550, 3000)
Fade(1000)
SetChrPos(0x101, 43760, 280, 21120, 135)
SetChrPos(0x102, 42420, 370, 21480, 135)
OP_6D(43760, 280, 21120, 0)
OP_6C(315000, 0)
Sleep(1000)
OP_62(0x101, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
Sleep(2000)
OP_63(0x101)
ChrTalk(
0x101,
"#000F……魔兽好像也不在这里。\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
"#010F是啊,到别的地方看看吧。\x02",
)
CloseMessageWindow()
EventEnd(0x0)
label("loc_92D")
Return()
# Function_11_861 end
def Function_12_92E(): pass
label("Function_12_92E")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x46, 3)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_C36")
OP_A2(0x233)
OP_28(0x2, 0x1, 0x40)
EventBegin(0x0)
OP_6D(39020, -300, 38660, 2000)
Fade(1000)
SetChrPos(0x101, 42390, -40, 37580, 270)
SetChrPos(0x102, 42500, 30, 38900, 270)
OP_6D(40310, -300, 38250, 0)
OP_6C(135000, 0)
Sleep(1000)
ChrTalk(
0x101,
(
"#000F#4P真安静啊~……\x01",
"只能听到虫子的声音。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F#4P看样子那些魔兽\x01",
"还没有进入菜园里面。\x02\x03",
"#010F是因为我们在巡逻吗?\x02",
)
)
CloseMessageWindow()
TurnDirection(0x101, 0x102, 400)
ChrTalk(
0x101,
(
"#501F#4P对了,约修亚。\x01",
"你小时候有没有听过这种说法?\x02\x03",
"#501F就是说,\x01",
"婴儿是从白菜地里长出来的。\x02",
)
)
CloseMessageWindow()
TurnDirection(0x102, 0x101, 400)
ChrTalk(
0x102,
(
"#017F#4P又突然说这些了……\x02\x03",
"#010F我倒是听说过\x01",
"是长着银色翅膀的天使送过来的……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#501F#4P嗯……不同的地方说法也不同啊。\x02\x03",
"#501F…………………………\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F#4P…………………………\x02\x03",
"#010F我们继续巡逻吧?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#000F#4P嗯。\x02",
)
CloseMessageWindow()
EventEnd(0x0)
label("loc_C36")
Return()
# Function_12_92E end
def Function_13_C37(): pass
label("Function_13_C37")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x46, 1)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x46, 2)), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x46, 4)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_F15")
OP_A2(0x233)
OP_A2(0x234)
OP_28(0x2, 0x1, 0x80)
EventBegin(0x0)
SetMapFlags(0x400000)
ClearMapFlags(0x1)
SetChrPos(0x9, 24900, 50, 30180, 86)
ClearChrFlags(0x9, 0x8)
ClearChrFlags(0x9, 0x80)
OP_43(0x9, 0x3, 0x0, 0x2)
ChrTalk(
0x101,
"#004F啊……\x02",
)
CloseMessageWindow()
Sleep(100)
Fade(1000)
SetChrPos(0x101, 35400, 350, 25540, 267)
SetChrPos(0x102, 35360, 340, 24500, 261)
def lambda_CCC():
label("loc_CCC")
TurnDirection(0xFE, 0x9, 0)
OP_48()
Jump("loc_CCC")
QueueWorkItem2(0x101, 1, lambda_CCC)
def lambda_CDD():
label("loc_CDD")
TurnDirection(0xFE, 0x9, 0)
OP_48()
Jump("loc_CDD")
QueueWorkItem2(0x102, 1, lambda_CDD)
OP_0D()
def lambda_CEF():
OP_6D(34540, 80, 30070, 3000)
ExitThread()
QueueWorkItem(0x101, 2, lambda_CEF)
def lambda_D07():
OP_6C(45000, 4000)
ExitThread()
QueueWorkItem(0x101, 3, lambda_D07)
def lambda_D17():
OP_8E(0x9, 0x8656, 0x46, 0x758A, 0xBB8, 0x0)
ExitThread()
QueueWorkItem(0x9, 1, lambda_D17)
WaitChrThread(0x9, 0x1)
OP_62(0x9, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
OP_22(0x27, 0x0, 0x64)
TurnDirection(0x9, 0x101, 200)
Sleep(1000)
OP_95(0x9, 0x0, 0x0, 0x0, 0x320, 0x2EE0)
OP_22(0x194, 0x0, 0x64)
ChrTalk(
0x9,
"咪呜!\x02",
)
CloseMessageWindow()
OP_62(0x9, 0x0, 2000, 0x28, 0x2B, 0x64, 0x3)
OP_22(0x81, 0x0, 0x64)
OP_8E(0x9, 0xAF64, 0x118, 0x9042, 0x2EE0, 0x0)
SetChrPos(0x9, 39000, 0, 42000, 270)
ChrTalk(
0x101,
"#004F啊,逃跑了!\x02",
)
CloseMessageWindow()
OP_44(0x101, 0xFF)
OP_44(0x102, 0xFF)
OP_8E(0x101, 0x89C6, 0x140, 0x6EAA, 0x1388, 0x0)
OP_8C(0x101, 45, 400)
OP_44(0x101, 0xFF)
ChrTalk(
0x101,
"#005F喂!给我等一下!\x02",
)
CloseMessageWindow()
OP_8E(0x102, 0x8994, 0x136, 0x68CE, 0x7D0, 0x0)
TurnDirection(0x102, 0x101, 400)
ChrTalk(
0x102,
(
"#012F气息还没有消失……\x02\x03",
"#012F那只魔兽应该还在菜园里面。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#009F哼哼,正合我意……\x02\x03",
"#005F绝对要抓住它!\x02",
)
)
CloseMessageWindow()
OP_44(0x101, 0xFF)
OP_44(0x102, 0xFF)
OP_44(0x9, 0xFF)
OP_A2(0x0)
OP_A3(0x1)
OP_A3(0x2)
Sleep(50)
EventEnd(0x4)
ClearMapFlags(0x400000)
OP_43(0x9, 0x3, 0x0, 0x2)
label("loc_F15")
Return()
# Function_13_C37 end
def Function_14_F16(): pass
label("Function_14_F16")
OP_A2(0x233)
OP_A2(0x234)
OP_28(0x2, 0x1, 0x80)
EventBegin(0x0)
SetMapFlags(0x400000)
ClearMapFlags(0x1)
SetChrPos(0x9, 24100, 0, 54800, 0)
ClearChrFlags(0x9, 0x8)
ClearChrFlags(0x9, 0x80)
OP_43(0x9, 0x3, 0x0, 0x2)
OP_90(0x101, 0x3E8, 0x0, 0x0, 0x7D0, 0x0)
TurnDirection(0x101, 0x9, 400)
ChrTalk(
0x101,
"#004F啊……\x02",
)
CloseMessageWindow()
OP_43(0x101, 0x1, 0x0, 0x11)
OP_43(0x102, 0x1, 0x0, 0x11)
OP_43(0x9, 0x1, 0x0, 0xF)
OP_8E(0x9, 0x5BCC, 0x0, 0x9C40, 0xBB8, 0x0)
OP_62(0x9, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
OP_22(0x27, 0x0, 0x64)
TurnDirection(0x9, 0x101, 200)
Sleep(1000)
OP_95(0x9, 0x0, 0x0, 0x0, 0x320, 0x2EE0)
OP_22(0x194, 0x0, 0x64)
ChrTalk(
0x9,
"咪呜!\x02",
)
CloseMessageWindow()
OP_43(0x101, 0x1, 0x0, 0x10)
OP_62(0x9, 0x0, 2000, 0x28, 0x2B, 0x64, 0x3)
OP_22(0x81, 0x0, 0x64)
OP_8E(0x9, 0x7148, 0x0, 0x7148, 0x2EE0, 0x0)
SetChrPos(0x9, 39000, 0, 42000, 270)
ChrTalk(
0x101,
"#004F啊,逃跑了!\x02",
)
CloseMessageWindow()
OP_8E(0x101, 0x55BE, 0x0, 0x98F8, 0x1388, 0x0)
OP_8C(0x101, 135, 400)
ChrTalk(
0x101,
"#005F喂!给我等一下!\x02",
)
CloseMessageWindow()
OP_44(0x102, 0xFF)
TurnDirection(0x102, 0x101, 400)
ChrTalk(
0x102,
(
"#012F气息还没有消失……\x02\x03",
"#012F那只魔兽应该还在菜园里面。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#009F哼哼,正合我意……\x02\x03",
"#005F绝对要抓住它!\x02",
)
)
CloseMessageWindow()
OP_44(0x101, 0xFF)
OP_44(0x102, 0xFF)
OP_44(0x9, 0xFF)
OP_A2(0x0)
OP_A3(0x1)
OP_A3(0x2)
EventEnd(0x0)
ClearMapFlags(0x400000)
OP_43(0x9, 0x3, 0x0, 0x2)
Return()
# Function_14_F16 end
def Function_15_1178(): pass
label("Function_15_1178")
OP_6D(24100, 0, 47800, 1000)
OP_6D(23500, 0, 40000, 4000)
Return()
# Function_15_1178 end
def Function_16_119B(): pass
label("Function_16_119B")
OP_69(0x101, 0x3E8)
Return()
# Function_16_119B end
def Function_17_11A3(): pass
label("Function_17_11A3")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_11B7")
TurnDirection(0xFE, 0x9, 0)
OP_48()
Jump("Function_17_11A3")
label("loc_11B7")
Return()
# Function_17_11A3 end
def Function_18_11B8(): pass
label("Function_18_11B8")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x46, 4)), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_11CB")
Call(0, 21)
Call(0, 29)
label("loc_11CB")
Return()
# Function_18_11B8 end
def Function_19_11CC(): pass
label("Function_19_11CC")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x46, 4)), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_11DF")
Call(0, 21)
Call(0, 30)
label("loc_11DF")
Return()
# Function_19_11CC end
def Function_20_11E0(): pass
label("Function_20_11E0")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 2)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x46, 4)), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_11F3")
Call(0, 21)
Call(0, 31)
label("loc_11F3")
Return()
# Function_20_11E0 end
def Function_21_11F4(): pass
label("Function_21_11F4")
EventBegin(0x0)
ClearMapFlags(0x1)
TurnDirection(0x0, 0x9, 0)
TurnDirection(0x1, 0x9, 0)
OP_62(0x9, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
OP_22(0x27, 0x0, 0x64)
OP_22(0x194, 0x0, 0x64)
OP_44(0x9, 0xFF)
OP_95(0x9, 0x0, 0x0, 0x0, 0x320, 0x2EE0)
TurnDirection(0x9, 0x0, 400)
ChrTalk(
0x101,
(
"#006F太好了,终于抓住了!\x02\x03",
"#006F这次一定要给你们点颜色看看!\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#012F接下来才是动真格的。\x01",
"切记不可疏忽大意啊!\x02",
)
)
CloseMessageWindow()
Battle(0x393, 0x0, 0x0, 0x2, 0xFF)
Switch(
(scpexpr(EXPR_PUSH_VALUE_INDEX, 0x3), scpexpr(EXPR_END)),
(1, "loc_1310"),
(3, "loc_1315"),
(0, "loc_1316"),
(SWITCH_DEFAULT, "loc_2259"),
)
label("loc_1310")
OP_B4(0x0)
Jump("loc_2259")
label("loc_1315")
Return()
label("loc_1316")
EventBegin(0x0)
FadeToBright(2000, 0)
ClearChrFlags(0xA, 0x8)
ClearChrFlags(0xA, 0x80)
OP_43(0xA, 0x3, 0x0, 0x2)
ClearChrFlags(0xB, 0x8)
ClearChrFlags(0xB, 0x80)
OP_43(0xB, 0x3, 0x0, 0x2)
ClearChrFlags(0xC, 0x8)
ClearChrFlags(0xC, 0x80)
OP_43(0xC, 0x3, 0x0, 0x2)
ClearChrFlags(0xD, 0x8)
ClearChrFlags(0xD, 0x80)
OP_43(0xD, 0x3, 0x0, 0x2)
ClearChrFlags(0xE, 0x8)
ClearChrFlags(0xE, 0x80)
OP_43(0xE, 0x3, 0x0, 0x2)
SetChrPos(0xC, 33150, 0, 16129, 225)
SetChrPos(0xD, 33390, 0, 15210, 270)
SetChrPos(0xE, 32990, 0, 14530, 315)
SetChrPos(0x12, 29700, 0, 16600, 0)
SetChrPos(0xF, 29000, 0, 14200, 0)
SetChrPos(0x10, 28100, 0, 15300, 0)
SetChrPos(0x13, 28300, 0, 16400, 0)
SetChrPos(0x11, 29300, 0, 17100, 0)
SetChrPos(0x101, 30920, 0, 15780, 270)
SetChrPos(0x102, 30630, 0, 14650, 315)
TurnDirection(0xF, 0xE, 0)
TurnDirection(0x10, 0xD, 0)
TurnDirection(0x11, 0xE, 0)
TurnDirection(0x12, 0x101, 0)
TurnDirection(0x13, 0x102, 0)
OP_44(0x12, 0xFF)
OP_44(0xF, 0xFF)
OP_44(0x10, 0xFF)
OP_44(0x13, 0xFF)
OP_44(0x11, 0xFF)
OP_44(0xC, 0xFF)
OP_44(0xD, 0xFF)
OP_44(0xE, 0xFF)
OP_43(0x11, 0x3, 0x0, 0x17)
OP_6D(30470, 0, 16280, 0)
OP_6C(315000, 0)
OP_6B(3000, 0)
OP_6B(2800, 3000)
OP_22(0x194, 0x0, 0x64)
ChrTalk(
0xC,
"咪呜~~……\x02",
)
CloseMessageWindow()
OP_43(0x10, 0x3, 0x0, 0x16)
OP_22(0x194, 0x0, 0x64)
ChrTalk(
0xE,
"咪~~……\x02",
)
CloseMessageWindow()
ChrTalk(
0x12,
"哎呀,真不愧是游击士啊。\x02",
)
CloseMessageWindow()
ChrTalk(
0x12,
(
"这么轻松就把\x01",
"这群敏捷的家伙抓住了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#000F#4P嘿嘿,过奖了。\x02\x03",
"#000F话说回来,该怎么处理它们呢?\x02",
)
)
CloseMessageWindow()
FadeToDark(300, 0, 100)
RunExpression(0x0, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_4F(0x28, (scpexpr(EXPR_PUSH_LONG, 0x18), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Menu(
0,
10,
10,
0,
(
"『应该不会再做坏事了吧……』\x01", # 0
"『……非要把它们杀掉不可吗?』\x01", # 1
)
)
MenuEnd(0x0)
OP_4F(0x28, (scpexpr(EXPR_PUSH_LONG, 0xFFFF), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_5F(0x0)
OP_56(0x0)
FadeToBright(300, 0)
Switch(
(scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_END)),
(0, "loc_1688"),
(1, "loc_188C"),
(SWITCH_DEFAULT, "loc_1A26"),
)
label("loc_1688")
ChrTalk(
0x101,
(
"#000F#4P已经教训过它们了,\x01",
"应该不会再做坏事了吧……\x02",
)
)
CloseMessageWindow()
TurnDirection(0x102, 0x101, 400)
ChrTalk(
0x102,
(
"#012F#3P艾丝蒂尔……\x01",
"你怎么能感情用事呢?\x02\x03",
"#012F我们不是为了打倒魔兽而来的吗?\x02",
)
)
CloseMessageWindow()
TurnDirection(0x101, 0x102, 400)
ChrTalk(
0x101,
"#003F#4P可、可是……\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#015F#3P而且,我们这次是\x01",
"代替父亲来执行任务的……\x02\x03",
"#015F如果下次再出现同类的事件,\x01",
"你打算怎么向协会交代?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#007F#4P唔唔……\x01",
"说的也是……\x02",
)
)
CloseMessageWindow()
Jump("loc_1A26")
label("loc_188C")
ChrTalk(
0x101,
"#003F#4P……非要把它们杀掉不可吗?\x02",
)
CloseMessageWindow()
TurnDirection(0x102, 0x101, 400)
ChrTalk(
0x102,
(
"#012F#3P这当然无庸置疑了,艾丝蒂尔。\x01",
"我们是为了打倒魔兽而来的。\x02",
)
)
CloseMessageWindow()
TurnDirection(0x101, 0x102, 400)
ChrTalk(
0x101,
"#003F#4P可、可是……\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#015F#3P游击士的使命\x01",
"是保卫百姓、维护正义……\x02\x03",
"#015F绝对不能存有同情魔兽的心态。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#007F#4P唔唔……\x01",
"说的也是……\x02",
)
)
CloseMessageWindow()
Jump("loc_1A26")
label("loc_1A26")
ChrTalk(
0xF,
"……………………\x02",
)
CloseMessageWindow()
ChrTalk(
0xF,
(
"算了算了,\x01",
"反正受害的只有我们家种的菜而已……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xF,
"就放了它们吧?\x02",
)
CloseMessageWindow()
ChrTalk(
0x13,
"是啊。\x02",
)
CloseMessageWindow()
ChrTalk(
0x13,
(
"反正它们已经得到应有的教训,\x01",
"这件事就算了吧。\x02",
)
)
CloseMessageWindow()
TurnDirection(0x101, 0x10, 400)
ChrTalk(
0x101,
"#501F#4P缇欧,阿姨……\x02",
)
CloseMessageWindow()
TurnDirection(0x102, 0x13, 400)
ChrTalk(
0x102,
"#012F#3P但是……\x02",
)
CloseMessageWindow()
ChrTalk(
0x12,
"……我也反对杀掉它们。\x02",
)
CloseMessageWindow()
ChrTalk(
0x12,
(
"它们虽然是魔兽,\x01",
"却也和我们生活在同一片土地上啊。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x12,
(
"有时候还是要互相忍让的,\x01",
"大家和睦相处不是很好吗。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x12,
(
"约修亚……\x01",
"这次就放了它们吧?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#015F#3P……………………\x02\x03",
"#010F……我明白了。\x02\x03",
"#010F既然受害者都同意放过它们,\x01",
"那我也没有反对的理由。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x12,
(
"真是抱歉,\x01",
"还让你们特地来这里。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x12,
(
"我们以后也要加固栅栏,\x01",
"想办法避免再遇到这样的事情。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#001F#4P那就这样决定了。\x02",
)
CloseMessageWindow()
TurnDirection(0x101, 0xD, 400)
TurnDirection(0x102, 0xD, 400)
ChrTalk(
0x101,
(
"#006F就是这么回事,\x01",
"你们还不感谢大家?\x02",
)
)
CloseMessageWindow()
OP_8E(0x101, 0x7AD0, 0x0, 0x3DA4, 0x7D0, 0x0)
Sleep(100)
SetChrFlags(0x101, 0x800)
SetChrChipByIndex(0x101, 10)
OP_22(0x1F4, 0x0, 0x64)
OP_99(0x101, 0x0, 0xC, 0x7D0)
OP_44(0x11, 0xFF)
OP_44(0x10, 0xFF)
ChrTalk(
0x101,
"#005F再有下次的话就送你们下地狱!\x02",
)
CloseMessageWindow()
OP_62(0xC, 0x0, 2000, 0x28, 0x2B, 0x64, 0x3)
OP_95(0xC, 0x0, 0x0, 0x0, 0x320, 0x2EE0)
TurnDirection(0xC, 0x101, 0)
OP_62(0xD, 0x0, 2000, 0x28, 0x2B, 0x64, 0x3)
OP_95(0xD, 0x0, 0x0, 0x0, 0x320, 0x2EE0)
TurnDirection(0xD, 0x101, 0)
OP_62(0xE, 0x0, 2000, 0x28, 0x2B, 0x64, 0x3)
OP_95(0xE, 0x0, 0x0, 0x0, 0x320, 0x2EE0)
TurnDirection(0xE, 0x101, 0)
OP_22(0x194, 0x0, 0x64)
ChrTalk(
0xC,
"咪嘎~~!!\x02",
)
CloseMessageWindow()
Sleep(100)
OP_A2(0x3)
SetChrPos(0xC, 0, 0, 0, 0)
SetChrPos(0x9, 33150, 0, 16129, 0)
OP_22(0x81, 0x0, 0x64)
OP_43(0x9, 0x1, 0x0, 0x18)
Sleep(200)
OP_43(0xB, 0x2, 0x0, 0x1B)
SetChrPos(0xD, 0, 0, 0, 0)
SetChrPos(0xA, 33390, 0, 15210, 0)
OP_22(0x81, 0x0, 0x64)
OP_43(0xA, 0x1, 0x0, 0x19)
Sleep(200)
SetChrPos(0xE, 0, 0, 0, 0)
SetChrPos(0xB, 32990, 0, 14530, 0)
OP_22(0x81, 0x0, 0x64)
OP_43(0xB, 0x1, 0x0, 0x1A)
Sleep(1000)
OP_A3(0x3)
OP_44(0x9, 0xFF)
OP_44(0xA, 0xFF)
OP_44(0xB, 0xFF)
OP_44(0xC, 0xFF)
OP_44(0xD, 0xFF)
OP_44(0xE, 0xFF)
TurnDirection(0x101, 0xB, 0)
Sleep(2000)
TurnDirection(0x12, 0x101, 400)
ChrTalk(
0x12,
(
"好了,终于解决了。\x01",
"已经这么晚了,都该睡了。\x02",
)
)
CloseMessageWindow()
OP_43(0xF, 0x1, 0x0, 0x1C)
OP_43(0x10, 0x1, 0x0, 0x1C)
OP_43(0x11, 0x1, 0x0, 0x1C)
OP_43(0x13, 0x1, 0x0, 0x1C)
OP_44(0x101, 0xFF)
OP_51(0x101, 0x8, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
ClearChrFlags(0x101, 0x800)
SetChrChipByIndex(0x101, 65535)
TurnDirection(0x101, 0x10, 400)
TurnDirection(0x102, 0x12, 400)
ChrTalk(
0x12,
"你们也留下好好休息吧。\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
"#001F#4P好~\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
"#010F#3P又给你们添麻烦了。\x02",
)
CloseMessageWindow()
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x46, 5)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_221E")
OP_2B(0x2, 0x2)
Jump("loc_222B")
label("loc_221E")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x46, 6)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_222B")
OP_2B(0x2, 0x1)
label("loc_222B")
OP_A2(0x239)
OP_28(0x2, 0x1, 0x800)
OP_28(0x2, 0x1, 0x1000)
OP_28(0x2, 0x4, 0x10)
OP_20(0x5DC)
FadeToDark(1000, 0, -1)
OP_0D()
OP_21()
NewScene("ED6_DT01/T0411 ._SN", 1, 0, 0)
IdleLoop()
label("loc_2259")
Return()
# Function_21_11F4 end
def Function_22_225A(): pass
label("Function_22_225A")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 3)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_229D")
OP_95(0xFE, 0x0, 0x0, 0x0, 0x190, 0xBB8)
Sleep(300)
OP_95(0xFE, 0x0, 0x0, 0x0, 0x190, 0xBB8)
Sleep(2500)
Jump("Function_22_225A")
label("loc_229D")
Return()
# Function_22_225A end
def Function_23_229E(): pass
label("Function_23_229E")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 3)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_230D")
Sleep(3000)
OP_8F(0xFE, 0x733C, 0x0, 0x4394, 0x12C, 0x0)
Sleep(2000)
OP_8F(0xFE, 0x7274, 0x0, 0x42CC, 0x5DC, 0x0)
Sleep(3000)
OP_8F(0xFE, 0x72D8, 0x0, 0x4330, 0x12C, 0x0)
Sleep(500)
OP_8F(0xFE, 0x7274, 0x0, 0x42CC, 0x2BC, 0x0)
Jump("Function_23_229E")
label("loc_230D")
Return()
# Function_23_229E end
def Function_24_230E(): pass
label("Function_24_230E")
OP_8E(0x9, 0x85AC, 0x32, 0x740E, 0x2EE0, 0x0)
Return()
# Function_24_230E end
def Function_25_2323(): pass
label("Function_25_2323")
OP_8E(0xA, 0x85AC, 0x32, 0x740E, 0x2EE0, 0x0)
Return()
# Function_25_2323 end
def Function_26_2338(): pass
label("Function_26_2338")
OP_8E(0xB, 0x85AC, 0x32, 0x740E, 0x2EE0, 0x0)
Return()
# Function_26_2338 end
def Function_27_234D(): pass
label("Function_27_234D")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 3)), scpexpr(EXPR_END)), "loc_2389")
TurnDirection(0xF, 0xA, 0)
TurnDirection(0x10, 0xA, 0)
TurnDirection(0x11, 0xA, 0)
TurnDirection(0x12, 0xA, 0)
TurnDirection(0x13, 0xA, 0)
TurnDirection(0x101, 0xA, 0)
TurnDirection(0x102, 0xA, 0)
OP_48()
Jump("Function_27_234D")
label("loc_2389")
Return()
# Function_27_234D end
def Function_28_238A(): pass
label("Function_28_238A")
TurnDirection(0xFE, 0x101, 400)
Return()
# Function_28_238A end
def Function_29_2392(): pass
label("Function_29_2392")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x46, 4)), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_2423")
EventBegin(0x0)
ClearMapFlags(0x1)
OP_A3(0x0)
OP_A2(0x1)
OP_A3(0x2)
OP_62(0x9, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
OP_22(0x27, 0x0, 0x64)
OP_69(0x9, 0x1F4)
OP_43(0x101, 0x1, 0x0, 0x11)
OP_43(0x102, 0x1, 0x0, 0x11)
OP_22(0x81, 0x0, 0x64)
OP_8E(0x9, 0xB1BC, 0x0, 0xB220, 0x2EE0, 0x0)
OP_8E(0x9, 0xBB80, 0x0, 0xC738, 0x2EE0, 0x0)
SetChrPos(0x9, 22700, 0, 25300, 0)
OP_44(0x101, 0xFF)
OP_44(0x102, 0xFF)
Call(0, 32)
label("loc_2423")
Return()
# Function_29_2392 end
def Function_30_2424(): pass
label("Function_30_2424")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x46, 4)), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_24A1")
EventBegin(0x0)
ClearMapFlags(0x1)
OP_A3(0x0)
OP_A3(0x1)
OP_A2(0x2)
OP_62(0x9, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
OP_22(0x27, 0x0, 0x64)
OP_69(0x9, 0x1F4)
OP_43(0x101, 0x1, 0x0, 0x11)
OP_43(0x102, 0x1, 0x0, 0x11)
OP_22(0x81, 0x0, 0x64)
OP_8E(0x9, 0x846C, 0x0, 0x3A98, 0x2EE0, 0x0)
SetChrPos(0x9, 46100, 0, 15200, 0)
OP_44(0x101, 0xFF)
OP_44(0x102, 0xFF)
Call(0, 32)
label("loc_24A1")
Return()
# Function_30_2424 end
def Function_31_24A2(): pass
label("Function_31_24A2")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 2)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x46, 4)), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_2533")
EventBegin(0x0)
ClearMapFlags(0x1)
OP_A2(0x0)
OP_A3(0x1)
OP_A3(0x2)
OP_62(0x9, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
OP_22(0x27, 0x0, 0x64)
OP_69(0x9, 0x1F4)
OP_43(0x101, 0x1, 0x0, 0x11)
OP_43(0x102, 0x1, 0x0, 0x11)
OP_22(0x81, 0x0, 0x64)
OP_8E(0x9, 0xA4D8, 0x0, 0x2A94, 0x2EE0, 0x0)
OP_8E(0x9, 0x927C, 0x0, 0x27D8, 0x2EE0, 0x0)
OP_44(0x101, 0xFF)
OP_44(0x102, 0xFF)
SetChrPos(0x9, 39000, 0, 42000, 270)
Call(0, 32)
label("loc_2533")
Return()
# Function_31_24A2 end
def Function_32_2534(): pass
label("Function_32_2534")
Jc((scpexpr(EXPR_PUSH_VALUE_INDEX, 0x3), scpexpr(EXPR_PUSH_LONG, 0x3), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_25E0")
ChrTalk(
0x101,
"#000F啊啊。又逃走了~\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F这么小的家伙,\x01",
"逃跑起来动作也特别的灵活。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
"#010F好好想一下战斗的方法哦,艾丝蒂尔。(※仮)\x02",
)
CloseMessageWindow()
Jump("loc_2A4C")
label("loc_25E0")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x46, 6)), scpexpr(EXPR_END)), "loc_26AC")
OP_A2(0x237)
OP_28(0x2, 0x1, 0x400)
ChrTalk(
0x101,
"#007F啊啊……\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#012F真可惜啊。\x02\x03",
"#012F总之当它跳得起劲的时候\x01",
"从背后靠近它,然后再去抓它。\x02",
)
)
CloseMessageWindow()
Jump("loc_2A4C")
label("loc_26AC")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x46, 5)), scpexpr(EXPR_END)), "loc_2991")
OP_A2(0x236)
OP_28(0x2, 0x1, 0x200)
ChrTalk(
0x101,
(
"#009F又、又逃走了!?\x02\x03",
"#009F这家伙身子圆圆胖胖的,\x01",
"怎么动作还这么灵活啊?\x02",
)
)
CloseMessageWindow()
TurnDirection(0x1, 0x0, 0)
ChrTalk(
0x102,
(
"#012F动作的确是很灵活,\x01",
"而且反应也相当灵敏。\x02\x03",
"#012F不过,掌握好时机就没问题了。\x02",
)
)
CloseMessageWindow()
TurnDirection(0x0, 0x1, 500)
ClearMapFlags(0x1)
OP_51(0x17, 0x1, (scpexpr(EXPR_GET_CHR_WORK, 0x101, 0x1), scpexpr(EXPR_GET_CHR_WORK, 0x102, 0x1), scpexpr(EXPR_GET_CHR_WORK, 0x101, 0x1), scpexpr(EXPR_SUB), scpexpr(EXPR_PUSH_LONG, 0x2), scpexpr(EXPR_IDIV), scpexpr(EXPR_ADD), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_51(0x17, 0x2, (scpexpr(EXPR_GET_CHR_WORK, 0x101, 0x2), scpexpr(EXPR_GET_CHR_WORK, 0x102, 0x2), scpexpr(EXPR_GET_CHR_WORK, 0x101, 0x2), scpexpr(EXPR_SUB), scpexpr(EXPR_PUSH_LONG, 0x2), scpexpr(EXPR_IDIV), scpexpr(EXPR_ADD), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_51(0x17, 0x3, (scpexpr(EXPR_GET_CHR_WORK, 0x101, 0x3), scpexpr(EXPR_GET_CHR_WORK, 0x102, 0x3), scpexpr(EXPR_GET_CHR_WORK, 0x101, 0x3), scpexpr(EXPR_SUB), scpexpr(EXPR_PUSH_LONG, 0x2), scpexpr(EXPR_IDIV), scpexpr(EXPR_ADD), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_69(0x17, 0x320)
ChrTalk(
0x101,
"#004F掌握时机?\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#012F看得出这种魔兽跳起来的时候\x01",
"警戒性会相对减弱。\x02\x03",
"#012F瞄准这个空档,然后从背后靠近它,\x01",
"这样就应该可以抓住它了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#006F原来是这样啊……\x01",
"当它跳得起劲的时候从背后靠近它对吧。\x02\x03",
"#006F嗯,一定要试试看才行!\x02",
)
)
CloseMessageWindow()
Jump("loc_2A4C")
label("loc_2991")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x46, 4)), scpexpr(EXPR_END)), "loc_2A4C")
OP_A2(0x235)
OP_28(0x2, 0x1, 0x100)
ChrTalk(
0x101,
"#004F啊啊!\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
"#012F真可惜,被它发现了。\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
"#009F唔~下次一定抓到你!\x02",
)
CloseMessageWindow()
label("loc_2A4C")
OP_44(0x0, 0xFF)
OP_44(0x1, 0xFF)
OP_44(0x9, 0xFF)
EventEnd(0x1)
OP_43(0x9, 0x3, 0x0, 0x2)
Return()
# Function_32_2534 end
def Function_33_2A62(): pass
label("Function_33_2A62")
OP_69(0x101, 0x3E8)
Return()
# Function_33_2A62 end
def Function_34_2A6A(): pass
label("Function_34_2A6A")
EventBegin(0x1)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x46, 4)), scpexpr(EXPR_END)), "loc_2B58")
Jc((scpexpr(EXPR_PUSH_VALUE_INDEX, 0xA), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_2AE9")
ChrTalk(
0x102,
(
"#012F魔兽一定还在农场里面。\x01",
" \x02\x03",
"我们还是不要去别的地方吧。\x02",
)
)
CloseMessageWindow()
Jump("loc_2B55")
label("loc_2AE9")
TurnDirection(0x102, 0x101, 400)
ChrTalk(
0x102,
(
"#012F魔兽一定还在农场里面某个地方。\x01",
" \x02\x03",
"我们还是回去巡逻吧。\x02",
)
)
CloseMessageWindow()
label("loc_2B55")
Jump("loc_2C08")
label("loc_2B58")
Jc((scpexpr(EXPR_PUSH_VALUE_INDEX, 0xA), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_2BB6")
ChrTalk(
0x102,
(
"#012F这里是农场的出口……\x01",
"我们要在农场里面巡逻才行。\x02",
)
)
CloseMessageWindow()
Jump("loc_2C08")
label("loc_2BB6")
TurnDirection(0x102, 0x101, 400)
ChrTalk(
0x102,
(
"#012F那里是农场的出口呢。\x01",
"我们要在农场里面巡逻才行。\x02",
)
)
CloseMessageWindow()
label("loc_2C08")
OP_8E(0x0, 0x5E24, 0x14, 0xD1C4, 0xBB8, 0x0)
Sleep(50)
EventEnd(0x4)
Return()
# Function_34_2A6A end
SaveToFile()
Try(main)
| [
"[email protected]"
] | |
21b9c07f5745ad1954c3ca3af77d74dac67620d0 | bfb113c3076f5b0570953583e7a2321c774d73ea | /venv/Scripts/easy_install-3.8-script.py | 88b18d55d3cfc7b1e4ca627aaafb9710ef93d7c3 | [] | no_license | gsudarshan1990/Training_Projects | 82c48d5492cb4be94db09ee5c66142c370794e1c | 2b7edfafc4e448bd558c034044570496ca68bf2d | refs/heads/master | 2022-12-10T15:56:17.535096 | 2020-09-04T06:02:31 | 2020-09-04T06:02:31 | 279,103,151 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 440 | py | #!E:\Training_Projects\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==40.8.0','console_scripts','easy_install-3.8'
__requires__ = 'setuptools==40.8.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('setuptools==40.8.0', 'console_scripts', 'easy_install-3.8')()
)
| [
"[email protected]"
] | |
543db5403219ea73e88c510b470c95cc4e6a7ff0 | 18c8a7cb838702cdf1c4d4e9f66b2cffd63130aa | /{{cookiecutter.project_slug}}/config/settings/test.py | 69b8d3be1f0ba85b3bda931e3cd01983f435d82f | [
"MIT"
] | permissive | DiscordApps/launchr | c304008a0d05bdf2d3ed77ada365f80d861f307d | 61049879591ba851ce50d1651abc7193aae4aca0 | refs/heads/master | 2022-02-26T21:22:36.656108 | 2019-10-11T13:05:35 | 2019-10-11T13:05:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,695 | py | """
With these settings, tests run faster.
"""
from .base import * # noqa
from .base import env
# GENERAL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
SECRET_KEY = env(
"DJANGO_SECRET_KEY",
default="!!!SET DJANGO_SECRET_KEY!!!",
)
# https://docs.djangoproject.com/en/dev/ref/settings/#test-runner
TEST_RUNNER = "django.test.runner.DiscoverRunner"
# CACHES
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#caches
CACHES = {
"default": {
"BACKEND": "django.core.cache.backends.locmem.LocMemCache",
"LOCATION": "",
}
}
# PASSWORDS
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#password-hashers
PASSWORD_HASHERS = ["django.contrib.auth.hashers.MD5PasswordHasher"]
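# MD5 here is purely a test-speed optimization: it is far cheaper than the default
# PBKDF2 hasher, so tests that create users or log in run noticeably faster. It should
# never be used outside of test settings.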
# TEMPLATES
# ------------------------------------------------------------------------------
TEMPLATES[0]["OPTIONS"]["loaders"] = [ # noqa F405
(
"django.template.loaders.cached.Loader",
[
"django.template.loaders.filesystem.Loader",
"django.template.loaders.app_directories.Loader",
],
)
]
TEMPLATES[0]['OPTIONS']['debug'] = True # noqa F405
# EMAIL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
EMAIL_BACKEND = "django.core.mail.backends.locmem.EmailBackend"
# Your stuff...
# ------------------------------------------------------------------------------
| [
"[email protected]"
] | |
26d8aaa4d2b005645c48370b880a005933c1fdc0 | 9129a791f45cd3b25d8a5da57ee6936bfe4e73a2 | /learn-django/jango/Scripts/pip3-script.py | d5c70d9b707ed3b9eea1c0bec34cc59c4c2ec8ba | [] | no_license | Sunsetjue/Django2.0 | 94be49ed9d65dab6398ab8f0ddd02bb1871afb6b | 102bf0f2bd2d309b76f3247e396b7e83c5f6c2f8 | refs/heads/master | 2020-04-22T03:25:24.014196 | 2019-02-15T16:18:23 | 2019-02-15T16:18:23 | 170,086,151 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 399 | py | #!C:\Users\l\learn-django\jango\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==10.0.1','console_scripts','pip3'
__requires__ = 'pip==10.0.1'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('pip==10.0.1', 'console_scripts', 'pip3')()
)
| [
"10073631822qq.com"
] | 10073631822qq.com |
63309f5b16e32ac3d1a5c83f1cabc9d2e02f0132 | d05a59feee839a4af352b7ed2fd6cf10a288a3cb | /xlsxwriter/test/workbook/test_write_workbook_view.py | 683d301b318446951f7cca09b7fc061d5ee04506 | [
"BSD-2-Clause-Views"
] | permissive | elessarelfstone/XlsxWriter | 0d958afd593643f990373bd4d8a32bafc0966534 | bb7b7881c7a93c89d6eaac25f12dda08d58d3046 | refs/heads/master | 2020-09-24T06:17:20.840848 | 2019-11-24T23:43:01 | 2019-11-24T23:43:01 | 225,685,272 | 1 | 0 | NOASSERTION | 2019-12-03T18:09:06 | 2019-12-03T18:09:05 | null | UTF-8 | Python | false | false | 4,953 | py | ###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2019, John McNamara, [email protected]
#
import unittest
from ...compatibility import StringIO
from ...workbook import Workbook
class TestWriteWorkbookView(unittest.TestCase):
"""
Test the Workbook _write_workbook_view() method.
"""
def setUp(self):
self.fh = StringIO()
self.workbook = Workbook()
self.workbook._set_filehandle(self.fh)
def test_write_workbook_view1(self):
"""Test the _write_workbook_view() method"""
self.workbook._write_workbook_view()
exp = """<workbookView xWindow="240" yWindow="15" windowWidth="16095" windowHeight="9660"/>"""
got = self.fh.getvalue()
self.assertEqual(got, exp)
def test_write_workbook_view2(self):
"""Test the _write_workbook_view() method"""
self.workbook.worksheet_meta.activesheet = 1
self.workbook._write_workbook_view()
exp = """<workbookView xWindow="240" yWindow="15" windowWidth="16095" windowHeight="9660" activeTab="1"/>"""
got = self.fh.getvalue()
self.assertEqual(got, exp)
def test_write_workbook_view3(self):
"""Test the _write_workbook_view() method"""
self.workbook.worksheet_meta.firstsheet = 1
self.workbook.worksheet_meta.activesheet = 1
self.workbook._write_workbook_view()
exp = """<workbookView xWindow="240" yWindow="15" windowWidth="16095" windowHeight="9660" firstSheet="2" activeTab="1"/>"""
got = self.fh.getvalue()
self.assertEqual(got, exp)
def test_write_workbook_view4(self):
"""Test the _write_workbook_view() method"""
self.workbook.set_size(0, 0)
self.workbook._write_workbook_view()
exp = """<workbookView xWindow="240" yWindow="15" windowWidth="16095" windowHeight="9660"/>"""
got = self.fh.getvalue()
self.assertEqual(got, exp)
def test_write_workbook_view5(self):
"""Test the _write_workbook_view() method"""
self.workbook.set_size(None, None)
self.workbook._write_workbook_view()
exp = """<workbookView xWindow="240" yWindow="15" windowWidth="16095" windowHeight="9660"/>"""
got = self.fh.getvalue()
self.assertEqual(got, exp)
def test_write_workbook_view6(self):
"""Test the _write_workbook_view() method"""
self.workbook.set_size(1073, 644)
self.workbook._write_workbook_view()
exp = """<workbookView xWindow="240" yWindow="15" windowWidth="16095" windowHeight="9660"/>"""
got = self.fh.getvalue()
self.assertEqual(got, exp)
def test_write_workbook_view7(self):
"""Test the _write_workbook_view() method"""
self.workbook.set_size(123, 70)
self.workbook._write_workbook_view()
exp = """<workbookView xWindow="240" yWindow="15" windowWidth="1845" windowHeight="1050"/>"""
got = self.fh.getvalue()
self.assertEqual(got, exp)
def test_write_workbook_view8(self):
"""Test the _write_workbook_view() method"""
self.workbook.set_size(719, 490)
self.workbook._write_workbook_view()
exp = """<workbookView xWindow="240" yWindow="15" windowWidth="10785" windowHeight="7350"/>"""
got = self.fh.getvalue()
self.assertEqual(got, exp)
def test_write_workbook_view9(self):
"""Test the _write_workbook_view() method"""
self.workbook.set_tab_ratio()
self.workbook._write_workbook_view()
exp = """<workbookView xWindow="240" yWindow="15" windowWidth="16095" windowHeight="9660"/>"""
got = self.fh.getvalue()
self.assertEqual(got, exp)
def test_write_workbook_view10(self):
"""Test the _write_workbook_view() method"""
self.workbook.set_tab_ratio(34.6)
self.workbook._write_workbook_view()
exp = """<workbookView xWindow="240" yWindow="15" windowWidth="16095" windowHeight="9660" tabRatio="346"/>"""
got = self.fh.getvalue()
self.assertEqual(got, exp)
def test_write_workbook_view11(self):
"""Test the _write_workbook_view() method"""
self.workbook.set_tab_ratio(0)
self.workbook._write_workbook_view()
exp = """<workbookView xWindow="240" yWindow="15" windowWidth="16095" windowHeight="9660" tabRatio="0"/>"""
got = self.fh.getvalue()
self.assertEqual(got, exp)
def test_write_workbook_view12(self):
"""Test the _write_workbook_view() method"""
self.workbook.set_tab_ratio(100)
self.workbook._write_workbook_view()
exp = """<workbookView xWindow="240" yWindow="15" windowWidth="16095" windowHeight="9660" tabRatio="1000"/>"""
got = self.fh.getvalue()
self.assertEqual(got, exp)
def tearDown(self):
self.workbook.fileclosed = 1
| [
"[email protected]"
] | |
f36bac8cb3c65b13ba04323591cf99f819b50868 | 431c8beacf2b1a54982bf2d06b3dc5cebba87c69 | /buttontest.py | 1b228e5bfeb4437a78e6f55ab31ba9c5574807e5 | [
"MIT"
] | permissive | watrt/micropython-tft-gui | 290c27ba810943033d26214b7f9ec38129fa774e | 1ae9eafccb7084093eb80354e9e30d1f02367221 | refs/heads/master | 2020-12-10T06:49:51.299653 | 2019-05-25T07:30:57 | 2019-05-25T07:30:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,508 | py | # buttontest.py Test/demo of pushbutton classes for Pybboard TFT GUI
# The MIT License (MIT)
#
# Copyright (c) 2016 Peter Hinch
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from constants import *
from ugui import Button, ButtonList, RadioButtons, Checkbox, Label, Screen
import font14
import font10
from tft_local import setup
class ButtonScreen(Screen):
def __init__(self):
super().__init__()
# These tables contain args that differ between members of a set of related buttons
table = [
{'fgcolor' : GREEN, 'text' : 'Yes', 'args' : ('Oui', 2), 'fontcolor' : (0, 0, 0)},
{'fgcolor' : RED, 'text' : 'No', 'args' : ('Non', 2)},
{'fgcolor' : BLUE, 'text' : '???', 'args' : ('Que?', 2), 'fill': False},
{'fgcolor' : GREY, 'text' : 'Rats', 'args' : ('Rats', 2), 'shape' : CLIPPED_RECT,},
]
# Highlight buttons: only tabulate data that varies
table_highlight = [
{'text' : 'P', 'args' : ('p', 2)},
{'text' : 'Q', 'args' : ('q', 2)},
{'text' : 'R', 'args' : ('r', 2)},
{'text' : 'S', 'args' : ('s', 2)},
]
# A Buttonset with two entries
table_buttonset = [
{'fgcolor' : GREEN, 'shape' : CLIPPED_RECT, 'text' : 'Start', 'args' : ('Live', 2)},
{'fgcolor' : RED, 'shape' : CLIPPED_RECT, 'text' : 'Stop', 'args' : ('Die', 2)},
]
table_radiobuttons = [
{'text' : '1', 'args' : ('1', 3)},
{'text' : '2', 'args' : ('2', 3)},
{'text' : '3', 'args' : ('3', 3)},
{'text' : '4', 'args' : ('4', 3)},
]
labels = { 'width' : 70,
'fontcolor' : WHITE,
'border' : 2,
'fgcolor' : RED,
'bgcolor' : (0, 40, 0),
'font' : font14,
}
# Uncomment this line to see 'skeleton' style greying-out:
# Screen.tft.grey_color()
# Labels
self.lstlbl = []
for n in range(5):
self.lstlbl.append(Label((390, 40 * n), **labels))
# Button assortment
x = 0
for t in table:
Button((x, 0), font = font14, callback = self.callback, **t)
x += 70
# Highlighting buttons
x = 0
for t in table_highlight:
Button((x, 60), fgcolor = GREY, fontcolor = BLACK, litcolor = WHITE,
font = font14, callback = self.callback, **t)
x += 70
# Start/Stop toggle
self.bs = ButtonList(self.callback)
self.bs0 = None
for t in table_buttonset: # Buttons overlay each other at same location
button = self.bs.add_button((0, 240), font = font14, fontcolor = BLACK, height = 30, **t)
if self.bs0 is None: # Save for reset button callback
self.bs0 = button
# Radio buttons
x = 0
self.rb = RadioButtons(BLUE, self.callback) # color of selected button
self.rb0 = None
for t in table_radiobuttons:
button = self.rb.add_button((x, 140), font = font14, fontcolor = WHITE,
fgcolor = (0, 0, 90), height = 40, width = 40, **t)
if self.rb0 is None: # Save for reset button callback
self.rb0 = button
x += 60
# Checkbox
self.cb1 = Checkbox((340, 0), callback = self.cbcb, args = (0,))
self.cb2 = Checkbox((340, 40), fillcolor = RED, callback = self.cbcb, args = (1,))
# Reset button
self.lbl_reset = Label((200, 220), font = font10, value = 'Reset also responds to long press')
self.btn_reset = Button((300, 240), font = font14, height = 30, width = 80,
fgcolor = BLUE, shape = RECTANGLE, text = 'Reset', fill = True,
callback = self.cbreset, args = (4,), onrelease = False,
lp_callback = self.callback, lp_args = ('long', 4))
# Quit
self.btn_quit = Button((390, 240), font = font14, height = 30, width = 80,
fgcolor = RED, shape = RECTANGLE, text = 'Quit',
callback = self.quit)
# Enable/Disable toggle
self.bs_en = ButtonList(self.cb_en_dis)
self.tup_en_dis = (self.cb1, self.cb2, self.rb, self.bs) # Items affected by enable/disable button
self.bs_en.add_button((200, 240), font = font14, fontcolor = BLACK, height = 30, width = 90,
fgcolor = GREEN, shape = RECTANGLE, text = 'Disable', args = (True,))
self.bs_en.add_button((200, 240), font = font14, fontcolor = BLACK, height = 30, width = 90,
fgcolor = RED, shape = RECTANGLE, text = 'Enable', args = (False,))
def callback(self, button, arg, idx_label):
self.lstlbl[idx_label].value(arg)
def quit(self, button):
Screen.shutdown()
def cbcb(self, checkbox, idx_label):
if checkbox.value():
self.lstlbl[idx_label].value('True')
else:
self.lstlbl[idx_label].value('False')
def cbreset(self, button, idx_label):
self.cb1.value(False)
self.cb2.value(False)
self.bs.value(self.bs0)
self.rb.value(self.rb0)
self.lstlbl[idx_label].value('Short')
def cb_en_dis(self, button, disable):
for item in self.tup_en_dis:
item.greyed_out(disable)
def test():
print('Testing TFT...')
setup()
Screen.change(ButtonScreen)
test()
| [
"[email protected]"
] | |
f17ed08bf47fc77482e427e5e7c87e52a0ab5d46 | 756d50be34245115ad28e79f4dfceb5516d17225 | /relsearch.py | af268beec2663fa43b51c0f5de63ab395fea2d2b | [] | no_license | abyssonym/gg3 | f1ce189a2a70786da8b2ab78281b39615fc59af2 | 1e6adadc6765d339ebbd7ca650d9b435d56fb366 | refs/heads/master | 2021-01-18T13:51:25.702975 | 2017-11-16T22:26:30 | 2017-11-16T22:26:30 | 34,976,112 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,264 | py | from sys import argv
from string import ascii_lowercase
from shutil import copyfile
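# What this script does (description inferred from the code, since there is no original
# docstring): a "relative search" over a binary such as an SNES ROM. The search term is
# converted to numbers (a-z -> 0..25, or explicit dot-separated integers), and the file
# is scanned for byte runs whose pairwise differences match the term's, which locates
# text stored under an unknown character encoding. Once the user confirms a match, each
# identical run is overwritten with a constant byte in a copy of the file (test.smc).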
filename = argv[1]
outfile = "test.smc"
searchstr = argv[2].lower()
if '.' in searchstr:
searchstr = map(int, searchstr.split('.'))
else:
numdict = dict([(b, a) for (a, b) in enumerate(ascii_lowercase)])
searchstr = [numdict[c] if c in numdict else c for c in searchstr]
print searchstr
f = open(filename, 'r+b')
addr = 0
checkstr = None
while True:
f.seek(addr)
bytestr = f.read(len(searchstr))
if len(bytestr) != len(searchstr):
break
bytestr = map(ord, bytestr)
offset = bytestr[0] - searchstr[0]
newbytestr = [i - offset for i in bytestr]
if all([a == b for (a, b) in zip(newbytestr, searchstr)]):
print "%x" % addr
print bytestr
check = None
if not checkstr:
check = raw_input("> ")
if check and check.lower()[0] == 'y':
checkstr = bytestr
if checkstr and all([a == b for (a, b) in zip(checkstr, bytestr)]):
copyfile(filename, outfile)
f2 = open(outfile, 'r+b')
f2.seek(addr)
f2.write("".join([chr(bytestr[0]) for _ in bytestr]))
f2.close()
check = raw_input("> ")
addr += 1
| [
"none"
] | none |
e1bc080590be397ae15d86246e7de108caaf0d0f | 6fcfb638fa725b6d21083ec54e3609fc1b287d9e | /python/benanne_kaggle-ndsb/kaggle-ndsb-master/dihedral_ops.py | e5d8d87655fb7072e1ad79d489e425aaca16ac92 | [] | no_license | LiuFang816/SALSTM_py_data | 6db258e51858aeff14af38898fef715b46980ac1 | d494b3041069d377d6a7a9c296a14334f2fa5acc | refs/heads/master | 2022-12-25T06:39:52.222097 | 2019-12-12T08:49:07 | 2019-12-12T08:49:07 | 227,546,525 | 10 | 7 | null | 2022-12-19T02:53:01 | 2019-12-12T07:29:39 | Python | UTF-8 | Python | false | false | 12,079 | py | import numpy as np
import theano
import theano.sandbox.cuda as cuda
from pycuda.compiler import SourceModule
import theano.misc.pycuda_init
class PyCudaOp(cuda.GpuOp):
def __eq__(self, other):
return type(self) == type(other)
def __hash__(self):
return hash(type(self))
def __str__(self):
return self.__class__.__name__
def output_type(self, inp):
raise NotImplementedError
def make_node(self, inp):
inp = cuda.basic_ops.gpu_contiguous(
cuda.basic_ops.as_cuda_ndarray_variable(inp))
assert inp.dtype == "float32"
return theano.Apply(self, [inp], [self.output_type(inp)()])
class CyclicRollOp(PyCudaOp):
def output_type(self, inp):
return cuda.CudaNdarrayType(broadcastable=[False] * (inp.type.ndim))
def make_thunk(self, node, storage_map, _, _2):
inputs = [storage_map[v] for v in node.inputs]
outputs = [storage_map[v] for v in node.outputs]
mod = SourceModule("""
__global__ void cyclic_roll(float * input, float * output, int batch_size, int num_features) {
int x = blockIdx.x*blockDim.x + threadIdx.x; // feature dim, fastest varying index!
int y = blockIdx.y*blockDim.y + threadIdx.y; // batch dim
int height = 4 * batch_size;
int width = 4 * num_features;
if (x < num_features && y < height) {
for (int i = 0; i < 4; i++) {
int y_out = (y + batch_size * (4 - i)) % height;
int x_out = x + num_features * i;
output[y_out * width + x_out] = input[y * num_features + x];
}
}
}""")
kernel = mod.get_function("cyclic_roll")
def thunk():
in_shape = inputs[0][0].shape
rows, cols = in_shape
assert rows % 4 == 0
out_shape = (rows, 4 * cols)
batch_size = rows // 4
num_features = cols
out = outputs[0]
# only allocate if there is no previous allocation of the right size.
if out[0] is None or out[0].shape != out_shape:
out[0] = cuda.CudaNdarray.zeros(out_shape)
x_block = 16
y_block = 16
block = (x_block, y_block, 1)
x_grid = int(np.ceil(float(in_shape[1]) / x_block))
y_grid = int(np.ceil(float(in_shape[0]) / y_block))
grid = (x_grid, y_grid, 1)
kernel(inputs[0][0], out[0], np.intc(batch_size), np.intc(num_features), block=block, grid=grid)
thunk.inputs = inputs
thunk.outputs = outputs
thunk.lazy = False
return thunk
def grad(self, inp, grads):
top, = grads
top = cuda.basic_ops.gpu_contiguous(top)
return [CyclicRollGradOp()(top)]
cyclic_roll = CyclicRollOp()
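# A minimal NumPy reference for what the kernel above computes, added to make the
# indexing concrete. The helper is illustrative only and its name is mine; it is not
# used by the op. Output column-block i is a batch-axis roll of the input by
# i*batch_size positions.
def _cyclic_roll_reference(x):
    """x: (4*batch_size, num_features) -> (4*batch_size, 4*num_features)."""
    batch_size = x.shape[0] // 4
    return np.concatenate([np.roll(x, -i * batch_size, axis=0) for i in range(4)],
                          axis=1)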
class CyclicRollGradOp(PyCudaOp):
def output_type(self, inp):
return cuda.CudaNdarrayType(broadcastable=[False] * (inp.type.ndim))
def make_thunk(self, node, storage_map, _, _2):
inputs = [storage_map[v] for v in node.inputs]
outputs = [storage_map[v] for v in node.outputs]
mod = SourceModule("""
__global__ void cyclic_roll_grad(float * input, float * output, int batch_size, int num_features) {
int x = blockIdx.x*blockDim.x + threadIdx.x; // feature dim, fastest varying index!
int y = blockIdx.y*blockDim.y + threadIdx.y; // batch dim
int height = 4 * batch_size;
int width = 4 * num_features;
float val = 0;
if (x < num_features && y < height) {
for (int i = 0; i < 4; i++) {
int y_in = (y + batch_size * (4 - i)) % height;
int x_in = x + num_features * i;
val += input[y_in * width + x_in];
}
output[y * num_features + x] = val;
}
}""")
kernel = mod.get_function("cyclic_roll_grad")
def thunk():
in_shape = inputs[0][0].shape
rows, cols = in_shape
assert rows % 4 == 0
assert cols % 4 == 0
out_shape = (rows, cols // 4)
batch_size = rows // 4
num_features = cols // 4
out = outputs[0]
# only allocate if there is no previous allocation of the right size.
if out[0] is None or out[0].shape != out_shape:
out[0] = cuda.CudaNdarray.zeros(out_shape)
x_block = 16
y_block = 16
block = (x_block, y_block, 1)
x_grid = int(np.ceil(float(out_shape[1]) / x_block))
y_grid = int(np.ceil(float(out_shape[0]) / y_block))
grid = (x_grid, y_grid, 1)
kernel(inputs[0][0], out[0], np.intc(batch_size), np.intc(num_features), block=block, grid=grid)
thunk.inputs = inputs
thunk.outputs = outputs
thunk.lazy = False
return thunk
class CyclicConvRollOp(PyCudaOp):
def output_type(self, inp):
return cuda.CudaNdarrayType(broadcastable=[False] * (inp.type.ndim))
def make_thunk(self, node, storage_map, _, _2):
inputs = [storage_map[v] for v in node.inputs]
outputs = [storage_map[v] for v in node.outputs]
mod = SourceModule("""
__global__ void cyclic_convroll(float * input, float * output, int batch_size, int num_channels, int map_size) {
int x = blockIdx.x*blockDim.x + threadIdx.x; // feature dim, fastest varying index!
int y = blockIdx.y*blockDim.y + threadIdx.y; // batch dim
int map_size_sq = map_size * map_size;
int example_size = num_channels * map_size_sq;
int num_rows = 4 * batch_size; // number of rows in the input/output, seen as a 2D array
int num_cols = 4 * example_size; // number of columns in the output, seen as a 2D array
// feature indices (channels, height, width)
int x_channel = x / map_size_sq;
int x_f0 = (x % map_size_sq) / map_size;
int x_f1 = x % map_size;
int x_out_f0 = x_f0;
int x_out_f1 = x_f1;
int tmp;
if (x < example_size && y < num_rows) {
for (int i = 0; i < 4; i++) {
int y_out = (y + batch_size * (4 - i)) % num_rows;
int x_out = example_size * i + x_channel * map_size_sq + x_out_f0 * map_size + x_out_f1;
output[y_out * num_cols + x_out] = input[y * example_size + x];
// note that the writes to output go in reverse order for all the rotated feature maps.
// this may slow things down a little, perhaps there is room for further optimization.
// rotate
tmp = x_out_f0;
x_out_f0 = x_out_f1;
x_out_f1 = map_size - 1 - tmp;
}
}
}""")
kernel = mod.get_function("cyclic_convroll")
def thunk():
in_shape = inputs[0][0].shape
full_batch_size, num_channels, height, width = in_shape
assert height == width # else convroll doesn't make sense
assert full_batch_size % 4 == 0
out_shape = (full_batch_size, 4 * num_channels, height, width)
batch_size = full_batch_size // 4
example_size = num_channels * height * width
map_size = height
out = outputs[0]
# only allocate if there is no previous allocation of the right size.
if out[0] is None or out[0].shape != out_shape:
out[0] = cuda.CudaNdarray.zeros(out_shape)
x_block = 16
y_block = 16
block = (x_block, y_block, 1)
x_grid = int(np.ceil(float(example_size) / x_block))
y_grid = int(np.ceil(float(full_batch_size) / y_block))
grid = (x_grid, y_grid, 1)
kernel(inputs[0][0], out[0], np.intc(batch_size), np.intc(num_channels), np.intc(map_size), block=block, grid=grid)
thunk.inputs = inputs
thunk.outputs = outputs
thunk.lazy = False
return thunk
def grad(self, inp, grads):
top, = grads
top = cuda.basic_ops.gpu_contiguous(top)
return [CyclicConvRollGradOp()(top)]
cyclic_convroll = CyclicConvRollOp()
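# Similar illustrative sketch for cyclic_convroll (helper name is mine, and the
# rotation direction is my reading of the in-kernel coordinate shuffle, so treat it as
# an assumption): each rolled copy i also rotates the spatial feature maps by a
# multiple of 90 degrees so maps computed on rotated inputs line up in a common frame.
# Requires a NumPy recent enough to support rot90's axes argument.
def _cyclic_convroll_reference(x):
    """x: (4*batch_size, channels, size, size) -> (4*batch_size, 4*channels, size, size)."""
    batch_size = x.shape[0] // 4
    blocks = [np.rot90(np.roll(x, -i * batch_size, axis=0), k=-i, axes=(2, 3))
              for i in range(4)]
    return np.concatenate(blocks, axis=1)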
class CyclicConvRollGradOp(PyCudaOp):
def output_type(self, inp):
return cuda.CudaNdarrayType(broadcastable=[False] * (inp.type.ndim))
def make_thunk(self, node, storage_map, _, _2):
inputs = [storage_map[v] for v in node.inputs]
outputs = [storage_map[v] for v in node.outputs]
mod = SourceModule("""
__global__ void cyclic_convroll_grad(float * input, float * output, int batch_size, int num_channels, int map_size) {
int x = blockIdx.x*blockDim.x + threadIdx.x; // feature dim, fastest varying index!
int y = blockIdx.y*blockDim.y + threadIdx.y; // batch dim
int map_size_sq = map_size * map_size;
int example_size = num_channels * map_size_sq;
int num_rows = 4 * batch_size; // number of rows in the input/output, seen as a 2D array
int num_cols = 4 * example_size; // number of columns in the input, seen as a 2D array
// feature indices (channels, height, width)
int x_channel = x / map_size_sq;
int x_f0 = (x % map_size_sq) / map_size;
int x_f1 = x % map_size;
int x_in_f0 = x_f0;
int x_in_f1 = x_f1;
int tmp;
            float val = 0;
if (x < example_size && y < num_rows) {
for (int i = 0; i < 4; i++) {
int y_in = (y + batch_size * (4 - i)) % num_rows;
int x_in = example_size * i + x_channel * map_size_sq + x_in_f0 * map_size + x_in_f1;
val += input[y_in * num_cols + x_in];
// rotate
tmp = x_in_f0;
x_in_f0 = x_in_f1;
x_in_f1 = map_size - 1 - tmp;
}
output[y * example_size + x] = val;
}
}""")
kernel = mod.get_function("cyclic_convroll_grad")
def thunk():
in_shape = inputs[0][0].shape
full_batch_size, num_channels_rolled, height, width = in_shape
assert height == width # else convroll doesn't make sense
assert full_batch_size % 4 == 0
assert num_channels_rolled % 4 == 0
num_channels = num_channels_rolled // 4
batch_size = full_batch_size // 4
out_shape = (full_batch_size, num_channels, height, width)
example_size = num_channels * height * width
map_size = height
out = outputs[0]
# only allocate if there is no previous allocation of the right size.
if out[0] is None or out[0].shape != out_shape:
out[0] = cuda.CudaNdarray.zeros(out_shape)
x_block = 16
y_block = 16
block = (x_block, y_block, 1)
x_grid = int(np.ceil(float(example_size) / x_block))
y_grid = int(np.ceil(float(full_batch_size) / y_block))
grid = (x_grid, y_grid, 1)
kernel(inputs[0][0], out[0], np.intc(batch_size), np.intc(num_channels), np.intc(map_size), block=block, grid=grid)
thunk.inputs = inputs
thunk.outputs = outputs
thunk.lazy = False
return thunk
| [
"[email protected]"
] | |
761cee9bc33bc3cdd7d2e32c4faecdbf2ed7481f | bab76d8cf312ee3eae66472b6abd119903e17e8e | /CountAndSay.py | 13420321dce191e20923da4c08ead73e60c68669 | [] | no_license | lixuanhong/LeetCode | 91131825d5eca144a46abe82a2ef04ea1f3ff025 | 48d436701840f8c162829cb101ecde444def2307 | refs/heads/master | 2020-04-05T02:54:52.473259 | 2018-11-07T05:31:30 | 2018-11-07T05:31:30 | 156,494,213 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,485 | py | """
The count-and-say sequence is the sequence of integers with the first five terms as following:
1. 1
2. 11
3. 21
4. 1211
5. 111221
1 is read off as "one 1" or 11.
11 is read off as "two 1s" or 21.
21 is read off as "one 2, then one 1" or 1211.
Given an integer n, generate the nth term of the count-and-say sequence.
Note: Each term of the sequence of integers will be represented as a string.
Example 1:
Input: 1
Output: "1"
Example 2:
Input: 4
Output: "1211"
"""
#Problem restated:
#n=1 returns "1"
#n=2: the result for n=1 is "1" (one 1), so return "11"
#n=3: the result for n=2 is "11" (two 1s), so return "21"
#n=4: the result for n=3 is "21" (one 2, then one 1), so return "1211"
#Given n, continue in the same way
class Solution(object):
def countAndSay(self, n):
def count(s):
res = ""
count = 1
for idx, value in enumerate(s):
                if idx < len(s) - 1 and s[idx] != s[idx+1]: # start from the first element and compare s[idx] with s[idx+1]; the idx < len(s) - 1 check guards the lookahead
res += str(count) + value
count = 1
elif idx < len(s) - 1:
count += 1
            res += str(count) + value # append the final run (handles the last element)
return res
s = "1"
for i in range(1, n):
            s = count(s) # s starts as "1", so n-1 iterations produce the n-th term
return s
obj = Solution()
print(obj.countAndSay(6)) #312211
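# An alternative formulation of the same run-length step using itertools.groupby,
# added purely for comparison with the hand-rolled loop above (not part of the
# original submission); both should produce identical output.
from itertools import groupby
def count_and_say_groupby(n):
    s = "1"
    for _ in range(n - 1):
        s = "".join(str(len(list(group))) + digit for digit, group in groupby(s))
    return s
print(count_and_say_groupby(6)) # also 312211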
| [
"[email protected]"
] | |
efe854c65e8348927573faaf27d384468a2f32dc | a90d490bf8a9df04334746acbafa5f8dad20c677 | /recipes/migrations/0009_auto_20160410_2021.py | 6dc28757abd45521c92ee402fe3f6ff6cb9d9162 | [
"MIT"
] | permissive | vanatteveldt/luctor | 8e8ffc20c05cc20a241c677bbe5400a5d71f2882 | 9871fa7afa85f36353b3f4740f73ae3e36d68643 | refs/heads/master | 2023-03-15T20:05:29.220407 | 2023-03-08T22:06:53 | 2023-03-08T22:06:53 | 14,639,858 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,307 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-04-10 20:21
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('recipes', '0008_auto_20160405_0044'),
]
operations = [
migrations.CreateModel(
name='Recipe',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=200)),
('ingredients', models.TextField()),
('instructions', models.TextField()),
],
),
migrations.AlterField(
model_name='lesson',
name='parsed',
field=models.TextField(help_text="Pas hier de opdeling van de kookles in recepten aan. De titel van elk recept wordt aangegeven met ## titel, en ingredienten met | ingredient |. Als je klaar bent klik dan op 'save and continue editing' en op 'view on site'", null=True),
),
migrations.AddField(
model_name='recipe',
name='lesson',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='recipes.Lesson'),
),
]
| [
"[email protected]"
] | |
171f8ff483c3386ba48cab36f6dbfbfd0b5a1471 | 72ab330a358e3d85fb7d3ce29f9da3b9fb1aa6b8 | /quickbooks/objects/timeactivity.py | 459175558f4a1afa9c1e9e0294a3ead28c38c5c9 | [
"MIT"
] | permissive | fuhrysteve/python-quickbooks | d21415c2eb0e758dece4dbdcd3890361781f9ca5 | c017355fa0e9db27001040bf45bc8c48bbd1de45 | refs/heads/master | 2021-01-21T16:04:24.393172 | 2016-01-03T17:50:50 | 2016-01-03T17:50:50 | 48,954,178 | 0 | 0 | null | 2016-01-03T17:18:51 | 2016-01-03T17:18:49 | null | UTF-8 | Python | false | false | 1,302 | py | from six import python_2_unicode_compatible
from .base import Ref, QuickbooksManagedObject, QuickbooksTransactionEntity, LinkedTxnMixin, AttachableRef
@python_2_unicode_compatible
class TimeActivity(QuickbooksManagedObject, QuickbooksTransactionEntity, LinkedTxnMixin):
"""
QBO definition: The TimeActivity entity represents a record of time worked by a vendor or employee.
"""
class_dict = {
"VendorRef": Ref,
"CustomerRef": Ref,
"DepartmentRef": Ref,
"EmployeeRef": Ref,
"ItemRef": Ref,
"ClassRef": Ref,
"AttachableRef": AttachableRef
}
qbo_object_name = "TimeActivity"
def __init__(self):
super(TimeActivity, self).__init__()
self.NameOf = ""
self.TimeZone = ""
self.TxnDate = ""
self.BillableStatus = ""
self.Taxable = False
self.HourlyRate = 0
self.Hours = 0
self.Minutes = 0
self.BreakHours = 0
self.BreakMinutes = 0
self.StartTime = ""
self.EndTime = ""
self.Description = ""
self.VendorRef = None
self.CustomerRef = None
self.DepartmentRef = None
self.EmployeeRef = None
self.ItemRef = None
self.ClassRef = None
self.AttachableRef = None | [
"[email protected]"
] | |
82cbd2304696415df1c92ba0cedca7acc29983b8 | 98c6ea9c884152e8340605a706efefbea6170be5 | /examples/data/Assignment_6/mdlyud002/question2.py | ebdc1dede1c8a1ab523e6c9a607a685c9867f7a7 | [] | no_license | MrHamdulay/csc3-capstone | 479d659e1dcd28040e83ebd9e3374d0ccc0c6817 | 6f0fa0fa1555ceb1b0fb33f25e9694e68b6a53d2 | refs/heads/master | 2021-03-12T21:55:57.781339 | 2014-09-22T02:22:22 | 2014-09-22T02:22:22 | 22,372,174 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,270 | py | # Yudhi Moodley
# Assignment 6 - Vector Calculator
# 23/04/2014
import math
vectorA = []
vectorB = []
addition = []
dotProduct = []
normalization = []
def vector_calculator():
vector1 = input("Enter vector A:\n")
vectorA = vector1.split(' ') # splits the input
vector2 = input("Enter vector B:\n")
vectorB = vector2.split(' ') # splits the input
    # vector addition
for i in range (3):
addNum = eval(vectorA[i]) + eval(vectorB[i])
addition.append(addNum)
print("A+B = [" + str(addition[0]) + ", " + str(addition[1]) + ", " + str(addition[2]) + "]")
    # calculates the dot product of the two vectors
for i in range (3):
multNum = eval(vectorA[i]) * eval(vectorB[i])
dotProduct.append(multNum)
product = 0
for i in range (3):
product += dotProduct[i]
print("A.B = " + str(product))
    # calculates the magnitude (norm) |A| and |B| of each vector
aSum = eval(vectorA[0])**2 + eval(vectorA[1])**2 + eval(vectorA[2])**2
aRoot = ("{0:.2f}".format(math.sqrt(aSum)))
print("|A| =",aRoot)
bSum = eval(vectorB[0])**2 + eval(vectorB[1])**2 + eval(vectorB[2])**2
bRoot = ("{0:.2f}".format(math.sqrt(bSum)))
print("|B| =",bRoot)
vector_calculator() | [
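# Worked example for reference (values computed by hand): entering "1 2 3" for vector A
# and "4 5 6" for vector B should print
#   A+B = [5, 7, 9]
#   A.B = 32
#   |A| = 3.74
#   |B| = 8.77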
"[email protected]"
] | |
6f747b5fb9f472c6c4e89b6ca3610f1726436bee | a5fc521abe901fe9db46a605ec0ba71635bc308b | /managment/migrations/0001_initial.py | 059c4d114c5c4f2ebd8c9b576271218cb6f43401 | [] | no_license | revankarrajat/rms | 020b3736fb0855e547ffe7b3f91eae609cee80c7 | ed68bf427ab5612ae7f3a5308cd8075e19fc1daf | refs/heads/master | 2020-04-12T18:21:32.834786 | 2018-12-24T11:01:57 | 2018-12-24T11:01:57 | 162,676,399 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,218 | py | # Generated by Django 2.1.4 on 2018-12-14 06:29
from django.conf import settings
import django.contrib.auth.models
import django.contrib.auth.validators
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0009_alter_user_last_name_max_length'),
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username')),
('first_name', models.CharField(blank=True, max_length=30, verbose_name='first name')),
('last_name', models.CharField(blank=True, max_length=150, verbose_name='last name')),
('email', models.EmailField(blank=True, max_length=254, verbose_name='email address')),
('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),
('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
('is_owner', models.BooleanField(default=False)),
('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
],
options={
'verbose_name': 'user',
'verbose_name_plural': 'users',
'abstract': False,
},
managers=[
('objects', django.contrib.auth.models.UserManager()),
],
),
migrations.CreateModel(
name='owner',
fields=[
('id', models.AutoField(primary_key=True, serialize=False)),
('num_properties', models.IntegerField(default=0)),
('owner_name', models.CharField(max_length=30)),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='property',
fields=[
('id', models.AutoField(primary_key=True, serialize=False)),
('description', models.TextField(default='')),
('price', models.IntegerField()),
('location', models.CharField(max_length=50)),
('num_views', models.IntegerField(default=0)),
('avg_rating', models.IntegerField(default=0)),
('owner', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='managment.owner')),
],
),
migrations.CreateModel(
name='review',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('rating', models.IntegerField(default=0)),
('comment', models.CharField(max_length=100)),
('prop_id', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='id+', to='managment.property')),
],
),
migrations.CreateModel(
name='visitor',
fields=[
('id', models.AutoField(primary_key=True, serialize=False)),
('profile', models.TextField()),
('pref_location', models.CharField(max_length=30)),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.AddField(
model_name='review',
name='visitor_id',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='id+', to='managment.visitor'),
),
]
| [
"[email protected]"
] | |
d4a8da26e252085f122e551fb397f2999bd76eec | 17c9bdd9f740f5549c2ae95c22d0f42907af6bf4 | /beautiful.py | 74906ad989a3ec033998b8f2093f95878b9d36ae | [] | no_license | vim-scripts/beautiful-pastebin | e8a2510aaeff1d782f7fd7552c5475edc1f9a380 | 854f3373b0b8e52a697e9856486906311efd138c | refs/heads/master | 2021-01-13T02:14:33.027077 | 2011-06-08T00:00:00 | 2011-06-23T22:51:24 | 1,865,838 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 646 | py | #File: beautiful.py
#Author : Aman Agarwal <[email protected]>
#License : MIT
#version 1.0
#Dependencies : BeautifulSoup <http://www.crummy.com/software/BeautifulSoup/>
#
import urllib2
from BeautifulSoup import BeautifulSoup
import sys
data = urllib2.urlopen(sys.argv[1]).read()
soup = BeautifulSoup(''.join(data))
code = soup('div', {'id': 'code_frame'})
soup = BeautifulSoup(''.join(str(code[0]).strip()))
code_text = soup.div.div
text = ''.join(BeautifulSoup(str(code_text).strip()).findAll(text=True))
code_for_vim = BeautifulSoup(str(text).strip(), convertEntities=BeautifulSoup.HTML_ENTITIES)
print code_for_vim
#print sys.argv[1]
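# Usage sketch (assumed invocation; any pastebin.com paste URL should work):
#   python beautiful.py http://pastebin.com/<paste_id>
# The decoded source of the paste is printed to stdout, so it can be piped
# straight into an editor, e.g.: python beautiful.py <url> | vim -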
| [
"[email protected]"
] | |
4b18bbafce196b41f74a02a0ded69010dc374a94 | 569db39ea53d67b695d5573e567e1b85cd83176f | /testcases/tutu/Android/AITest/__init__.py | 9596f08c4a577f19f599f5dd0c5ffe3af31631ff | [] | no_license | 1weifang/tutuandroidautotest | f38d9c86023e4d3857b04a8860f9d5ec810c485d | f3fb49eacee27682f478cb8b27a5e8f38d62e2b1 | refs/heads/master | 2022-11-15T04:48:25.333206 | 2020-07-14T03:38:16 | 2020-07-14T03:38:16 | 279,472,772 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 181 | py | #!/usr/bin/python3
# -*- coding: utf-8 -*-
# @Time : 2019/11/19 9:54
# @Author : Durat
# @Email : [email protected]
# @File : __init__.py.py
# @Software: PyCharm | [
"1qaz!QAZ1"
] | 1qaz!QAZ1 |
b212f30ce3a4c9af92e433cec3f79e72b4586b9f | c71a1053315e9277daf01f2b6d3b7b3f9cc77075 | /menu/urls.py | 7755a066a01883ca36d599c7d6927de8a072fdae | [] | no_license | ingafter60/dinner | f59bb42135d5dd8eb9a42bf665ea1dfc30e01937 | 08b4a33d899ffa45bb7f56b58cfef97703bd2083 | refs/heads/master | 2020-07-03T20:28:27.635316 | 2019-08-18T03:14:44 | 2019-08-18T03:14:44 | 202,040,200 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 207 | py | # menu urls.py
from django.urls import path
from menu import views
app_name = 'menu'
urlpatterns = [
path('', views.menuList, name='menuList'),
path('<slug:slug>', views.menuDetail, name='menuDetail'),
] | [
"[email protected]"
] | |
5b9b16d3f350192012b8a8d223b402d78902b5c8 | fbbe424559f64e9a94116a07eaaa555a01b0a7bb | /Spacy/source2.7/spacy/lang/id/tokenizer_exceptions.py | 3bba57e4cbd39db28e872da9aa8cb1051962e24a | [
"MIT"
] | permissive | ryfeus/lambda-packs | 6544adb4dec19b8e71d75c24d8ed789b785b0369 | cabf6e4f1970dc14302f87414f170de19944bac2 | refs/heads/master | 2022-12-07T16:18:52.475504 | 2022-11-29T13:35:35 | 2022-11-29T13:35:35 | 71,386,735 | 1,283 | 263 | MIT | 2022-11-26T05:02:14 | 2016-10-19T18:22:39 | Python | UTF-8 | Python | false | false | 1,722 | py | # coding: utf8
from __future__ import unicode_literals
import regex as re
from ._tokenizer_exceptions_list import ID_BASE_EXCEPTIONS
from ..tokenizer_exceptions import URL_PATTERN
from ...symbols import ORTH
_exc = {}
for orth in ID_BASE_EXCEPTIONS:
_exc[orth] = [{ORTH: orth}]
orth_title = orth.title()
_exc[orth_title] = [{ORTH: orth_title}]
orth_caps = orth.upper()
_exc[orth_caps] = [{ORTH: orth_caps}]
orth_lower = orth.lower()
_exc[orth_lower] = [{ORTH: orth_lower}]
if '-' in orth:
orth_title = '-'.join([part.title() for part in orth.split('-')])
_exc[orth_title] = [{ORTH: orth_title}]
orth_caps = '-'.join([part.upper() for part in orth.split('-')])
_exc[orth_caps] = [{ORTH: orth_caps}]
for orth in [
"'d", "a.m.", "Adm.", "Bros.", "co.", "Co.", "Corp.", "D.C.", "Dr.", "e.g.",
"E.g.", "E.G.", "Gen.", "Gov.", "i.e.", "I.e.", "I.E.", "Inc.", "Jr.",
"Ltd.", "Md.", "Messrs.", "Mo.", "Mont.", "Mr.", "Mrs.", "Ms.", "p.m.",
"Ph.D.", "Rep.", "Rev.", "Sen.", "St.", "vs.",
"B.A.", "B.Ch.E.", "B.Sc.", "Dr.", "Dra.", "Drs.", "Hj.", "Ka.", "Kp.",
"M.Ag.", "M.Hum.", "M.Kes,", "M.Kom.", "M.M.", "M.P.", "M.Pd.", "M.Sc.",
"M.Si.", "M.Sn.", "M.T.", "M.Th.", "No.", "Pjs.", "Plt.", "R.A.", "S.Ag.",
"S.E.", "S.H.", "S.Hut.", "S.K.M.", "S.Kedg.", "S.Kedh.", "S.Kom.",
"S.Pd.", "S.Pol.", "S.Psi.", "S.S.", "S.Sos.", "S.T.", "S.Tekp.", "S.Th.",
"a.l.", "a.n.", "a.s.", "b.d.", "d.a.", "d.l.", "d/h", "dkk.", "dll.",
"dr.", "drh.", "ds.", "dsb.", "dst.", "faks.", "fax.", "hlm.", "i/o",
"n.b.", "p.p." "pjs.", "s.d.", "tel.", "u.p.",
]:
_exc[orth] = [{ORTH: orth}]
TOKENIZER_EXCEPTIONS = _exc
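# Illustration (assumed lookup, derived from the loops above): an entry such as
# TOKENIZER_EXCEPTIONS["Dr."] maps to [{ORTH: "Dr."}], telling the tokenizer to
# keep "Dr." as a single token instead of splitting off the trailing period.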
| [
"[email protected]"
] | |
5506648e5839441f8042bcb8beadaa3a9c211d93 | ef0220b65d3ac860d33d77b6a8eef74d2df3b81b | /mod10/flask/venv/bin/wheel-3.8 | e1b86e3bb58c1e5edfcfb27af7a4be942b7afe75 | [] | no_license | safwanvk/py | b9922df211fe7f4356be3221c639cdd6d3dcf2d8 | 52482a90fb39f15846987607f1988c50f07e758b | refs/heads/master | 2022-12-12T12:21:51.335733 | 2020-09-07T08:21:49 | 2020-09-07T08:21:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 241 | 8 | #!/home/safwan/xanthron/mod10/flask/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from wheel.cli import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"[email protected]"
] | |
51dd65811d72d74966faf28d8b397f1eb74579b0 | ddda55fcfc84ac5cd78cfc5c336a3df0b9096157 | /components/ble/mynewt-nimble/docs/conf.py | 629b8a4f14b0e686d9f12357cc72d9f04ee83c5c | [
"LicenseRef-scancode-gary-s-brown",
"BSD-3-Clause",
"Apache-2.0"
] | permissive | liu-delong/lu_xing_xiang_one_os | 701b74fceb82dbb2806518bfb07eb85415fab43a | 0c659cb811792f2e190d5a004a531bab4a9427ad | refs/heads/master | 2023-06-17T03:02:13.426431 | 2021-06-28T08:12:41 | 2021-06-28T08:12:41 | 379,661,507 | 2 | 2 | Apache-2.0 | 2021-06-28T10:08:10 | 2021-06-23T16:11:54 | C | UTF-8 | Python | false | false | 5,476 | py | # -*- coding: utf-8 -*-
#
# Mynewt documentation build configuration file, created by
# sphinx-quickstart on Tue Jan 10 11:33:44 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('_ext'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc', 'breathe', 'sphinx.ext.todo',
'sphinx.ext.extlinks'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = []
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'NimBLE Bluetooth Stack'
copyright = u'Copyright © 2018 The Apache Software Foundation, Licensed under the Apache License, Version 2.0. Apache and the Apache feather logo are trademarks of The Apache Software Foundation.'
author = u'The Apache Software Foundation'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'1.0'
# The full version, including alpha/beta/rc tags.
release = u'1.0.0-b1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'README.rst', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
highlight_language = 'none'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
html_theme_path = []
html_sidebars = {
'**': [
'about.html',
'navigation.html',
'relations.html',
'searchbox.html',
'donate.html',
]
}
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
html_theme_options = {
}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = []
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'Mynewtdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'Mynewt.tex', u'NimBLE Bluetooth Stack',
u'The Apache Software Foundation', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'mynewt', u'Mynewt Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'Mynewt', u'NimBLE Bluetooth Stack',
author, 'Mynewt', 'One line description of project.',
'Miscellaneous'),
]
breathe_projects = {
"mynewt": "_build/xml"
}
breathe_default_project = "mynewt"
breathe_domain_by_extension = {
"h" : "c",
}
| [
"[email protected]"
] | |
9b209805bbc3e5381db705ee82f66c38d2e5ef39 | a9243f735f6bb113b18aa939898a97725c358a6d | /0.15/_downloads/plot_compute_rt_average.py | fd3b17129bcbbdb519a78a19a35ccce09b59e38c | [] | permissive | massich/mne-tools.github.io | 9eaf5edccb4c35831400b03278bb8c2321774ef2 | 95650593ba0eca4ff8257ebcbdf05731038d8d4e | refs/heads/master | 2020-04-07T08:55:46.850530 | 2019-09-24T12:26:02 | 2019-09-24T12:26:02 | 158,233,630 | 0 | 0 | BSD-3-Clause | 2018-11-19T14:06:16 | 2018-11-19T14:06:16 | null | UTF-8 | Python | false | false | 1,912 | py | """
========================================================
Compute real-time evoked responses using moving averages
========================================================
This example demonstrates how to connect to an MNE Real-time server
using the RtClient and use it together with RtEpochs to compute
evoked responses using moving averages.
Note: The MNE Real-time server (mne_rt_server), which is part of mne-cpp,
has to be running on the same computer.
"""
# Authors: Martin Luessi <[email protected]>
# Mainak Jas <[email protected]>
#
# License: BSD (3-clause)
import matplotlib.pyplot as plt
import mne
from mne.datasets import sample
from mne.realtime import RtEpochs, MockRtClient
print(__doc__)
# Fiff file to simulate the realtime client
data_path = sample.data_path()
raw_fname = data_path + '/MEG/sample/sample_audvis_filt-0-40_raw.fif'
raw = mne.io.read_raw_fif(raw_fname, preload=True)
# select gradiometers
picks = mne.pick_types(raw.info, meg='grad', eeg=False, eog=True,
stim=True, exclude=raw.info['bads'])
# select the left-auditory condition
event_id, tmin, tmax = 1, -0.2, 0.5
# create the mock-client object
rt_client = MockRtClient(raw)
# create the real-time epochs object
rt_epochs = RtEpochs(rt_client, event_id, tmin, tmax, picks=picks,
decim=1, reject=dict(grad=4000e-13, eog=150e-6))
# start the acquisition
rt_epochs.start()
# send raw buffers
rt_client.send_data(rt_epochs, picks, tmin=0, tmax=150, buffer_size=1000)
for ii, ev in enumerate(rt_epochs.iter_evoked()):
print("Just got epoch %d" % (ii + 1))
ev.pick_types(meg=True, eog=False) # leave out the eog channel
if ii == 0:
evoked = ev
else:
evoked = mne.combine_evoked([evoked, ev], weights='nave')
plt.clf() # clear canvas
evoked.plot(axes=plt.gca()) # plot on current figure
plt.pause(0.05)
| [
"[email protected]"
] | |
3da13c58c4199d31c98e3b0c81e7ab5d55abad24 | a873f3cd46a10ad879fc56d78e1f533d8bf486c0 | /z_python-stu1/first/廖雪峰/迭代.py | 4115de44525792f329471d5da4b183b906436215 | [] | no_license | shenhaiyu0923/resful | d0301b39363e6b3d3659f62fa4a9b2532ebcd225 | 1e66cae7d68fa231794776953cc1a5e999bf36c6 | refs/heads/master | 2021-07-08T20:46:57.300298 | 2021-06-01T08:17:27 | 2021-06-01T08:17:27 | 244,308,016 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 711 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = '[email protected]'
def findMinAndMax(L):
if len(L) == 0:
return (None, None)
else:
for i, x in enumerate(L):
if i == 0:
min = max = x
else:
if x > max:
max = x
if x < min:
min = x
return (min, max)
# Tests
if findMinAndMax([]) != (None, None):
    print('Test failed!')
elif findMinAndMax([7]) != (7, 7):
    print('Test failed!')
elif findMinAndMax([7, 1]) != (1, 7):
    print('Test failed!')
elif findMinAndMax([7, 1, 3, 9, 5]) != (1, 9):
    print('Test failed!')
else:
    print('Test passed!')
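# Note: for a non-empty list L, the built-ins give the same result,
# i.e. (min(L), max(L)) == findMinAndMax(L); the explicit loop above only
# adds the (None, None) convention for an empty list.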
| [
"[email protected]"
] | |
b9f3a49f7f1fe0e94be6a1066047c260b2555dcc | 56f5b2ea36a2258b8ca21e2a3af9a5c7a9df3c6e | /CMGTools/H2TauTau/prod/TauES_test/down/emb/DoubleMuParked/StoreResults-Run2012D_22Jan2013_v1_PFembedded_trans1_tau132_pthad1_30had2_30_v1-5ef1c0fd428eb740081f19333520fdc8/USER/V5_B/PAT_CMG_V5_16_0_1374851334/HTT_24Jul_newTES_manzoni_Down_Jobs/Job_18/run_cfg.py | 38dbc249e4f6a3beb3e7f9386fe60200d89f9895 | [] | no_license | rmanzoni/HTT | 18e6b583f04c0a6ca10142d9da3dd4c850cddabc | a03b227073b2d4d8a2abe95367c014694588bf98 | refs/heads/master | 2016-09-06T05:55:52.602604 | 2014-02-20T16:35:34 | 2014-02-20T16:35:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 69,054 | py | import FWCore.ParameterSet.Config as cms
import os,sys
sys.path.append('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/H2TauTau/prod/TauES_test/down/emb/DoubleMuParked/StoreResults-Run2012D_22Jan2013_v1_PFembedded_trans1_tau132_pthad1_30had2_30_v1-5ef1c0fd428eb740081f19333520fdc8/USER/V5_B/PAT_CMG_V5_16_0_1374851334/HTT_24Jul_newTES_manzoni_Down_Jobs')
from base_cfg import *
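# The lumisToProcess entries below use CMSSW's "run:firstLumi-run:lastLumi"
# luminosity-block range syntax (a bare "run:lumi" selects a single block);
# only events falling inside these ranges are processed.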
process.source = cms.Source("PoolSource",
noEventSort = cms.untracked.bool(True),
inputCommands = cms.untracked.vstring('keep *',
'drop cmgStructuredPFJets_cmgStructuredPFJetSel__PAT'),
lumisToProcess = cms.untracked.VLuminosityBlockRange( ("190645:10-190645:110", "190646:1-190646:111", "190659:33-190659:167", "190679:1-190679:55", "190688:69-190688:249",
"190702:51-190702:53", "190702:55-190702:122", "190702:124-190702:169", "190703:1-190703:252", "190704:1-190704:3",
"190705:1-190705:5", "190705:7-190705:65", "190705:81-190705:336", "190705:338-190705:350", "190705:353-190705:383",
"190706:1-190706:126", "190707:1-190707:237", "190707:239-190707:257", "190708:1-190708:189", "190733:71-190733:96",
"190733:99-190733:389", "190733:392-190733:460", "190736:1-190736:80", "190736:83-190736:185", "190738:1-190738:130",
"190738:133-190738:226", "190738:229-190738:349", "190782:55-190782:181", "190782:184-190782:233", "190782:236-190782:399",
"190782:401-190782:409", "190895:64-190895:202", "190895:210-190895:302", "190895:305-190895:584", "190895:587-190895:948",
"190906:73-190906:256", "190906:259-190906:354", "190906:356-190906:496", "190945:124-190945:207", "190949:1-190949:81",
"191043:45-191043:46", "191046:1-191046:21", "191046:24-191046:82", "191046:84-191046:88", "191046:92-191046:116",
"191046:119-191046:180", "191046:183", "191046:185-191046:239", "191056:1", "191056:4-191056:9",
"191056:16-191056:17", "191056:19", "191057:1", "191057:4-191057:40", "191062:1",
"191062:3", "191062:5-191062:214", "191062:216-191062:541", "191090:1-191090:55", "191201:38-191201:49",
"191201:52-191201:79", "191202:1-191202:64", "191202:66-191202:68", "191202:87-191202:105", "191202:108-191202:118",
"191226:77-191226:78", "191226:81-191226:831", "191226:833-191226:1454", "191226:1456-191226:1466", "191226:1469-191226:1507",
"191226:1510-191226:1686", "191247:1-191247:153", "191247:156-191247:280", "191247:283-191247:606", "191247:608-191247:620",
"191247:622-191247:818", "191247:821-191247:834", "191247:837-191247:1031", "191247:1034-191247:1046", "191247:1049-191247:1140",
"191247:1143-191247:1187", "191247:1190-191247:1214", "191247:1217-191247:1224", "191248:1-191248:103", "191264:59-191264:79",
"191264:82-191264:152", "191264:155-191264:189", "191271:56-191271:223", "191271:225-191271:363", "191276:1-191276:16",
"191277:1-191277:28", "191277:30-191277:164", "191277:167-191277:253", "191277:255-191277:457", "191277:460-191277:535",
"191277:537-191277:576", "191277:579-191277:775", "191277:778-191277:811", "191277:813-191277:849", "191367:1-191367:2",
"191411:1-191411:23", "191695:1", "191718:43-191718:95", "191718:98-191718:207", "191720:1",
"191720:3-191720:15", "191720:17-191720:181", "191721:1", "191721:3-191721:34", "191721:36-191721:183",
"191721:186-191721:189", "191726:1-191726:13", "191810:15", "191810:22-191810:49", "191810:52-191810:92",
"191830:54-191830:242", "191830:245-191830:301", "191830:304-191830:393", "191833:1", "191833:3-191833:103",
"191834:1-191834:30", "191834:33-191834:74", "191834:77-191834:299", "191834:302-191834:352", "191837:1-191837:44",
"191837:47-191837:53", "191837:56-191837:65", "191856:1-191856:133", "191859:1-191859:28", "191859:31-191859:126",
"193093:1-193093:33", "193123:1-193123:27", "193124:1-193124:52", "193192:58-193192:86", "193193:1-193193:6",
"193193:8", "193193:11-193193:83", "193193:86-193193:120", "193193:122-193193:160", "193193:162-193193:274",
"193193:276-193193:495", "193193:497-193193:506", "193207:54-193207:182", "193334:29-193334:172", "193336:1-193336:264",
"193336:267-193336:492", "193336:495-193336:684", "193336:687-193336:729", "193336:732-193336:951", "193541:77-193541:101",
"193541:103-193541:413", "193541:416-193541:575", "193541:578-193541:619", "193556:41-193556:83", "193557:1-193557:84",
"193575:48-193575:173", "193575:176-193575:349", "193575:351-193575:394", "193575:397-193575:415", "193575:417-193575:658",
"193575:660-193575:752", "193621:60-193621:570", "193621:573-193621:769", "193621:772-193621:976", "193621:979-193621:1053",
"193621:1056-193621:1137", "193621:1139-193621:1193", "193621:1195-193621:1371", "193621:1373-193621:1654", "193834:1-193834:35",
"193835:1-193835:20", "193835:22-193835:26", "193836:1-193836:2", "193998:66-193998:113", "193998:115-193998:278",
"193999:1-193999:45", "194027:57-194027:113", "194050:53-194050:113", "194050:116-194050:273", "194050:275-194050:355",
"194050:357-194050:369", "194050:372-194050:391", "194050:394-194050:490", "194050:492-194050:814", "194050:816-194050:1435",
"194050:1437-194050:1735", "194050:1760-194050:1888", "194051:1-194051:12", "194052:1-194052:99", "194052:102-194052:166",
"194075:48-194075:101", "194075:103", "194075:105-194075:107", "194075:109", "194075:111",
"194076:1-194076:9", "194076:11-194076:55", "194076:58-194076:163", "194076:165-194076:228", "194076:230-194076:264",
"194076:267-194076:507", "194076:509-194076:527", "194076:530-194076:538", "194076:541-194076:562", "194076:565-194076:748",
"194108:81-194108:161", "194108:164-194108:264", "194108:266-194108:373", "194108:376-194108:396", "194108:398-194108:433",
"194108:436-194108:452", "194108:454-194108:577", "194108:579-194108:590", "194108:593-194108:668", "194108:671-194108:872",
"194115:66-194115:184", "194115:186-194115:338", "194115:340-194115:346", "194115:348-194115:493", "194115:496-194115:731",
"194115:819-194115:857", "194117:1-194117:38", "194119:1-194119:229", "194119:232-194119:261", "194120:1-194120:162",
"194120:165-194120:406", "194150:42-194150:127", "194150:129-194150:261", "194150:264-194150:311", "194151:47-194151:72",
"194151:75-194151:191", "194151:193-194151:238", "194151:240-194151:617", "194151:619", "194151:621",
"194151:623", "194153:1-194153:115", "194199:96-194199:227", "194199:229-194199:336", "194199:339-194199:402",
"194210:3-194210:195", "194210:198-194210:217", "194210:220-194210:359", "194210:361-194210:555", "194223:61-194223:112",
"194224:1-194224:126", "194224:129-194224:206", "194224:208-194224:250", "194224:253-194224:309", "194224:312-194224:386",
"194224:389-194224:412", "194225:1-194225:23", "194225:26-194225:47", "194225:49-194225:85", "194225:88-194225:149",
"194270:56-194270:68", "194303:56-194303:66", "194303:69-194303:102", "194304:1-194304:43", "194304:46",
"194305:1-194305:84", "194314:52-194314:130", "194314:133-194314:300", "194315:1-194315:10", "194315:13-194315:314",
"194315:317-194315:428", "194315:431-194315:452", "194315:455-194315:467", "194317:1-194317:20", "194424:63-194424:141",
"194424:144-194424:195", "194424:198-194424:266", "194424:268-194424:421", "194424:424-194424:478", "194424:481-194424:531",
"194424:534-194424:553", "194424:556-194424:706", "194424:708", "194428:1-194428:85", "194428:87-194428:122",
"194428:125-194428:294", "194428:296-194428:465", "194429:1-194429:4", "194429:7-194429:54", "194429:57-194429:147",
"194429:150-194429:411", "194429:413-194429:742", "194429:745-194429:986", "194429:988-194429:1019", "194439:46-194439:77",
"194439:79-194439:106", "194455:45-194455:64", "194455:67-194455:140", "194455:142-194455:255", "194455:293-194455:303",
"194464:1-194464:127", "194464:130-194464:142", "194464:145-194464:210", "194479:1-194479:44", "194479:165-194479:232",
"194479:235-194479:262", "194479:265-194479:374", "194479:377-194479:431", "194479:434-194479:489", "194479:492-194479:529",
"194479:531-194479:566", "194480:1-194480:32", "194480:34-194480:205", "194480:207-194480:375", "194480:377-194480:387",
"194480:389-194480:759", "194480:762-194480:956", "194480:959-194480:1402", "194533:46-194533:379", "194533:382-194533:415",
"194533:417-194533:618", "194533:620-194533:872", "194619:31-194619:110", "194631:1-194631:42", "194631:44-194631:100",
"194631:102-194631:169", "194631:171-194631:222", "194643:1-194643:287", "194644:1-194644:168", "194644:171-194644:181",
"194644:184-194644:185", "194644:187-194644:319", "194644:321-194644:421", "194691:61-194691:104", "194691:107-194691:155",
"194691:158-194691:251", "194691:254-194691:268", "194691:271-194691:272", "194691:275-194691:289", "194691:292-194691:313",
"194699:1-194699:30", "194699:32-194699:52", "194699:55-194699:64", "194699:67-194699:71", "194699:73-194699:154",
"194699:157-194699:215", "194699:218-194699:238", "194699:241-194699:259", "194702:1-194702:138", "194702:141-194702:191",
"194704:1-194704:41", "194704:44-194704:545", "194704:548-194704:592", "194711:1-194711:7", "194711:9-194711:619",
"194712:1-194712:56", "194712:61-194712:418", "194712:420-194712:625", "194712:627-194712:759", "194735:44-194735:71",
"194735:74-194735:101", "194735:104-194735:130", "194778:60-194778:118", "194778:120-194778:219", "194789:1-194789:18",
"194789:21-194789:32", "194789:34-194789:80", "194789:82-194789:166", "194789:168-194789:269", "194789:272-194789:405",
"194789:409-194789:414", "194789:417-194789:427", "194789:430-194789:566", "194790:1-194790:45", "194825:72-194825:117",
"194825:120-194825:221", "194896:34-194896:55", "194896:58-194896:79", "194896:82-194896:103", "194897:1-194897:6",
"194897:8-194897:78", "194897:80-194897:96", "194897:98-194897:102", "194912:53-194912:70", "194912:72-194912:96",
"194912:98-194912:444", "194912:446-194912:450", "194912:453-194912:467", "194912:470-194912:561", "194912:564-194912:660",
"194912:663-194912:813", "194912:815-194912:840", "194912:843-194912:864", "194912:866-194912:1004", "194912:1007-194912:1025",
"194912:1027-194912:1067", "194912:1069-194912:1137", "194912:1140-194912:1166", "194912:1168-194912:1249", "194912:1251-194912:1304",
"194912:1307-194912:1444", "194912:1447-194912:1487", "194912:1489-194912:1503", "194912:1506-194912:1662", "194914:1-194914:38",
"194915:1-194915:74", "195013:94-195013:144", "195013:146-195013:185", "195013:187-195013:206", "195013:208-195013:299",
"195013:302-195013:324", "195013:326-195013:366", "195013:369-195013:447", "195013:450-195013:526", "195013:528-195013:541",
"195014:1-195014:6", "195014:9-195014:119", "195014:121-195014:148", "195015:1-195015:13", "195016:1-195016:21",
"195016:23-195016:55", "195016:58-195016:63", "195016:65-195016:174", "195016:177-195016:184", "195016:186-195016:241",
"195016:243-195016:246", "195016:248-195016:251", "195016:254-195016:367", "195016:370-195016:422", "195016:425-195016:560",
"195016:563-195016:569", "195099:70-195099:144", "195099:147-195099:186", "195099:189-195099:208", "195099:211-195099:224",
"195099:227-195099:248", "195109:98-195109:241", "195112:1-195112:12", "195112:15-195112:26", "195113:1-195113:209",
"195113:212-195113:388", "195113:391-195113:403", "195113:406-195113:419", "195113:422-195113:492", "195113:495-195113:579",
"195114:1-195114:69", "195114:72-195114:103", "195115:1-195115:7", "195115:10-195115:22", "195147:132-195147:282",
"195147:285-195147:294", "195147:297-195147:331", "195147:334-195147:363", "195147:366-195147:442", "195147:445-195147:536",
"195147:539-195147:559", "195163:72-195163:138", "195163:140-195163:224", "195163:227-195163:240", "195163:243",
"195163:246-195163:347", "195164:1-195164:64", "195165:1-195165:4", "195165:7-195165:41", "195165:44-195165:54",
"195165:56-195165:153", "195165:156-195165:260", "195165:263-195165:266", "195251:1-195251:131", "195251:134-195251:137",
"195251:140-195251:152", "195251:154-195251:165", "195251:167-195251:242", "195303:109-195303:191", "195303:194-195303:277",
"195303:280-195303:310", "195303:312-195303:316", "195303:318-195303:409", "195304:1-195304:3", "195304:6-195304:22",
"195304:27-195304:80", "195304:83-195304:100", "195304:103-195304:154", "195304:157-195304:341", "195304:344-195304:588",
"195304:590-195304:727", "195304:729-195304:1003", "195304:1006-195304:1079", "195304:1083-195304:1140", "195304:1143-195304:1229",
"195378:90-195378:117", "195378:120-195378:127", "195378:130-195378:185", "195378:187-195378:204", "195378:206-195378:302",
"195378:305-195378:542", "195378:544-195378:565", "195378:567-195378:645", "195378:647-195378:701", "195378:703-195378:734",
"195378:737-195378:1120", "195378:1122-195378:1133", "195390:1", "195390:4-195390:27", "195390:30-195390:145",
"195390:147-195390:183", "195390:186-195390:187", "195390:190-195390:208", "195390:210-195390:213", "195390:215-195390:400",
"195396:49-195396:55", "195396:58-195396:63", "195396:66-195396:131", "195397:1-195397:10", "195397:12-195397:89",
"195397:92-195397:120", "195397:123-195397:141", "195397:143-195397:251", "195397:253", "195397:256-195397:475",
"195397:478-195397:525", "195397:527-195397:608", "195397:611-195397:776", "195397:779-195397:970", "195397:972-195397:1121",
"195397:1123-195397:1181", "195397:1184-195397:1198", "195397:1200-195397:1209", "195398:3-195398:137", "195398:139-195398:494",
"195398:497-195398:585", "195398:587-195398:817", "195398:820-195398:824", "195398:827-195398:1225", "195398:1228-195398:1307",
"195398:1309-195398:1712", "195398:1721-195398:1736", "195398:1741-195398:1752", "195398:1767-195398:1795", "195399:1-195399:192",
"195399:194-195399:382", "195530:1-195530:80", "195530:82-195530:104", "195530:107-195530:156", "195530:159-195530:300",
"195530:302-195530:405", "195540:68-195540:123", "195540:126-195540:137", "195540:140-195540:283", "195540:286-195540:319",
"195551:91-195551:106", "195552:1-195552:21", "195552:23-195552:27", "195552:30-195552:147", "195552:149-195552:155",
"195552:158-195552:182", "195552:185-195552:287", "195552:290-195552:349", "195552:352-195552:469", "195552:472-195552:815",
"195552:818-195552:823", "195552:825-195552:883", "195552:885-195552:1152", "195552:1154-195552:1300", "195552:1303-195552:1789",
"195633:40-195633:42", "195647:1-195647:41", "195649:1-195649:69", "195649:72-195649:151", "195649:154-195649:181",
"195649:183-195649:247", "195655:1-195655:129", "195655:131-195655:184", "195655:186-195655:260", "195655:263-195655:350",
"195655:353-195655:446", "195655:448-195655:483", "195655:485-195655:498", "195656:1-195656:362", "195658:1-195658:37",
"195658:40-195658:362", "195658:364-195658:382", "195658:384-195658:386", "195749:1-195749:8", "195749:10-195749:33",
"195749:36-195749:131", "195757:1-195757:82", "195757:85-195757:115", "195757:118-195757:161", "195757:163-195757:206",
"195758:1-195758:18", "195774:1-195774:13", "195774:16-195774:137", "195774:139-195774:151", "195774:154-195774:162",
"195774:164-195774:256", "195774:258-195774:276", "195774:279-195774:362", "195774:365-195774:466", "195774:469-195774:618",
"195774:620-195774:649", "195774:651-195774:830", "195775:1-195775:57", "195775:60-195775:100", "195775:103-195775:170",
"195776:1-195776:63", "195776:66-195776:283", "195776:286-195776:337", "195776:340-195776:399", "195776:401-195776:409",
"195776:411-195776:477", "195841:74-195841:85", "195868:1-195868:88", "195868:90-195868:107", "195868:110-195868:205",
"195915:1-195915:109", "195915:111-195915:275", "195915:278-195915:390", "195915:393-195915:417", "195915:419-195915:429",
"195915:432-195915:505", "195915:507-195915:747", "195915:749-195915:785", "195915:787-195915:828", "195915:830-195915:850",
"195916:1-195916:16", "195916:19-195916:68", "195916:71-195916:212", "195917:1-195917:4", "195918:1-195918:44",
"195918:46", "195918:49-195918:64", "195919:1-195919:15", "195923:1-195923:14", "195925:1-195925:12",
"195926:1", "195926:3-195926:19", "195926:21-195926:34", "195929:1-195929:29", "195930:1-195930:77",
"195930:80-195930:176", "195930:179-195930:526", "195930:529-195930:596", "195937:1-195937:28", "195937:31-195937:186",
"195937:188-195937:396", "195947:23-195947:62", "195947:64-195947:88", "195948:51-195948:116", "195948:119-195948:144",
"195948:147", "195948:150-195948:352", "195948:355-195948:369", "195948:372-195948:402", "195948:404-195948:500",
"195948:503-195948:540", "195948:543-195948:565", "195948:567-195948:602", "195948:605-195948:615", "195950:1-195950:71",
"195950:73-195950:138", "195950:141-195950:169", "195950:172-195950:332", "195950:335-195950:350", "195950:353-195950:382",
"195950:385-195950:421", "195950:424-195950:450", "195950:453-195950:483", "195950:485-195950:616", "195950:619-195950:715",
"195950:718-195950:787", "195950:789-195950:800", "195950:803-195950:829", "195950:831", "195950:833-195950:1587",
"195963:54-195963:58", "195970:44-195970:49", "195970:51-195970:85", "196019:54-196019:68", "196027:1-196027:55",
"196027:58-196027:119", "196027:121-196027:155", "196027:158-196027:186", "196046:12-196046:40", "196047:1-196047:64",
"196047:70-196047:75", "196048:1-196048:44", "196048:46-196048:48", "196197:58-196197:122", "196197:125-196197:179",
"196197:181-196197:311", "196197:313-196197:516", "196197:519-196197:562", "196199:1-196199:33", "196199:36-196199:83",
"196199:86-196199:118", "196199:121-196199:147", "196199:150-196199:237", "196199:239-196199:285", "196199:287-196199:534",
"196200:1-196200:68", "196202:3-196202:61", "196202:64-196202:108", "196203:1-196203:102", "196203:107-196203:117",
"196218:55-196218:199", "196218:201-196218:224", "196218:226-196218:393", "196218:396-196218:494", "196218:496-196218:741",
"196218:744-196218:752", "196218:754-196218:757", "196218:759-196218:820", "196239:1-196239:59", "196239:62-196239:154",
"196239:157-196239:272", "196239:274-196239:373", "196239:375-196239:432", "196239:435-196239:465", "196239:468-196239:647",
"196239:650-196239:706", "196239:709-196239:1025", "196249:63-196249:77", "196249:80-196249:99", "196250:1-196250:2",
"196250:5-196250:265", "196250:267-196250:426", "196252:1-196252:35", "196334:59-196334:111", "196334:113-196334:123",
"196334:126-196334:132", "196334:135-196334:167", "196334:170-196334:193", "196334:196-196334:257", "196334:259-196334:267",
"196334:270-196334:289", "196334:292-196334:342", "196349:65-196349:84", "196349:86-196349:154", "196349:157-196349:244",
"196349:246-196349:258", "196357:1-196357:4", "196359:1-196359:2", "196362:1-196362:88", "196363:1-196363:8",
"196363:11-196363:34", "196364:1-196364:93", "196364:96-196364:136", "196364:139-196364:365", "196364:368-196364:380",
"196364:382-196364:601", "196364:603-196364:795", "196364:798-196364:884", "196364:887-196364:1196", "196364:1199-196364:1200",
"196364:1203-196364:1299", "196437:1", "196437:3-196437:74", "196437:77-196437:169", "196438:1-196438:181",
"196438:184-196438:699", "196438:701-196438:1269", "196452:82-196452:112", "196452:114-196452:490", "196452:493-196452:586",
"196452:589-196452:618", "196452:622-196452:668", "196452:671-196452:716", "196452:718-196452:726", "196452:728-196452:956",
"196452:958-196452:1004", "196452:1007-196452:1091", "196453:1-196453:74", "196453:77-196453:145", "196453:147-196453:669",
"196453:673-196453:714", "196453:717-196453:799", "196453:802-196453:988", "196453:991-196453:1178", "196453:1180",
"196453:1182-196453:1248", "196453:1250-196453:1528", "196453:1531-196453:1647", "196495:114-196495:180", "196495:182-196495:272",
"196509:1-196509:68", "196531:62-196531:150", "196531:152-196531:253", "196531:256-196531:285", "196531:288-196531:302",
"196531:305-196531:422", "196531:425-196531:440", "198049:1-198049:11", "198049:14-198049:57", "198050:2-198050:155",
"198063:1-198063:37", "198063:40-198063:72", "198063:74-198063:124", "198063:127-198063:294", "198116:36-198116:52",
"198116:54-198116:55", "198116:58-198116:96", "198116:98-198116:112", "198207:1-198207:97", "198208:1-198208:92",
"198208:94-198208:134", "198208:137-198208:147", "198208:150-198208:209", "198210:1-198210:221", "198212:1-198212:574",
"198213:1-198213:107", "198215:1-198215:12", "198230:1-198230:33", "198230:36-198230:57", "198230:60-198230:235",
"198230:237-198230:324", "198230:326-198230:388", "198230:390-198230:459", "198230:462-198230:625", "198230:627-198230:651",
"198230:653-198230:805", "198230:808-198230:811", "198230:814-198230:948", "198230:950-198230:1090", "198230:1093-198230:1103",
"198230:1106-198230:1332", "198230:1335-198230:1380", "198249:1-198249:7", "198269:3-198269:198", "198271:1-198271:91",
"198271:93-198271:170", "198271:173-198271:299", "198271:301-198271:450", "198271:453-198271:513", "198271:516-198271:616",
"198271:619-198271:628", "198271:631-198271:791", "198271:793-198271:797", "198272:1-198272:185", "198272:188-198272:245",
"198272:248-198272:314", "198272:317-198272:433", "198272:436-198272:444", "198272:454-198272:620", "198346:44-198346:47",
"198372:57-198372:110", "198485:68-198485:109", "198485:112-198485:134", "198485:136-198485:181", "198485:184-198485:239",
"198487:1-198487:145", "198487:147-198487:514", "198487:517-198487:668", "198487:671-198487:733", "198487:736-198487:757",
"198487:760-198487:852", "198487:854-198487:994", "198487:997-198487:1434", "198487:1437-198487:1610", "198522:65-198522:144",
"198522:147-198522:208", "198941:102-198941:189", "198941:191-198941:220", "198941:222-198941:241", "198941:243-198941:249",
"198941:252-198941:284", "198954:108-198954:156", "198954:159-198954:277", "198955:1-198955:45", "198955:47-198955:50",
"198955:53-198955:220", "198955:223-198955:269", "198955:271-198955:284", "198955:286-198955:338", "198955:340-198955:580",
"198955:583-198955:742", "198955:744-198955:910", "198955:913-198955:946", "198955:949-198955:1162", "198955:1165-198955:1169",
"198955:1172-198955:1182", "198955:1185-198955:1188", "198955:1190-198955:1246", "198955:1249-198955:1304", "198955:1306-198955:1467",
"198955:1470-198955:1485", "198955:1487-198955:1552", "198969:58-198969:81", "198969:84-198969:247", "198969:249-198969:323",
"198969:325-198969:365", "198969:367-198969:413", "198969:416-198969:466", "198969:468-198969:643", "198969:646-198969:918",
"198969:920-198969:1011", "198969:1013-198969:1175", "198969:1178-198969:1236", "198969:1239-198969:1253", "199008:75-199008:93",
"199008:95-199008:121", "199008:124-199008:208", "199008:211-199008:331", "199008:333-199008:373", "199008:376-199008:482",
"199008:485-199008:605", "199008:608-199008:644", "199011:1-199011:11", "199011:13-199011:24", "199021:59-199021:88",
"199021:91-199021:128", "199021:130-199021:133", "199021:136-199021:309", "199021:311-199021:333", "199021:335-199021:410",
"199021:414-199021:469", "199021:471-199021:533", "199021:535-199021:563", "199021:565-199021:1223", "199021:1226-199021:1479",
"199021:1481-199021:1494", "199318:65-199318:138", "199319:1-199319:7", "199319:9-199319:223", "199319:226-199319:277",
"199319:280-199319:348", "199319:351-199319:358", "199319:360-199319:422", "199319:424-199319:490", "199319:492-199319:493",
"199319:496-199319:612", "199319:615-199319:642", "199319:645-199319:720", "199319:723-199319:728", "199319:730-199319:731",
"199319:734-199319:741", "199319:744-199319:752", "199319:754-199319:943", "199319:945-199319:997", "199336:1-199336:33",
"199336:36-199336:122", "199336:125-199336:231", "199336:234-199336:614", "199336:617-199336:789", "199336:791-199336:977",
"199356:95-199356:121", "199356:123-199356:168", "199356:171-199356:205", "199356:208-199356:231", "199409:25-199409:54",
"199409:56-199409:89", "199409:91-199409:204", "199409:206-199409:290", "199409:293-199409:583", "199409:586-199409:602",
"199409:604-199409:1014", "199409:1016-199409:1300", "199428:61-199428:197", "199428:200-199428:210", "199428:212-199428:382",
"199428:387-199428:414", "199428:417-199428:436", "199428:439-199428:530", "199428:533-199428:648", "199429:1-199429:28",
"199429:30-199429:36", "199429:39-199429:55", "199429:58-199429:101", "199429:103-199429:148", "199429:151-199429:154",
"199435:63-199435:106", "199435:109-199435:261", "199435:263-199435:579", "199435:582-199435:654", "199435:656-199435:696",
"199435:699-199435:1034", "199435:1037-199435:1144", "199435:1147-199435:1327", "199435:1330-199435:1411", "199435:1414-199435:1431",
"199435:1434-199435:1441", "199435:1444-199435:1487", "199435:1489-199435:1610", "199436:1-199436:113", "199436:116-199436:254",
"199436:257-199436:675", "199436:678-199436:748", "199564:1-199564:3", "199569:1-199569:2", "199569:5-199569:136",
"199569:139-199569:367", "199570:1-199570:17", "199571:1-199571:184", "199571:186-199571:360", "199571:363-199571:561",
"199572:1-199572:317", "199573:1-199573:22", "199574:1-199574:53", "199574:56-199574:153", "199574:156-199574:246",
"199608:60-199608:157", "199608:159-199608:209", "199608:211-199608:341", "199608:344-199608:390", "199608:392-199608:461",
"199608:464-199608:800", "199608:802-199608:1064", "199608:1067-199608:1392", "199608:1395-199608:1630", "199608:1633-199608:1904",
"199608:1907-199608:1962", "199608:1965-199608:2252", "199608:2255-199608:2422", "199698:72-199698:94", "199698:96-199698:127",
"199699:1-199699:154", "199699:157-199699:169", "199699:172-199699:410", "199699:412-199699:756", "199703:1-199703:94",
"199703:97-199703:482", "199703:485-199703:529", "199739:66-199739:133", "199751:103-199751:119", "199751:121-199751:127",
"199752:1-199752:141", "199752:144-199752:180", "199752:182-199752:186", "199752:188-199752:211", "199752:214-199752:322",
"199753:1-199753:59", "199754:1-199754:203", "199754:205-199754:325", "199754:328-199754:457", "199754:459-199754:607",
"199754:610-199754:613", "199754:615-199754:806", "199754:808-199754:998", "199804:78-199804:88", "199804:90-199804:181",
"199804:183-199804:235", "199804:238-199804:278", "199804:281-199804:290", "199804:292-199804:519", "199804:522-199804:575",
"199804:577-199804:628", "199804:631-199804:632", "199812:70-199812:141", "199812:144-199812:163", "199812:182-199812:211",
"199812:214-199812:471", "199812:474-199812:505", "199812:508-199812:557", "199812:560-199812:571", "199812:574-199812:623",
"199812:626-199812:751", "199812:754-199812:796", "199832:58-199832:62", "199832:65-199832:118", "199832:121-199832:139",
"199832:142-199832:286", "199833:1-199833:13", "199833:16-199833:103", "199833:105-199833:250", "199833:253-199833:493",
"199833:496-199833:794", "199833:797-199833:1032", "199833:1034-199833:1185", "199833:1188-199833:1239", "199834:1-199834:9",
"199834:11", "199834:14-199834:18", "199834:21-199834:54", "199834:56-199834:57", "199834:62-199834:65",
"199834:69-199834:284", "199834:286-199834:503", "199834:505-199834:942", "199862:59-199862:141", "199864:1-199864:87",
"199864:89", "199864:92-199864:103", "199864:106-199864:372", "199864:374-199864:385", "199864:388-199864:486",
"199867:1-199867:134", "199867:136-199867:172", "199867:174-199867:218", "199867:221-199867:320", "199868:1-199868:21",
"199875:70-199875:150", "199875:152-199875:334", "199876:1-199876:19", "199876:22-199876:95", "199876:97-199876:249",
"199876:252-199876:272", "199876:274-199876:340", "199876:343-199876:362", "199876:365-199876:376", "199877:1-199877:173",
"199877:175-199877:605", "199877:607-199877:701", "199877:703-199877:871", "199960:72-199960:139", "199960:141-199960:197",
"199960:204-199960:232", "199960:235-199960:363", "199960:365-199960:367", "199960:370-199960:380", "199960:383-199960:459",
"199960:461-199960:466", "199960:469-199960:485", "199961:1-199961:211", "199961:213-199961:287", "199967:60-199967:120",
"199967:122-199967:170", "199967:172-199967:198", "199973:73-199973:89", "200041:62-200041:83", "200041:85-200041:157",
"200041:162-200041:274", "200041:277-200041:318", "200041:321-200041:335", "200041:337-200041:386", "200041:388-200041:389",
"200041:392-200041:400", "200041:402-200041:568", "200041:571-200041:593", "200041:595-200041:646", "200041:649-200041:728",
"200041:731-200041:860", "200041:862-200041:930", "200041:932-200041:1096", "200042:1-200042:110", "200042:112-200042:536",
"200049:1-200049:177", "200075:76-200075:139", "200075:142-200075:232", "200075:256-200075:326", "200075:329-200075:422",
"200075:425-200075:431", "200075:434-200075:500", "200075:502-200075:605", "200091:67", "200091:70-200091:151",
"200091:154-200091:172", "200091:174-200091:187", "200091:190-200091:196", "200091:199-200091:201", "200091:204-200091:425",
"200091:428-200091:535", "200091:537-200091:607", "200091:610-200091:879", "200091:881-200091:943", "200091:946-200091:999",
"200091:1001-200091:1025", "200091:1027-200091:1132", "200091:1135-200091:1339", "200091:1341-200091:1433", "200091:1435-200091:1450",
"200091:1453-200091:1523", "200091:1526-200091:1664", "200091:1667-200091:1680", "200091:1683-200091:1710", "200152:74-200152:116",
"200160:52-200160:68", "200161:1-200161:97", "200161:100-200161:112", "200174:81-200174:84", "200177:1-200177:56",
"200178:1-200178:38", "200180:1-200180:18", "200186:1-200186:3", "200186:6-200186:24", "200188:1-200188:24",
"200188:27-200188:28", "200188:31-200188:76", "200188:79-200188:271", "200188:274-200188:352", "200190:1-200190:4",
"200190:6-200190:76", "200190:79-200190:143", "200190:146-200190:159", "200190:162-200190:256", "200190:258-200190:321",
"200190:324-200190:401", "200190:403-200190:453", "200190:456-200190:457", "200190:460-200190:565", "200190:567-200190:588",
"200190:591", "200190:593-200190:595", "200190:597-200190:646", "200190:649-200190:878", "200229:1-200229:33",
"200229:41-200229:219", "200229:222-200229:244", "200229:247-200229:290", "200229:293-200229:624", "200229:627-200229:629",
"200243:69-200243:103", "200243:106-200243:139", "200244:3-200244:304", "200244:307-200244:442", "200244:445-200244:507",
"200244:510-200244:619", "200245:1-200245:103", "200245:105-200245:128", "200245:131-200245:248", "200245:251-200245:357",
"200368:72-200368:180", "200369:1-200369:5", "200369:8-200369:61", "200369:64-200369:360", "200369:363-200369:439",
"200369:441-200369:578", "200369:580-200369:603", "200369:606-200369:684", "200369:686", "200381:8-200381:15",
"200381:18-200381:36", "200381:38-200381:89", "200381:91-200381:195", "200466:134-200466:274", "200473:96-200473:157",
"200473:159-200473:224", "200473:226-200473:304", "200473:306-200473:469", "200473:472-200473:524", "200473:527-200473:542",
"200473:545-200473:619", "200473:622-200473:688", "200473:691-200473:730", "200473:733-200473:738", "200473:740-200473:1324",
"200491:87-200491:107", "200491:110-200491:149", "200491:152-200491:157", "200491:160-200491:197", "200491:199-200491:237",
"200491:240-200491:270", "200491:273", "200491:276-200491:334", "200491:336-200491:360", "200491:363-200491:419",
"200515:97-200515:183", "200519:1-200519:111", "200519:114-200519:126", "200519:129-200519:136", "200519:138-200519:224",
"200519:227-200519:258", "200519:261-200519:350", "200519:353-200519:611", "200519:613-200519:747", "200525:77-200525:149",
"200525:151-200525:164", "200525:166-200525:190", "200525:193-200525:276", "200525:278-200525:311", "200525:314-200525:464",
"200525:467-200525:488", "200525:491-200525:674", "200525:676-200525:704", "200525:707-200525:755", "200525:757-200525:895",
"200525:898-200525:937", "200525:939-200525:990", "200532:1-200532:37", "200599:75-200599:129", "200599:132-200599:137",
"200600:1-200600:183", "200600:186-200600:299", "200600:302-200600:313", "200600:316-200600:324", "200600:327-200600:334",
"200600:336-200600:397", "200600:399-200600:417", "200600:420-200600:526", "200600:529-200600:591", "200600:594-200600:596",
"200600:598-200600:609", "200600:611-200600:660", "200600:663-200600:823", "200600:826-200600:900", "200600:902-200600:943",
"200600:945-200600:1139", "200961:1-200961:115", "200976:94-200976:164", "200990:75-200990:143", "200991:1-200991:42",
"200991:44", "200991:47-200991:80", "200991:83-200991:175", "200991:178-200991:181", "200991:184-200991:252",
"200991:255-200991:632", "200991:635-200991:916", "200991:918-200991:1017", "200991:1019-200991:1048", "200992:1-200992:405",
"200992:408-200992:434", "200992:436-200992:581", "201062:78-201062:268", "201097:83-201097:136", "201097:138-201097:245",
"201097:248-201097:300", "201097:303-201097:370", "201097:372-201097:429", "201097:432-201097:497", "201114:1-201114:14",
"201115:1-201115:73", "201159:70-201159:211", "201164:1-201164:8", "201164:10-201164:94", "201164:96-201164:125",
"201164:128-201164:178", "201164:180-201164:198", "201164:200-201164:271", "201164:274-201164:416", "201164:418",
"201168:1-201168:37", "201168:39-201168:275", "201168:278-201168:481", "201168:483-201168:558", "201168:560-201168:730",
"201173:1-201173:194", "201173:197-201173:586", "201174:1-201174:214", "201174:216-201174:263", "201174:265-201174:339",
"201174:342-201174:451", "201191:75-201191:98", "201191:100-201191:216", "201191:218-201191:389", "201191:392-201191:492",
"201191:494-201191:506", "201191:509-201191:585", "201191:587-201191:594", "201191:597-201191:607", "201191:609-201191:794",
"201191:796-201191:838", "201191:841-201191:974", "201191:977-201191:1105", "201191:1108-201191:1117", "201191:1120-201191:1382",
"201191:1385-201191:1386", "201193:1-201193:19", "201196:1-201196:238", "201196:241-201196:278", "201196:286-201196:299",
"201196:302-201196:338", "201196:341-201196:515", "201196:518-201196:720", "201196:723-201196:789", "201196:803-201196:841",
"201197:1-201197:23", "201202:1-201202:437", "201229:1-201229:5", "201229:8-201229:26", "201229:29-201229:73",
"201278:62-201278:163", "201278:166-201278:229", "201278:232-201278:256", "201278:259-201278:316", "201278:318-201278:595",
"201278:598-201278:938", "201278:942-201278:974", "201278:976-201278:1160", "201278:1163-201278:1304", "201278:1306-201278:1793",
"201278:1796-201278:1802", "201278:1805-201278:1906", "201278:1909-201278:1929", "201278:1932-201278:2174", "201554:70-201554:86",
"201554:88-201554:114", "201554:116-201554:126", "201602:76-201602:81", "201602:83-201602:194", "201602:196-201602:494",
"201602:496-201602:614", "201602:617-201602:635", "201611:87-201611:145", "201611:149-201611:182", "201611:184-201611:186",
"201613:1-201613:42", "201613:44-201613:49", "201613:53-201613:210", "201613:213-201613:215", "201613:218-201613:225",
"201613:228-201613:646", "201624:83-201624:92", "201624:95-201624:240", "201624:270", "201625:211-201625:312",
"201625:315-201625:348", "201625:351-201625:416", "201625:418-201625:588", "201625:591-201625:671", "201625:673-201625:758",
"201625:760-201625:791", "201625:793-201625:944", "201657:77-201657:93", "201657:95-201657:108", "201657:110-201657:118",
"201658:1-201658:19", "201658:21-201658:118", "201658:121-201658:136", "201658:139-201658:288", "201668:78-201668:157",
"201669:1-201669:9", "201669:12-201669:136", "201669:139-201669:141", "201669:143-201669:165", "201671:1-201671:120",
"201671:122-201671:174", "201671:177-201671:462", "201671:464-201671:482", "201671:485-201671:499", "201671:501-201671:545",
"201671:547-201671:571", "201671:574-201671:614", "201671:617-201671:766", "201671:768-201671:896", "201671:899-201671:911",
"201671:914-201671:1007", "201678:1-201678:120", "201679:1-201679:110", "201679:112-201679:241", "201679:244-201679:298",
"201679:302-201679:321", "201679:324-201679:461", "201679:463-201679:483", "201692:78-201692:81", "201692:83-201692:179",
"201705:65-201705:73", "201705:75-201705:109", "201705:111-201705:187", "201706:1-201706:62", "201707:1-201707:23",
"201707:26-201707:42", "201707:45-201707:115", "201707:118-201707:130", "201707:133-201707:160", "201707:163-201707:276",
"201707:279-201707:471", "201707:473-201707:511", "201707:514-201707:545", "201707:547-201707:570", "201707:572-201707:622",
"201707:625-201707:735", "201707:738-201707:806", "201707:809-201707:876", "201707:879-201707:964", "201708:1-201708:79",
"201718:58-201718:108", "201727:67-201727:185", "201729:6-201729:20", "201729:22-201729:75", "201729:77-201729:126",
"201729:129-201729:154", "201729:156-201729:216", "201729:219-201729:244", "201794:58-201794:94", "201802:68-201802:209",
"201802:211-201802:214", "201802:216-201802:220", "201802:223-201802:288", "201802:290-201802:296", "201816:1-201816:72",
"201816:74-201816:105", "201816:107-201816:157", "201817:1-201817:274", "201818:1", "201819:1-201819:94",
"201819:96-201819:241", "201824:1-201824:139", "201824:141-201824:176", "201824:179-201824:286", "201824:289-201824:492",
"202012:98-202012:121", "202012:126-202012:131", "202013:1-202013:2", "202013:5-202013:35", "202013:38-202013:57",
"202014:1-202014:5", "202014:8-202014:14", "202014:16-202014:18", "202014:20-202014:77", "202014:79-202014:102",
"202014:104-202014:174", "202014:177-202014:190", "202014:192-202014:196", "202016:1-202016:48", "202016:51-202016:134",
"202016:137-202016:177", "202016:179-202016:743", "202016:745-202016:831", "202016:834-202016:890", "202016:893-202016:896",
"202016:898-202016:932", "202016:934-202016:1010", "202044:84-202044:101", "202044:104-202044:266", "202044:268-202044:461",
"202044:463-202044:466", "202045:1-202045:30", "202045:33-202045:72", "202045:75-202045:528", "202045:531-202045:601",
"202045:603-202045:785", "202045:788-202045:809", "202045:822-202045:823", "202054:6-202054:266", "202054:268-202054:489",
"202054:492-202054:605", "202054:608-202054:631", "202060:76-202060:142", "202060:144-202060:154", "202060:156-202060:244",
"202060:246-202060:497", "202060:499-202060:642", "202060:644-202060:682", "202060:684-202060:743", "202060:746-202060:936",
"202074:66-202074:174", "202075:1-202075:18", "202075:21-202075:187", "202075:189-202075:214", "202075:217-202075:247",
"202075:250-202075:342", "202075:345-202075:406", "202075:409-202075:497", "202075:500-202075:537", "202075:539",
"202075:542-202075:560", "202075:562-202075:615", "202075:618-202075:628", "202084:83-202084:156", "202084:159-202084:177",
"202084:179-202084:180", "202084:182-202084:239", "202087:1-202087:25", "202087:28-202087:208", "202087:210-202087:357",
"202087:359-202087:652", "202087:655-202087:853", "202087:856-202087:1093", "202088:1-202088:286", "202093:1-202093:104",
"202093:107-202093:320", "202093:322-202093:360", "202116:59-202116:60", "202178:67-202178:78", "202178:80-202178:88",
"202178:91-202178:177", "202178:180-202178:186", "202178:188-202178:337", "202178:340-202178:377", "202178:379-202178:425",
"202178:428-202178:475", "202178:478-202178:548", "202178:551-202178:717", "202178:720-202178:965", "202178:967-202178:1444",
"202178:1447-202178:1505", "202178:1508-202178:1519", "202178:1522-202178:1555", "202205:94-202205:114", "202209:1-202209:48",
"202209:51-202209:142", "202237:39-202237:128", "202237:131", "202237:134-202237:219", "202237:222-202237:235",
"202237:238-202237:275", "202237:277-202237:289", "202237:291-202237:316", "202237:319-202237:419", "202237:422-202237:538",
"202237:540-202237:936", "202237:939-202237:950", "202237:952-202237:976", "202237:979-202237:1079", "202272:76-202272:112",
"202272:115-202272:141", "202272:144-202272:185", "202272:188-202272:205", "202272:208-202272:305", "202272:307-202272:313",
"202272:315-202272:371", "202272:436-202272:480", "202272:483-202272:555", "202272:558-202272:577", "202272:579-202272:683",
"202272:686-202272:705", "202272:707-202272:740", "202272:742-202272:890", "202272:937-202272:1295", "202272:1299-202272:1481",
"202299:68-202299:84", "202299:87-202299:141", "202299:143-202299:193", "202299:196-202299:358", "202299:361-202299:379",
"202299:382-202299:414", "202299:416-202299:452", "202299:455-202299:555", "202305:1-202305:89", "202305:92-202305:130",
"202305:133-202305:323", "202314:67-202314:104", "202314:107-202314:265", "202314:268-202314:278", "202328:46-202328:89",
"202328:92-202328:156", "202328:158-202328:276", "202328:278-202328:291", "202328:294-202328:434", "202328:437-202328:460",
"202328:463-202328:586", "202328:588-202328:610", "202328:612-202328:614", "202333:1-202333:235", "202389:81-202389:182",
"202389:185-202389:190", "202389:192-202389:199", "202469:87-202469:158", "202469:160-202469:174", "202469:177-202469:352",
"202472:1-202472:96", "202472:99-202472:112", "202477:1-202477:129", "202477:131-202477:150", "202478:1-202478:177",
"202478:180-202478:183", "202478:186-202478:219", "202478:222-202478:360", "202478:362-202478:506", "202478:509-202478:531",
"202478:534-202478:718", "202478:720-202478:927", "202478:929-202478:973", "202478:975-202478:1029", "202478:1031-202478:1186",
"202478:1189-202478:1212", "202478:1215-202478:1248", "202504:77-202504:96", "202504:99-202504:133", "202504:135-202504:182",
"202504:184-202504:211", "202504:213-202504:241", "202504:243-202504:392", "202504:395-202504:527", "202504:529-202504:617",
"202504:620-202504:715", "202504:718-202504:763", "202504:766-202504:1172", "202504:1174-202504:1247", "202504:1250-202504:1471",
"202504:1474-202504:1679", "202504:1682-202504:1704", "202972:1-202972:30", "202972:33-202972:184", "202972:186-202972:290",
"202972:292-202972:295", "202972:298-202972:371", "202972:374-202972:429", "202972:431-202972:544", "202973:1-202973:234",
"202973:237-202973:305", "202973:308-202973:437", "202973:439-202973:530", "202973:532-202973:541", "202973:544-202973:552",
"202973:555-202973:851", "202973:853-202973:1408", "203002:77-203002:128", "203002:130-203002:141", "203002:144-203002:207",
"203002:209-203002:267", "203002:270-203002:360", "203002:362-203002:501", "203002:504-203002:641", "203002:643-203002:669",
"203002:671", "203002:674-203002:717", "203002:720-203002:1034", "203002:1037-203002:1070", "203002:1073-203002:1370",
"203002:1372-203002:1392", "203002:1395-203002:1410", "203002:1413-203002:1596", "203709:1-203709:121", "203742:1-203742:29",
"203777:103-203777:113", "203830:82-203830:182", "203832:1-203832:11", "203833:1-203833:70", "203833:73-203833:128",
"203834:1-203834:40", "203835:1-203835:70", "203835:73-203835:358", "203853:122-203853:222", "203894:82-203894:272",
"203894:275-203894:477", "203894:480-203894:902", "203894:905-203894:1319", "203909:79-203909:113", "203909:116-203909:117",
"203909:120-203909:140", "203909:143-203909:382", "203912:1-203912:306", "203912:308-203912:566", "203912:569-203912:609",
"203912:611-203912:698", "203912:701-203912:820", "203912:823-203912:865", "203912:867-203912:1033", "203912:1035-203912:1321",
"203987:1-203987:9", "203987:12-203987:241", "203987:243-203987:339", "203987:342-203987:781", "203987:784-203987:1014",
"203992:1-203992:15", "203994:1-203994:56", "203994:59-203994:136", "203994:139-203994:304", "203994:306-203994:342",
"203994:344-203994:425", "204100:117-204100:139", "204101:1-204101:74", "204113:82-204113:96", "204113:98-204113:102",
"204113:105-204113:127", "204113:129-204113:191", "204113:194-204113:258", "204113:261-204113:327", "204113:329-204113:388",
"204113:390-204113:400", "204113:402-204113:583", "204113:585-204113:690", "204114:1-204114:358", "204238:23-204238:52",
"204238:55", "204250:92-204250:118", "204250:121-204250:177", "204250:179-204250:285", "204250:287-204250:336",
"204250:339-204250:400", "204250:403-204250:521", "204250:524-204250:543", "204250:546-204250:682", "204250:684-204250:801",
"204511:1-204511:56", "204541:5-204541:39", "204541:42", "204541:44-204541:139", "204541:142-204541:149",
"204541:151-204541:204", "204544:1-204544:11", "204544:13-204544:93", "204544:96-204544:195", "204544:197-204544:224",
"204544:226-204544:334", "204544:337-204544:426", "204552:1-204552:9", "204553:1-204553:51", "204553:53-204553:60",
"204553:63-204553:101", "204554:1-204554:5", "204554:7-204554:221", "204554:224-204554:455", "204554:458-204554:470",
"204554:472-204554:481", "204554:483-204554:514", "204555:1-204555:329", "204555:331-204555:334", "204563:91-204563:99",
"204563:102-204563:178", "204563:180-204563:219", "204563:222-204563:229", "204563:231-204563:364", "204563:366",
"204563:369-204563:470", "204563:473-204563:524", "204563:527-204563:571", "204564:1-204564:84", "204564:87-204564:89",
"204564:92-204564:159", "204564:161-204564:187", "204564:190-204564:191", "204564:193-204564:293", "204564:296-204564:315",
"204564:317-204564:340", "204564:343-204564:427", "204564:429-204564:434", "204564:437-204564:735", "204564:737-204564:855",
"204564:858-204564:1206", "204564:1209-204564:1248", "204564:1251-204564:1284", "204565:1-204565:48", "204566:1-204566:12",
"204567:1-204567:38", "204576:49-204576:192", "204576:195-204576:301", "204577:1-204577:46", "204577:49-204577:64",
"204577:67-204577:105", "204577:107-204577:170", "204577:173-204577:181", "204577:183-204577:193", "204577:196-204577:653",
"204577:656-204577:669", "204577:671-204577:740", "204577:742-204577:913", "204577:915-204577:1057", "204577:1059-204577:1115",
"204577:1117-204577:1282", "204599:73-204599:83", "204599:85-204599:94", "204599:97-204599:121", "204599:124-204599:125",
"204599:128-204599:173", "204599:175-204599:240", "204599:243-204599:245", "204599:248-204599:264", "204599:266-204599:292",
"204599:294-204599:334", "204601:1-204601:25", "204601:28-204601:62", "204601:65-204601:80", "204601:83-204601:89",
"204601:92-204601:290", "204601:292-204601:563", "204601:565-204601:591", "204601:593-204601:652", "204601:655-204601:780",
"204601:783-204601:812", "204601:814-204601:892", "204601:894-204601:984", "204601:986-204601:1003", "204601:1006-204601:1038",
"204601:1040-204601:1088", "204601:1091-204601:1102", "204601:1105-204601:1161", "204601:1164-204601:1250", "205086:95-205086:149",
"205111:88-205111:390", "205111:392-205111:441", "205111:444-205111:446", "205158:81-205158:289", "205158:292-205158:313",
"205158:315-205158:473", "205158:476-205158:591", "205158:594-205158:595", "205158:597-205158:612", "205158:615-205158:663",
"205158:665-205158:667", "205158:672-205158:685", "205158:687-205158:733", "205193:80-205193:109", "205193:111-205193:349",
"205193:352-205193:486", "205193:488-205193:650", "205193:652-205193:712", "205193:714-205193:902", "205217:1-205217:12",
"205217:16-205217:111", "205217:113-205217:171", "205217:174-205217:250", "205217:253-205217:318", "205233:94-205233:153",
"205236:1-205236:190", "205236:193-205236:207", "205236:209-205236:260", "205236:263-205236:331", "205236:334-205236:352",
"205238:1-205238:6", "205238:9-205238:199", "205238:202-205238:254", "205238:256-205238:304", "205238:306-205238:355",
"205238:358-205238:381", "205238:384-205238:596", "205238:598-205238:617", "205303:35-205303:54", "205303:90-205303:132",
"205303:135-205303:144", "205310:76-205310:306", "205310:309-205310:313", "205310:316", "205310:319-205310:321",
"205310:324-205310:457", "205310:460-205310:559", "205311:1-205311:85", "205311:88-205311:92", "205311:95-205311:183",
"205311:186-205311:395", "205311:397-205311:592", "205311:595-205311:910", "205311:913-205311:1260", "205339:71-205339:175",
"205339:178-205339:213", "205339:216-205339:230", "205339:233-205339:262", "205339:265-205339:404", "205344:1-205344:83",
"205344:86-205344:104", "205344:106-205344:359", "205344:362-205344:431", "205344:433-205344:949", "205344:951-205344:967",
"205344:969-205344:1127", "205344:1129-205344:1346", "205344:1348-205344:1586", "205515:82-205515:201", "205515:203-205515:216",
"205519:1-205519:47", "205519:50-205519:172", "205519:175-205519:367", "205519:370-205519:386", "205519:389-205519:472",
"205526:1-205526:269", "205526:272-205526:277", "205526:280-205526:332", "205614:1-205614:4", "205614:7-205614:40",
"205617:1-205617:29", "205617:32-205617:102", "205617:105-205617:123", "205617:125-205617:140", "205617:143-205617:264",
"205617:266-205617:448", "205617:451-205617:532", "205617:534-205617:547", "205618:1-205618:12", "205620:1-205620:175",
"205666:60-205666:119", "205666:122-205666:165", "205666:168-205666:259", "205666:261-205666:322", "205666:325-205666:578",
"205666:580-205666:594", "205666:597-205666:721", "205666:724-205666:739", "205667:1-205667:165", "205667:168-205667:282",
"205667:285-205667:318", "205667:321-205667:412", "205667:415-205667:689", "205667:692-205667:751", "205667:754-205667:774",
"205667:777-205667:1109", "205683:76-205683:82", "205683:85-205683:178", "205683:181-205683:198", "205683:201-205683:305",
"205690:1-205690:40", "205694:1-205694:205", "205694:208-205694:230", "205694:233-205694:347", "205694:350-205694:452",
"205694:455-205694:593", "205694:595-205694:890", "205718:49-205718:75", "205718:78-205718:97", "205718:100-205718:103",
"205718:105-205718:176", "205718:178-205718:338", "205718:341-205718:361", "205718:363-205718:524", "205718:527-205718:531",
"205718:534-205718:589", "205718:591-205718:694", "205774:1-205774:80", "205777:1-205777:8", "205781:1-205781:89",
"205781:91-205781:197", "205781:200-205781:502", "205826:80-205826:232", "205826:235-205826:303", "205826:306-205826:468",
"205833:84-205833:86", "205833:89-205833:121", "205833:123-205833:155", "205833:157-205833:165", "205833:167-205833:173",
"205833:176-205833:219", "205833:221-205833:267", "205833:270-205833:312", "205833:315-205833:346", "205833:350-205833:355",
"205833:360-205833:366", "205834:1-205834:12", "205834:14-205834:195", "205908:68-205908:200", "205908:202-205908:209",
"205921:22-205921:73", "205921:76-205921:268", "205921:271-205921:394", "205921:397-205921:401", "205921:410-205921:428",
"205921:431-205921:498", "205921:500-205921:571", "205921:574-205921:779", "205921:782-205921:853", "206066:89-206066:146",
"206088:86-206088:159", "206088:161-206088:178", "206088:181-206088:199", "206088:202-206088:286", "206102:83-206102:116",
"206102:120-206102:130", "206102:133-206102:208", "206102:211-206102:235", "206102:238-206102:246", "206102:249-206102:278",
"206102:281-206102:349", "206187:107-206187:169", "206187:172-206187:242", "206187:245-206187:288", "206187:290-206187:340",
"206187:343-206187:427", "206187:429-206187:435", "206187:437-206187:486", "206187:489-206187:569", "206187:571-206187:647",
"206187:649-206187:662", "206187:664-206187:708", "206188:1-206188:40", "206188:42-206188:55", "206199:1-206199:75",
"206199:77-206199:82", "206199:85-206199:114", "206207:82-206207:130", "206207:132-206207:176", "206207:179-206207:194",
"206207:196-206207:388", "206207:390-206207:419", "206207:422-206207:447", "206207:450-206207:569", "206207:572-206207:690",
"206208:1-206208:470", "206208:472-206208:518", "206210:11-206210:25", "206210:28-206210:275", "206210:277-206210:298",
"206210:300-206210:383", "206210:386-206210:466", "206243:62-206243:169", "206243:172-206243:196", "206243:199-206243:354",
"206243:357-206243:433", "206243:435-206243:448", "206243:451-206243:533", "206243:536-206243:554", "206243:557-206243:723",
"206243:726-206243:905", "206245:1-206245:62", "206246:1-206246:14", "206246:16-206246:237", "206246:240-206246:285",
"206246:288-206246:407", "206246:412-206246:676", "206246:678-206246:704", "206246:706-206246:785", "206246:787-206246:962",
"206246:965-206246:997", "206246:1000-206246:1198", "206246:1201-206246:1290", "206257:1-206257:29", "206258:1-206258:36",
"206258:39-206258:223", "206258:226-206258:249", "206302:1-206302:8", "206302:11-206302:33", "206302:36-206302:44",
"206302:47-206302:82", "206302:84-206302:108", "206302:110-206302:149", "206302:151-206302:186", "206302:189-206302:229",
"206302:231-206302:232", "206302:234-206302:241", "206302:243-206302:276", "206303:1-206303:19", "206303:23-206303:286",
"206304:1-206304:4", "206304:6-206304:62", "206331:91-206331:222", "206331:225-206331:312", "206389:88-206389:185",
"206389:187-206389:249", "206389:252-206389:272", "206389:275-206389:392", "206391:1-206391:55", "206391:57-206391:91",
"206401:69-206401:90", "206401:92-206401:194", "206401:197-206401:210", "206401:212-206401:249", "206401:251-206401:265",
"206401:267-206401:409", "206446:92-206446:141", "206446:143-206446:159", "206446:162-206446:205", "206446:208-206446:301",
"206446:304-206446:442", "206446:445", "206446:448-206446:474", "206446:476-206446:616", "206446:619-206446:872",
"206446:874-206446:910", "206446:912-206446:948", "206446:950-206446:989", "206446:992-206446:1030", "206446:1033-206446:1075",
"206446:1109-206446:1149", "206448:1-206448:143", "206448:145-206448:559", "206448:561-206448:1170", "206448:1173-206448:1231",
"206448:1235-206448:1237", "206466:24-206466:137", "206466:140-206466:277", "206466:280-206466:296", "206466:299-206466:303",
"206466:306-206466:405", "206466:407-206466:419", "206466:422-206466:477", "206466:480-206466:511", "206466:514-206466:676",
"206476:73-206476:129", "206476:133-206476:137", "206476:140-206476:141", "206476:143-206476:219", "206477:1-206477:14",
"206477:16-206477:31", "206477:33-206477:41", "206477:44-206477:51", "206477:53-206477:70", "206477:73-206477:75",
"206477:77-206477:89", "206477:91-206477:94", "206477:97-206477:115", "206477:118-206477:184", "206478:1-206478:27",
"206478:29-206478:136", "206478:139-206478:144", "206484:73-206484:95", "206484:98-206484:133", "206484:136-206484:163",
"206484:166-206484:186", "206484:189-206484:384", "206484:387-206484:463", "206484:465-206484:551", "206484:554",
"206484:556-206484:669", "206512:91-206512:123", "206512:125-206512:133", "206512:136-206512:161", "206512:163-206512:190",
"206512:193-206512:201", "206512:203-206512:212", "206512:214-206512:332", "206512:334-206512:584", "206512:587-206512:604",
"206512:607-206512:1005", "206512:1008-206512:1123", "206512:1126-206512:1163", "206512:1165-206512:1211", "206513:3-206513:39",
"206513:42-206513:188", "206513:191-206513:234", "206513:237-206513:238", "206513:241-206513:323", "206542:1-206542:115",
"206542:117-206542:165", "206542:168-206542:511", "206542:514-206542:547", "206542:550-206542:603", "206542:606-206542:668",
"206542:671-206542:727", "206542:730-206542:739", "206542:741-206542:833", "206550:77-206550:132", "206550:135-206550:144",
"206572:37-206572:47", "206573:2-206573:14", "206574:1-206574:87", "206575:1-206575:7", "206575:10",
"206575:12-206575:69", "206594:72-206594:107", "206594:110-206594:246", "206594:249-206594:281", "206595:1-206595:34",
"206595:37-206595:42", "206595:45-206595:193", "206596:1-206596:13", "206596:15-206596:220", "206596:222-206596:228",
"206596:231-206596:236", "206596:239-206596:292", "206596:295-206596:695", "206596:697-206596:728", "206596:730-206596:810",
"206598:1-206598:81", "206598:83-206598:103", "206598:105-206598:588", "206598:591-206598:657", "206598:659-206598:719",
"206605:1-206605:36", "206605:39-206605:78", "206744:49-206744:157", "206744:160-206744:192", "206744:195-206744:395",
"206744:398-206744:452", "206745:1-206745:81", "206745:84-206745:199", "206745:202-206745:224", "206745:227-206745:237",
"206745:240-206745:304", "206745:306-206745:318", "206745:321-206745:720", "206745:723-206745:796", "206745:799-206745:894",
"206745:897-206745:944", "206745:946-206745:1106", "206745:1108-206745:1524", "206745:1527-206745:1862", "206745:1988-206745:1996",
"206859:79-206859:210", "206859:212-206859:258", "206859:260-206859:323", "206859:325-206859:356", "206859:359-206859:609",
"206859:612-206859:681", "206859:684-206859:732", "206859:734-206859:768", "206859:771-206859:808", "206859:811-206859:827",
"206859:830-206859:848", "206866:1-206866:30", "206866:33-206866:113", "206866:115-206866:274", "206868:1-206868:3",
"206868:10-206868:16", "206869:1-206869:251", "206869:253-206869:271", "206869:274-206869:502", "206869:507-206869:520",
"206869:522-206869:566", "206869:568-206869:752", "206897:1-206897:34", "206897:38-206897:61", "206897:63-206897:102",
"206897:109", "206897:111-206897:112", "206897:114-206897:131", "206897:133-206897:137", "206901:1-206901:98",
"206906:1-206906:31", "206906:38-206906:94", "206906:96-206906:136", "206906:138-206906:139", "206906:142-206906:149",
"206906:151-206906:175", "206906:177-206906:206", "206940:1-206940:151", "206940:153", "206940:155-206940:298",
"206940:301-206940:382", "206940:384-206940:712", "206940:715-206940:803", "206940:805-206940:960", "206940:963-206940:1027",
"207099:83-207099:134", "207099:137-207099:172", "207099:175-207099:213", "207099:216-207099:314", "207099:316-207099:320",
"207099:323-207099:330", "207099:333-207099:367", "207099:370-207099:481", "207099:484-207099:602", "207099:605-207099:755",
"207099:757-207099:1046", "207099:1048-207099:1171", "207100:1-207100:91", "207100:94", "207214:57-207214:112",
"207214:114-207214:177", "207214:179-207214:181", "207214:184-207214:196", "207214:199-207214:220", "207214:223-207214:262",
"207214:265-207214:405", "207214:408-207214:482", "207214:485-207214:640", "207214:643-207214:708", "207214:718-207214:757",
"207214:759-207214:808", "207214:811-207214:829", "207217:1-207217:32", "207219:1-207219:112", "207220:1-207220:160",
"207221:1-207221:102", "207222:1-207222:17", "207222:20-207222:289", "207231:70-207231:84", "207231:86-207231:121",
"207231:123-207231:184", "207231:187-207231:189", "207231:192-207231:303", "207231:306-207231:354", "207231:357-207231:481",
"207231:484-207231:504", "207231:508-207231:549", "207231:552-207231:626", "207231:628-207231:690", "207231:693-207231:875",
"207231:878-207231:1000", "207231:1003-207231:1170", "207231:1173-207231:1187", "207231:1189-207231:1227", "207231:1229-207231:1415",
"207231:1418-207231:1445", "207231:1447-207231:1505", "207233:1-207233:119", "207233:121-207233:148", "207269:80-207269:394",
"207269:397-207269:436", "207269:439-207269:463", "207269:466-207269:551", "207269:568-207269:577", "207273:3-207273:877",
"207279:68-207279:138", "207279:141-207279:149", "207279:151-207279:237", "207279:240-207279:266", "207279:269-207279:307",
"207279:309-207279:416", "207279:498-207279:551", "207279:554-207279:640", "207279:643-207279:961", "207279:963-207279:1095",
"207279:1098-207279:1160", "207320:1-207320:110", "207320:112-207320:350", "207371:72-207371:117", "207371:120-207371:124",
"207372:1-207372:27", "207372:30-207372:113", "207372:116-207372:154", "207372:156-207372:174", "207372:176-207372:478",
"207372:480-207372:496", "207397:32-207397:77", "207397:80-207397:140", "207397:143-207397:179", "207398:1-207398:14",
"207398:16-207398:33", "207454:79-207454:95", "207454:98-207454:123", "207454:126-207454:259", "207454:261-207454:363",
"207454:365-207454:458", "207454:461-207454:498", "207454:501-207454:609", "207454:612-207454:632", "207454:635-207454:781",
"207454:784-207454:866", "207454:869-207454:974", "207454:977-207454:1064", "207454:1067-207454:1079", "207454:1081-207454:1321",
"207454:1323-207454:1464", "207454:1467-207454:1569", "207454:1571-207454:1604", "207454:1607-207454:1712", "207454:1714-207454:1988",
"207469:1-207469:31", "207469:34-207469:45", "207477:76-207477:104", "207477:107-207477:111", "207477:114-207477:147",
"207477:150-207477:295", "207477:298-207477:483", "207477:486-207477:494", "207477:497-207477:527", "207477:530-207477:563",
"207477:565-207477:570", "207487:50-207487:98", "207487:101-207487:311", "207487:313-207487:359", "207487:363-207487:468",
"207487:471-207487:472", "207488:1-207488:63", "207488:66-207488:92", "207488:95-207488:113", "207488:116-207488:198",
"207488:200-207488:250", "207488:252-207488:288", "207488:291-207488:365", "207488:368-207488:377", "207488:379-207488:440",
"207490:1-207490:48", "207490:51-207490:111", "207491:1-207491:176", "207491:179-207491:458", "207492:1-207492:20",
"207492:23-207492:298", "207515:79-207515:109", "207515:112-207515:132", "207515:134-207515:208", "207515:211-207515:225",
"207515:228-207515:320", "207515:322-207515:381", "207515:383-207515:498", "207515:500-207515:730", "207515:733-207515:849",
"207515:851-207515:954", "207515:957-207515:994", "207515:997-207515:1052", "207515:1055-207515:1143", "207515:1145-207515:1211",
"207517:1-207517:12", "207517:15-207517:57", "207518:1-207518:59", "207518:61-207518:83", "207882:22-207882:45",
"207883:1", "207883:3-207883:4", "207883:7-207883:75", "207884:1-207884:106", "207884:108-207884:183",
"207885:1-207885:90", "207886:1-207886:30", "207886:32-207886:90", "207886:92-207886:156", "207886:158-207886:166",
"207886:168-207886:171", "207889:1-207889:43", "207889:47-207889:57", "207889:60-207889:303", "207889:306-207889:442",
"207889:445", "207889:447-207889:551", "207889:553-207889:731", "207889:733-207889:907", "207889:910-207889:945",
"207898:1-207898:33", "207898:36-207898:57", "207898:60-207898:235", "207898:239-207898:257", "207898:260-207898:277",
"207905:75-207905:196", "207905:198-207905:281", "207905:284-207905:329", "207905:331-207905:402", "207905:404-207905:565",
"207905:568-207905:672", "207905:675-207905:805", "207905:807-207905:850", "207905:852-207905:861", "207905:864-207905:884",
"207905:886-207905:1180", "207905:1183-207905:1283", "207905:1285-207905:1331", "207905:1333-207905:1515", "207905:1518-207905:1734",
"207905:1737-207905:1796", "207920:84-207920:146", "207920:149-207920:241", "207920:243-207920:261", "207920:264-207920:291",
"207920:294-207920:486", "207920:489-207920:518", "207920:520-207920:598", "207920:600-207920:708", "207920:710-207920:826",
"207921:1-207921:37", "207921:40-207921:58", "207922:1-207922:69", "207922:71-207922:100", "207922:103-207922:126",
"207922:129-207922:242", "207922:274-207922:291", "207924:1-207924:52", "207924:54-207924:171", "207924:173-207924:178",
"207924:181-207924:339", "208307:2-208307:42", "208307:45", "208307:47-208307:70", "208307:72-208307:147",
"208307:150-208307:252", "208307:256-208307:259", "208307:262-208307:275", "208307:278-208307:342", "208307:345-208307:450",
"208307:453-208307:527", "208307:530-208307:583", "208307:586-208307:605", "208307:608-208307:616", "208307:618-208307:667",
"208307:670-208307:761", "208307:763-208307:798", "208307:800-208307:889", "208307:891-208307:893", "208307:896-208307:1055",
"208307:1057-208307:1205", "208307:1208-208307:1294", "208307:1297-208307:1328", "208339:77-208339:89", "208339:91-208339:122",
"208339:125-208339:208", "208339:211-208339:346", "208339:349-208339:363", "208341:1-208341:84", "208341:87-208341:117",
"208341:120-208341:513", "208341:515-208341:685", "208341:688-208341:693", "208341:695-208341:775", "208341:777-208341:824",
"208351:83-208351:97", "208351:100-208351:356", "208351:359-208351:367", "208351:369", "208352:1-208352:15",
"208352:17", "208352:19", "208353:1-208353:76", "208353:78-208353:269", "208353:271-208353:348",
"208357:1-208357:70", "208357:73-208357:507", "208390:72-208390:128", "208390:130-208390:169", "208391:52-208391:82",
"208391:84-208391:162", "208391:164-208391:216", "208391:219-208391:493", "208391:495-208391:498", "208391:500-208391:523",
"208391:526-208391:533", "208391:535-208391:588", "208391:591-208391:660", "208391:663-208391:869", "208427:49-208427:89",
"208427:92-208427:161", "208427:164", "208427:166-208427:173", "208427:175-208427:268", "208427:271-208427:312",
"208427:315", "208427:317-208427:335", "208427:337-208427:361", "208427:364-208427:402", "208427:404-208427:422",
"208427:425-208427:577", "208427:580-208427:647", "208428:1-208428:58", "208428:61-208428:68", "208428:70-208428:156",
"208428:159-208428:227", "208429:1-208429:56", "208429:59-208429:139", "208429:141-208429:159", "208429:162-208429:237",
"208429:240-208429:440", "208429:442-208429:452", "208429:455-208429:589", "208429:592-208429:712", "208429:715-208429:922",
"208487:2-208487:26", "208487:29-208487:159", "208487:161-208487:307", "208487:309-208487:459", "208487:462-208487:476",
"208487:479-208487:621", "208509:71-208509:232", "208538:2-208538:43", "208540:1-208540:26", "208540:29-208540:98",
"208541:1-208541:57", "208541:59-208541:173", "208541:175-208541:376", "208541:378-208541:413", "208551:119-208551:193",
"208551:195-208551:212", "208551:215-208551:300", "208551:303-208551:354", "208551:356-208551:554", "208551:557-208551:580",
"208686:73-208686:79", "208686:82-208686:181", "208686:183-208686:224", "208686:227-208686:243", "208686:246-208686:311",
"208686:313-208686:459" ) ),
duplicateCheckMode = cms.untracked.string('noDuplicateCheck'),
fileNames = cms.untracked.vstring('/store/cmst3/user/cmgtools/CMG/DoubleMuParked/StoreResults-Run2012D_22Jan2013_v1_PFembedded_trans1_tau132_pthad1_30had2_30_v1-5ef1c0fd428eb740081f19333520fdc8/USER/V5_B/PAT_CMG_V5_16_0/cmgTuple_146.root',
'/store/cmst3/user/cmgtools/CMG/DoubleMuParked/StoreResults-Run2012D_22Jan2013_v1_PFembedded_trans1_tau132_pthad1_30had2_30_v1-5ef1c0fd428eb740081f19333520fdc8/USER/V5_B/PAT_CMG_V5_16_0/cmgTuple_147.root',
'/store/cmst3/user/cmgtools/CMG/DoubleMuParked/StoreResults-Run2012D_22Jan2013_v1_PFembedded_trans1_tau132_pthad1_30had2_30_v1-5ef1c0fd428eb740081f19333520fdc8/USER/V5_B/PAT_CMG_V5_16_0/cmgTuple_148.root')
)
| [
"[email protected]"
] | |
4581a71677876a19591feba186db5b19a914147a | 1a7595a5896ca709eb98805b2a570bf12775a9ff | /muonShieldOptimization/study_muEloss.py | 56d16036361c9cb08313151c37f2833ff4938c81 | [] | no_license | nathandpenha/CERN-FairShip | 953683117f4971b323392bc1213b7ae7d9a3a708 | 10db3d519a5ac8fd67132afd39736b550cb60a30 | refs/heads/master | 2021-05-24T10:10:11.763338 | 2020-05-06T18:46:14 | 2020-05-06T18:46:14 | 261,848,065 | 2 | 0 | null | 2020-05-06T18:47:30 | 2020-05-06T18:39:22 | C++ | UTF-8 | Python | false | false | 10,048 | py | #!/usr/bin/env python
import ROOT,os,sys,getopt,time,shipRoot_conf
ROOT.gROOT.ProcessLine('#include "FairModule.h"')
time.sleep(20)
import shipunit as u
from ShipGeoConfig import ConfigRegistry
mcEngine = "TGeant4"
runnr = 1
nev = 1000000
setup = {}
#
setup['NA62'] = {'thickness': 125*u.cm/2., 'material':'krypton','momentum': 10*u.GeV,'maxTheta':350*u.GeV} # 3000s for 5M
# rad length 4.71cm 125/4.71 = 27
# https://indico.in2p3.fr/event/420/contributions/29860/attachments/24033/29479/moriond.pdf
setup['ATLAS'] = {'thickness': 172*u.cm/2., 'material':'iron','momentum': 350*u.GeV,'maxTheta':350*u.GeV} # 3000s for 5M
# atlas testbeam http://cds.cern.ch/record/1123152/files/CERN-THESIS-2008-070.pdf?version=1
# LArEM ~24X0 TileCal 4 compartments, same size LiqAr rad length 14cm
# http://cds.cern.ch/record/1263861/files/ATL-CAL-PUB-2010-001.pdf tile cal mainly iron, LAr 1.35 DM 0.63 TileCal 8.18
# iron intlen 16.97 -> (1.35 + 0.63 + 8.18)*16.97
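# i.e. (1.35 + 0.63 + 8.18) = 10.16 interaction lengths x 16.97 cm = ~172 cm, the ATLAS thickness used above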
setup['Fig3'] = {'thickness': 0.1*u.cm, 'material':'lead','momentum': 2*u.GeV,'maxTheta':0.2}
setup['Fig4'] = {'thickness': 0.1*u.cm, 'material':'lead','momentum': 8*u.GeV,'maxTheta':0.04}
setup['Fig5'] = {'thickness': 0.1*u.cm, 'material':'lead','momentum': 14*u.GeV,'maxTheta':0.02}
setup['Fig6'] = {'thickness': 1.44*u.cm, 'material':'copper','momentum': 11.7*u.GeV,'maxTheta':0.045}
setup['Fig7'] = {'thickness': 1.44*u.cm, 'material':'copper','momentum': 7.3*u.GeV,'maxTheta':0.045}
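# the setup key is chosen on the command line, e.g. "python study_muEloss.py NA62" writes its output to mscNA62.root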
s = sys.argv[1]
thickness = setup[s]['thickness']
material = setup[s]['material']
momentum = setup[s]['momentum']
maxTheta = setup[s]['maxTheta']
checkOverlap = True
storeOnlyMuons = True
outFile = "msc"+s+".root"
theSeed = 0
ecut = 0.0
import rootUtils as ut
h={}
def run():
# -------------------------------------------------------------------
ROOT.gRandom.SetSeed(theSeed) # this should be propagated via ROOT to Pythia8 and Geant4VMC
shipRoot_conf.configure() # load basic libraries, prepare atexit for python
# ship_geo = ConfigRegistry.loadpy("$FAIRSHIP/geometry/geometry_config.py", Yheight = 10, tankDesign = 5, muShieldDesign = 7, nuTauTargetDesign=1)
# -----Timer--------------------------------------------------------
timer = ROOT.TStopwatch()
timer.Start()
# -----Create simulation run----------------------------------------
gFairBaseContFact = ROOT.FairBaseContFact() # required by change to FairBaseContFact to avoid TList::Clear errors
run = ROOT.FairRunSim()
run.SetName(mcEngine) # Transport engine
if nev==0: run.SetOutputFile("dummy.root")
else: run.SetOutputFile(outFile) # Output file
run.SetUserConfig("g4Config.C") # user configuration file default g4Config.C
rtdb = run.GetRuntimeDb()
# -----Materials----------------------------------------------
run.SetMaterials("media.geo")
# -----Create geometry----------------------------------------------
cave= ROOT.ShipCave("CAVE")
cave.SetGeometryFileName("cave.geo")
run.AddModule(cave)
#
target = ROOT.simpleTarget()
#
target.SetEnergyCut(ecut*u.GeV)
if storeOnlyMuons: target.SetOnlyMuons()
target.SetParameters(material,thickness,0.)
run.AddModule(target)
#
primGen = ROOT.FairPrimaryGenerator()
myPgun = ROOT.FairBoxGenerator(13,1) # pdg id and multiplicity
if s=="NA62": myPgun.SetPRange(momentum,maxTheta)
else: myPgun.SetPRange(momentum-0.01,momentum+0.01)
myPgun.SetPhiRange(0,0) # // Azimuth angle range [degree]
myPgun.SetThetaRange(0,0) # // Polar angle in lab system range [degree]
myPgun.SetXYZ(0.*u.cm, 0.*u.cm, -1.*u.mm - (thickness) )
primGen.AddGenerator(myPgun)
#
run.SetGenerator(primGen)
# -----Initialize simulation run------------------------------------
run.Init()
if nev==0: return
gMC = ROOT.TVirtualMC.GetMC()
fStack = gMC.GetStack()
fStack.SetMinPoints(1)
fStack.SetEnergyCut(-1.)
# -----Start run----------------------------------------------------
print "run for ",nev,"events"
run.Run(nev)
# -----Start Analysis---------------
ROOT.gROOT.ProcessLine('#include "Geant4/G4EmParameters.hh"')
emP = ROOT.G4EmParameters.Instance()
emP.Dump()
h['f']= ROOT.gROOT.GetListOfFiles()[0].GetName()
# -----Finish-------------------------------------------------------
timer.Stop()
rtime = timer.RealTime()
ctime = timer.CpuTime()
print ' '
print "Macro finished succesfully."
print "Output file is ", outFile
print "Real time ",rtime, " s, CPU time ",ctime,"s"
def makePlot(f,book=True):
# print interaction and radiation length of target
sGeo=ROOT.gGeoManager
if sGeo:
v = sGeo.FindVolumeFast('target')
m = v.GetMaterial()
length = v.GetShape().GetDZ()*2
print "Material:",m.GetName(),'total interaction length=',length/m.GetIntLen(),'total rad length=',length/m.GetRadLen()
else:
density= 2.413
length= 125.0
print "Use predefined values:",density,length
if book:
ut.bookHist(h,'theta','scattering angle '+str(momentum)+'GeV/c;{Theta}(rad)',500,0,maxTheta)
ut.bookHist(h,'eloss','rel energy loss as function of momentum GeV/c',100,0,maxTheta,10000,0.,1.)
ut.bookHist(h,'elossRaw','energy loss as function of momentum GeV/c',100,0,maxTheta, 10000,0.,100.)
sTree = f.cbmsim
for n in range(sTree.GetEntries()):
rc = sTree.GetEvent(n)
Ein = sTree.MCTrack[0].GetEnergy()
M = sTree.MCTrack[0].GetMass()
Eloss = 0
for aHit in sTree.vetoPoint:
Eloss+=aHit.GetEnergyLoss()
print Ein,Eloss/Ein
rc = h['eloss'].Fill(Ein,Eloss/Ein)
rc = h['elossRaw'].Fill(Ein,Eloss)
ut.bookCanvas(h,key=s,title=s,nx=900,ny=600,cx=1,cy=1)
tc = h[s].cd(1)
if s=="NA62":
h['eloss'].Draw()
h['95'] = h['eloss'].ProjectionX('95',96,100)
h['95'].Sumw2()
h['0'] = h['eloss'].ProjectionX('0',1,100)
h['0'].Sumw2()
rc = h['95'].Divide(h['0'] )
h['95'].Draw()
h['meanEloss'] = h['elossRaw'].ProjectionX()
for n in range(1,h['elossRaw'].GetNbinsX()+1):
tmp = h['elossRaw'].ProjectionY('tmp',n,n)
eloss = tmp.GetMean()
h['meanEloss'].SetBinContent(n,eloss/density/length*1000)
h['meanEloss'].SetTitle('mean energy loss MeV cm2 / g')
h['meanEloss'].Draw()
elif s=="ATLAS":
h['eloss'].Draw()
h['>eloss']=h['eloss'].ProjectionY().Clone('>eloss')
cum = 0
N = float(h['>eloss'].GetEntries())
for n in range(h['>eloss'].GetNbinsX(),0,-1):
cum+=h['>eloss'].GetBinContent(n)
h['>eloss'].SetBinContent(n,cum/N)
print "Ethreshold event fraction in %"
for E in [15.,20.,30.,50.,80.]:
n = h['>eloss'].FindBin(E/350.)
print " %5.0F %5.2F "%(E,h['>eloss'].GetBinContent(n)*100)
else:
tc.SetLogy(1)
h['theta_100']=h['theta'].Clone('theta_100')
h['theta_100']=h['theta'].Rebin(5)
h['theta_100'].Scale(1./h['theta_100'].GetMaximum())
h['theta_100'].Draw()
h[s].Print(s+'.png')
h[s].Print(s+'.root')
f.Write(h['theta'].GetName())
f.Write(h['theta_100'].GetName())
def readChain():
tmp = "/mnt/hgfs/microDisk/Data/mscNA62_X.root"
for i in [0,1]:
f = ROOT.TFile(tmp.replace('X',str(i)))
if i==1: makePlot(f,False)
else: makePlot(f)
def NA62():
na62Points = open('NA62.points')
allPoints = na62Points.readlines()
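 # the points file is expected to hold three consecutive lines per measurement:
 # the central value followed by the two variations used to form the error bar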
N = int((len(allPoints)-1)/3.)
h['NA62']=ROOT.TGraphErrors(N)
for l in range(N):
tmp = allPoints[3*l].split(',')
x=float(tmp[0])
y=float(tmp[1].replace('\n',''))
tmp = allPoints[3*l+1].split(',')
y1=float(tmp[1].replace('\n',''))
tmp = allPoints[3*l+2].split(',')
y2=float(tmp[1].replace('\n',''))
h['NA62'].SetPoint(l,x,y*1E-6)
h['NA62'].SetPointError(l,0,abs(y1-y2)/2.*1E-6)
h['NA62'].SetLineColor(ROOT.kRed)
h['NA62'].SetMarkerColor(ROOT.kRed)
h['NA62'].SetMarkerStyle(20)
def makeSummaryPlot():
# using data in /mnt/hgfs/microDisk/Data/eloss/eloss_sum.root
# krypton total interaction length= 1.97246306079 total rad length= 26.5231000393
pdg={10.0:1.914,14.0:1.978,20.0:2.055,30.0:2.164,40.0:2.263,80.0:2.630,100.:2.810,140.:3.170,200.:3.720,277.:4.420,300.:4.631,400.:5.561}
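 # pdg above maps muon momentum [GeV/c] to the mean energy loss [MeV cm^2/g];
 # it is drawn as the 'muon dE/dx, PDG' reference points in the right-hand plot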
h['Gpdg'] = ROOT.TGraph(len(pdg))
Gpdg = h['Gpdg']
Gpdg.SetMarkerColor(ROOT.kRed)
Gpdg.SetMarkerStyle(20)
keys = pdg.keys()
keys.sort()
for n in range(len(keys)):
Gpdg.SetPoint(n,keys[n],pdg[keys[n]])
density= 2.413
length= 125.0
ut.readHists(h,"/mnt/hgfs/microDisk/Data/eloss/eloss_sum.root")
ut.readHists(h,"/mnt/hgfs/microDisk/Data/eloss/eloss_withRaw.root")
ut.bookCanvas(h,key='summary',title=" ",nx=1200,ny=600,cx=2,cy=1)
tc = h['summary'].cd(1)
h['0'] = h['eloss'].ProjectionX('0',1,h['eloss'].GetNbinsY())
h['0'].Sumw2()
NA62()
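 # for each threshold t, h[t] below becomes the probability that the muon deposits
 # more than t% of its energy in the target (the 'prob #DeltaE>X%' curves)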
for t in [93,95]:
h[t] = h['eloss'].ProjectionX(str(t),int(h['eloss'].GetNbinsY()*t/100.),h['eloss'].GetNbinsY())
h[t].Sumw2()
h[t].SetStats(0)
h[t].SetMarkerStyle(24)
rc = h[t].Divide(h['0'] )
h[t].Rebin(2)
h[t].Scale(1./2.)
if t!=93:
h[t].SetMarkerColor(ROOT.kBlue)
h[t].Draw('same')
else:
h[t].SetMaximum(1E-5)
h[t].SetMarkerColor(ROOT.kMagenta)
h[t].SetXTitle('incoming muon momentum [GeV/c]')
h[t].SetYTitle('prob #DeltaE>X%')
h[t].SetTitle('')
h[t].Draw()
h['NA62'].Draw('sameP')
h['lg'] = ROOT.TLegend(0.53,0.79,0.98,0.94)
h['lg'].AddEntry(h['NA62'],'NA62 measurement >95%','PL')
h['lg'].AddEntry(h[95],'FairShip >95%','PL')
h['lg'].AddEntry(h[93],'FairShip >93%','PL')
h['lg'].Draw()
tc = h['summary'].cd(2)
h['meanEloss'] = h['elossRaw'].ProjectionX()
for n in range(1,h['elossRaw'].GetNbinsX()+1):
tmp = h['elossRaw'].ProjectionY('tmp',n,n)
eloss = tmp.GetMean()
h['meanEloss'].SetBinContent(n,eloss/density/length*1000)
h['meanEloss'].SetBinError(n,0)
h['meanEloss'].SetTitle('mean energy loss MeV cm^{2}/g')
h['meanEloss'].SetStats(0)
h['meanEloss'].SetMaximum(7.)
h['meanEloss'].SetXTitle('incoming muon momentum [GeV/c]')
h['meanEloss'].SetYTitle('mean energy loss [MeV cm^[2]]/g')
h['meanEloss'].SetTitle('')
h['meanEloss'].Draw()
Gpdg.Draw('sameP')
h['lg2'] = ROOT.TLegend(0.53,0.79,0.98,0.94)
h['lg2'].AddEntry(h['Gpdg'],'muon dE/dx, PDG ','PL')
h['lg2'].AddEntry(h['meanEloss'],'energy deposited in krypton, FairShip','PL')
h['lg2'].Draw()
h['summary'].Print('catastrophicEnergyLoss.png')
| [
"[email protected]"
] | |
7cffd984d55e0708e92416f0d126056f75c33470 | ec062c479c09ce250c3e23ff47f144f423b55648 | /py/Lib/site-packages/azure/mgmt/compute/compute/v2016_04_30_preview/models/virtual_machine_paged.py | f4ce0dcbeaf516525bd3f7441a2a98148efea77a | [] | no_license | betisb/InputParser | c442ffc877a941bd5b7aac4d843a4d21594d8e96 | 68747d69e04d126f7ea679f93a291a6de244a95f | refs/heads/master | 2021-07-13T05:05:19.479329 | 2019-05-28T16:56:53 | 2019-05-28T16:56:53 | 188,087,891 | 0 | 2 | null | 2020-07-24T00:14:31 | 2019-05-22T17:52:13 | Python | UTF-8 | Python | false | false | 978 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.paging import Paged
class VirtualMachinePaged(Paged):
"""
A paging container for iterating over a list of :class:`VirtualMachine <azure.mgmt.compute.compute.v2016_04_30_preview.models.VirtualMachine>` object
"""
_attribute_map = {
'next_link': {'key': 'nextLink', 'type': 'str'},
'current_page': {'key': 'value', 'type': '[VirtualMachine]'}
}
def __init__(self, *args, **kwargs):
super(VirtualMachinePaged, self).__init__(*args, **kwargs)
| [
"[email protected]"
] | |
8fe088fdb84933fc73e957b74a89bbc398332e43 | 1fe8b4a22ba986e06788c31237171f986d5b440e | /uploader/admin.py | 3d8d57f43d4ba6b1119c84b3d84eaf416bfb5bec | [] | no_license | whitews/StickyTempFile | 0f654bafe23b6e4e104fe76368cd642c30f918d5 | 1812fa2c669c2128e13c9533b5ea6a97e5449e3e | refs/heads/master | 2021-01-19T05:29:59.049931 | 2013-03-07T21:36:11 | 2013-03-07T21:36:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 98 | py | from uploader.models import *
from django.contrib import admin
admin.site.register(UploadedFile)
| [
"[email protected]"
] | |
3017eff3a8d21fac6867ed2bc8da08b705f9d229 | cfc415c9b247521b872bf86fd22b55b4a3ff2ee3 | /tensorflow/tools/compatibility/tf_upgrade_v2_test.py | 4b83d50036b6c4e9572b40d7b6377685f94dacc8 | [
"Apache-2.0"
] | permissive | chengmengli06/tensorflow | f7fdb51d709e87b302d60a6dc9391cb6bbaaa3e1 | e81d0c5499eab1ae2d301c5caa128e0b69b0289b | refs/heads/master | 2021-06-24T21:54:28.571878 | 2018-11-16T06:45:48 | 2018-11-16T06:45:48 | 157,813,648 | 0 | 0 | Apache-2.0 | 2018-11-16T04:42:57 | 2018-11-16T04:42:57 | null | UTF-8 | Python | false | false | 6,225 | py | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tf 2.0 upgrader."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import tempfile
import six
from tensorflow.python.framework import test_util
from tensorflow.python.platform import test as test_lib
from tensorflow.tools.compatibility import ast_edits
from tensorflow.tools.compatibility import tf_upgrade_v2
class TestUpgrade(test_util.TensorFlowTestCase):
"""Test various APIs that have been changed in 2.0.
We also test whether a converted file is executable. test_file_v1_10.py
aims to exhaustively test that API changes are convertible and actually
work when run with current TensorFlow.
"""
def _upgrade(self, old_file_text):
in_file = six.StringIO(old_file_text)
out_file = six.StringIO()
upgrader = ast_edits.ASTCodeUpgrader(tf_upgrade_v2.TFAPIChangeSpec())
count, report, errors = (
upgrader.process_opened_file("test.py", in_file,
"test_out.py", out_file))
return count, report, errors, out_file.getvalue()
def testParseError(self):
_, report, unused_errors, unused_new_text = self._upgrade(
"import tensorflow as tf\na + \n")
self.assertTrue(report.find("Failed to parse") != -1)
def testReport(self):
text = "tf.assert_near(a)\n"
_, report, unused_errors, unused_new_text = self._upgrade(text)
# This is not a complete test, but it is a sanity test that a report
# is generating information.
self.assertTrue(report.find("Renamed function `tf.assert_near` to "
"`tf.debugging.assert_near`"))
def testRename(self):
text = "tf.conj(a)\n"
_, unused_report, unused_errors, new_text = self._upgrade(text)
self.assertEqual(new_text, "tf.math.conj(a)\n")
text = "tf.rsqrt(tf.log_sigmoid(3.8))\n"
_, unused_report, unused_errors, new_text = self._upgrade(text)
self.assertEqual(new_text, "tf.math.rsqrt(tf.math.log_sigmoid(3.8))\n")
def testRenameConstant(self):
text = "tf.MONOLITHIC_BUILD\n"
_, unused_report, unused_errors, new_text = self._upgrade(text)
self.assertEqual(new_text, "tf.sysconfig.MONOLITHIC_BUILD\n")
text = "some_call(tf.MONOLITHIC_BUILD)\n"
_, unused_report, unused_errors, new_text = self._upgrade(text)
self.assertEqual(new_text, "some_call(tf.sysconfig.MONOLITHIC_BUILD)\n")
def testRenameArgs(self):
text = ("tf.nn.pool(input_a, window_shape_a, pooling_type_a, padding_a, "
"dilation_rate_a, strides_a, name_a, data_format_a)\n")
_, unused_report, unused_errors, new_text = self._upgrade(text)
self.assertEqual(new_text,
("tf.nn.pool(input=input_a, window_shape=window_shape_a,"
" pooling_type=pooling_type_a, padding=padding_a, "
"dilations=dilation_rate_a, strides=strides_a, "
"name=name_a, data_format=data_format_a)\n"))
def testReorder(self):
text = "tf.boolean_mask(a, b, c, d)\n"
_, unused_report, unused_errors, new_text = self._upgrade(text)
self.assertEqual(new_text,
"tf.boolean_mask(tensor=a, mask=b, name=c, axis=d)\n")
def testLearningRateDecay(self):
for decay in ["tf.train.exponential_decay", "tf.train.piecewise_constant",
"tf.train.polynomial_decay", "tf.train.natural_exp_decay",
"tf.train.inverse_time_decay", "tf.train.cosine_decay",
"tf.train.cosine_decay_restarts",
"tf.train.linear_cosine_decay",
"tf.train.noisy_linear_cosine_decay"]:
text = "%s(a, b)\n" % decay
_, report, errors, new_text = self._upgrade(text)
self.assertEqual(text, new_text)
self.assertEqual(errors, ["test.py:1: %s requires manual check." % decay])
self.assertIn("%s has been changed" % decay, report)
def testEstimatorLossReductionChange(self):
classes = [
"LinearClassifier", "LinearRegressor", "DNNLinearCombinedClassifier",
"DNNLinearCombinedRegressor", "DNNRegressor", "DNNClassifier",
"BaselineClassifier", "BaselineRegressor"
]
for c in classes:
ns = "tf.estimator." + c
text = ns + "(a, b)"
_, report, errors, new_text = self._upgrade(text)
self.assertEqual(text, new_text)
self.assertEqual(errors, ["test.py:1: %s requires manual check." % ns])
self.assertIn("loss_reduction has been changed", report)
def testCountNonZeroChanges(self):
text = (
"tf.math.count_nonzero(input_tensor=input, dtype=dtype, name=name, "
"reduction_indices=axis, keep_dims=keepdims)\n"
)
_, unused_report, unused_errors, new_text = self._upgrade(text)
expected_text = (
"tf.math.count_nonzero(input=input, dtype=dtype, name=name, "
"axis=axis, keepdims=keepdims)\n"
)
self.assertEqual(new_text, expected_text)
class TestUpgradeFiles(test_util.TensorFlowTestCase):
def testInplace(self):
"""Check to make sure we don't have a file system race."""
temp_file = tempfile.NamedTemporaryFile("w", delete=False)
original = "tf.conj(a)\n"
upgraded = "tf.math.conj(a)\n"
temp_file.write(original)
temp_file.close()
upgrader = ast_edits.ASTCodeUpgrader(tf_upgrade_v2.TFAPIChangeSpec())
upgrader.process_file(temp_file.name, temp_file.name)
self.assertAllEqual(open(temp_file.name).read(), upgraded)
os.unlink(temp_file.name)
if __name__ == "__main__":
test_lib.main()
| [
"[email protected]"
] | |
80810bf8538a097220492556fb02df2122426b9e | e4007870b4d75ba23c2f12ac6646f272cf17865c | /FFMPEG_Scripts/Video_Drawer.py | ff79049fa690bf27f94f3a7db415cde233945c49 | [
"MIT"
] | permissive | knut0815/PythonUtility | 385ce332ff34501be7ad21ac7948eb609770e72a | 0062e1e60dc151776b963d13bc4c1763eb90d333 | refs/heads/master | 2023-01-10T09:58:14.619531 | 2020-11-10T12:22:47 | 2020-11-10T12:22:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,420 | py | import sys
import platform
import subprocess
import os
horizontal_center = 'x=(w-tw)/2'
horizontal_right_margin = 'x=(w-tw)'
vertical_bottom_margin = 'y=h-(2*lh)'
class VideoDrawer(object):
@staticmethod
def _get_font_ifp():
if platform.system() == 'windows':
font_ifp = 'C:\\Windows\\Fonts\\Arial.ttf'
else:
font_ifp = '/usr/share/fonts/truetype/freefont/FreeMono.ttf'
return font_ifp
@staticmethod
def _get_font_ifp_option():
return 'fontfile=' + VideoDrawer._get_font_ifp()
@staticmethod
def _get_font_size_option(size):
return 'fontsize=' + str(size)
@staticmethod
def _get_color_option(color):
return 'fontcolor=' + color
@staticmethod
def _get_activate_box_option():
return 'box=1'
@staticmethod
def _get_box_color_option(color):
return 'boxcolor=' + color
@staticmethod
def _get_box_with_option(width):
return 'boxborderw=' + str(width)
@staticmethod
def _get_text_option(text):
return 'text=\'' + str(text) + '\''
@staticmethod
def _get_frame_number_text_option():
return 'text=\'%{frame_num}\''
@staticmethod
def _get_start_number_option(start_number):
return 'start_number=' + str(start_number)
@staticmethod
def _get_enable_between_option(start, end, values_in_frames=True):
# This option is used to show some string only in a specific subpart of the video
# http://ffmpeg.org/ffmpeg-all.html#Expression-Evaluation
# n: the number of current processed frame, starting from 0
# t: the number of current processed frame, starting from 0
if values_in_frames:
test_variable = 'n'
else:
test_variable = 't'
return 'enable=\'between(' + test_variable + ',' + str(start) + ',' + str(end) + ')\''
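        # For example, _get_enable_between_option(10, 20) returns "enable='between(n,10,20)'",
        # i.e. the drawtext filter is only active from frame 10 to frame 20; with
        # values_in_frames=False the expression tests t (seconds) instead of n (frame index).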
@staticmethod
def _create_colon_separated_draw_options(option_list):
option_str = ''
option_str += '"' # prepend quote
option_str += 'drawtext='
for ele in option_list[:-1]:
option_str += ele + ': '
option_str += option_list[-1]
option_str += '"' # append quote
return option_str
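        # e.g. ['fontsize=20', "text='hi'"] becomes "drawtext=fontsize=20: text='hi'",
        # with the enclosing double quotes part of the returned string (ready for the shell call)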
@staticmethod
def add_text_to_video(ifp,
ofp,
text_time_interval_triples_list=None,
add_frame_numbers=True):
options = ''
options += ' ' + '-i'
options += ' ' + ifp
options += ' ' + '-vf'
font_ifp_option = VideoDrawer._get_font_ifp_option()
x_pos_option = horizontal_center
y_pos_option = vertical_bottom_margin
font_color_option = VideoDrawer._get_color_option('black')
font_size_option = VideoDrawer._get_font_size_option(20)
active_box_option = VideoDrawer._get_activate_box_option()
box_color_option = VideoDrawer._get_box_color_option('green')
box_width_option = VideoDrawer._get_box_with_option(5)
if text_time_interval_triples_list is not None:
draw_text_options = ''
for index, text_with_time_stamp in enumerate(text_time_interval_triples_list):
text_option = VideoDrawer._get_text_option(text_with_time_stamp[0])
start = text_with_time_stamp[1]
end = text_with_time_stamp[2]
enable_between_option = VideoDrawer._get_enable_between_option(start, end)
single_draw_options = VideoDrawer._create_colon_separated_draw_options(
[font_ifp_option,
text_option,
enable_between_option,
x_pos_option,
y_pos_option,
font_color_option,
font_size_option,
active_box_option,
box_color_option,
box_width_option
])
if index > 0:
draw_text_options += ',' # draw commands must be comma separated
draw_text_options += single_draw_options
options += ' ' + draw_text_options
if add_frame_numbers:
frame_number_text_option = VideoDrawer._get_frame_number_text_option()
start_number_option = VideoDrawer._get_start_number_option(0)
x_pos_option = horizontal_right_margin
draw_options = VideoDrawer._create_colon_separated_draw_options(
[font_ifp_option,
frame_number_text_option,
start_number_option,
x_pos_option,
y_pos_option,
font_color_option,
font_size_option,
active_box_option,
box_color_option,
box_width_option
])
if text_time_interval_triples_list is not None:
options += ',' + draw_options # draw commands must be comma separated
else:
options += ' ' + draw_options
options += ' ' + '-c:a'
options += ' ' + 'copy'
call_str = 'ffmpeg' + ' ' + options + ' ' + ofp
print('call_str', call_str)
subprocess.call(call_str, shell=True)
# Make sure the file has been created
assert os.path.isfile(ofp)
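# Illustrative call, assuming ffmpeg is on the PATH (file names and frame ranges are made up):
#
#   VideoDrawer.add_text_to_video(
#       'input.mp4', 'annotated.mp4',
#       text_time_interval_triples_list=[('first take', 0, 100), ('second take', 101, 250)],
#       add_frame_numbers=True)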
| [
"[email protected]"
] | |
f36b312afc18e9f6b1941362c2dfbc66574e3deb | 98b63e3dc79c75048163512c3d1b71d4b6987493 | /tensorflow/python/keras/tests/memory_test.py | 465df84d6fef375a6f515ec1eb64815e4b74ec3f | [
"Apache-2.0"
] | permissive | galeone/tensorflow | 11a4e4a3f42f4f61a65b432c429ace00401c9cc4 | 1b6f13331f4d8e7fccc66bfeb0b066e77a2b7206 | refs/heads/master | 2022-11-13T11:56:56.143276 | 2020-11-10T14:35:01 | 2020-11-10T14:35:01 | 310,642,488 | 21 | 12 | Apache-2.0 | 2020-11-06T16:01:03 | 2020-11-06T16:01:02 | null | UTF-8 | Python | false | false | 2,599 | py | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for memory leaks in eager execution.
It is possible that this test suite will eventually become flaky due to taking
too long to run (since the tests iterate many times), but for now they are
helpful for finding memory leaks since not all PyObject leaks are found by
introspection (test_util decorators). Please be careful adding new tests here.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python import keras
from tensorflow.python.eager import backprop
from tensorflow.python.eager.memory_tests import memory_test_util
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.platform import test
class SingleLayerNet(keras.Model):
"""Simple keras model used to ensure that there are no leaks."""
def __init__(self):
super(SingleLayerNet, self).__init__()
self.fc1 = keras.layers.Dense(5)
def call(self, x):
return self.fc1(x)
class MemoryTest(test.TestCase):
def testMemoryLeakInSimpleModelForwardOnly(self):
if not memory_test_util.memory_profiler_is_available():
self.skipTest("memory_profiler required to run this test")
inputs = array_ops.zeros([32, 100], dtypes.float32)
net = SingleLayerNet()
def f():
with backprop.GradientTape():
net(inputs)
memory_test_util.assert_no_leak(f)
def testMemoryLeakInSimpleModelForwardAndBackward(self):
if not memory_test_util.memory_profiler_is_available():
self.skipTest("memory_profiler required to run this test")
inputs = array_ops.zeros([32, 100], dtypes.float32)
net = SingleLayerNet()
def f():
with backprop.GradientTape() as tape:
result = net(inputs)
tape.gradient(result, net.variables)
del tape
memory_test_util.assert_no_leak(f)
if __name__ == "__main__":
test.main()
| [
"[email protected]"
] | |
7eb105d6e6a9cab22984c6db01666070c56c508b | 2bf76e30ad517adf8805a9fdb22e60c4c010eea3 | /ipypandex/tests/echo_pandas.py | 4c35a62cf638ddd4aa4f4bf2ae5ef84c977c07cf | [
"BSD-3-Clause"
] | permissive | isabella232/ipypandex | 2be06d8be96280f110ffd063eb7f8c81a6d4dc8c | fc1023266a7e3e784595f296629f4fd827fb7d0f | refs/heads/main | 2023-02-11T20:15:02.731204 | 2021-01-06T00:41:44 | 2021-01-06T00:41:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 187 | py | import pandas as pd
from IPython.utils.capture import capture_output
with capture_output() as c:
display(pd.DataFrame(data={'col1': [1, 2], 'col2': [3, 4]}))
print(c.outputs[0].data)
| [
"[email protected]"
] | |
aeb34e6f1e8723cc6424c196cb99ef779f507e4d | c2081f368428e5fb684e08863ecac4f37f5717e5 | /jobapplicant/wsgi.py | 045dbc1851268e7d082365cdb2495383f2d755be | [] | no_license | agimenezpy/jobapplicant | 9148e80e3e535f7ea956992ba9c7fc0ea472b0e8 | 99ac06464a9137061c89fea0389b7c95422c29f2 | refs/heads/master | 2020-06-05T08:48:25.222470 | 2013-10-04T00:42:33 | 2013-10-04T00:42:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,146 | py | """
WSGI config for jobapplicant project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "jobapplicant.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
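#
# A minimal sketch of such a wrapper (illustrative only; any WSGI callable that
# delegates to the Django application works the same way):
#
#   class PassThroughMiddleware(object):
#       def __init__(self, wrapped):
#           self.wrapped = wrapped
#
#       def __call__(self, environ, start_response):
#           return self.wrapped(environ, start_response)
#
#   application = PassThroughMiddleware(application)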
| [
"[email protected]"
] | |
9be6c9b60a6871fc27eb6f3f9518c33a42785596 | c6c6c32547ba334f75a5cc938a9c07e708670365 | /buses/migrations/0002_alter_busbooking_bus_id.py | af6ea77ae0304b32d0b5ac41d86b6f261725998a | [] | no_license | wilsonmwiti/SmartTravel | e693acb0b323d1be9ae1c58917a32ef6a418448d | 9513f0f15745f9e73e70680c5d9e5798de85be7c | refs/heads/master | 2023-09-01T14:16:28.471037 | 2021-10-14T10:55:20 | 2021-10-14T10:55:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 378 | py | # Generated by Django 3.2.8 on 2021-10-13 05:56
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('buses', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='busbooking',
name='bus_id',
field=models.CharField(max_length=100),
),
]
| [
"[email protected]"
] | |
03a6cc2e483937f89f007060d6086be7425f4626 | 4e9d3ba19a694c25fdbfd4ed1c6ab66339674beb | /python/GafferUI/PopupWindow.py | b13e219fa456b6bdee63ed65695f9d5a99197b0f | [
"BSD-3-Clause"
] | permissive | mcanthony/gaffer | 0a6af7856b1c2ecae5620a9f2bd04316f2df271c | 32189357fda4bc4b2e5367a06af64928c479ffaf | refs/heads/master | 2021-01-18T19:59:29.212027 | 2015-10-26T20:43:45 | 2015-10-26T20:43:45 | 45,088,868 | 2 | 0 | null | 2015-10-28T04:30:06 | 2015-10-28T04:30:04 | null | UTF-8 | Python | false | false | 7,109 | py | ##########################################################################
#
# Copyright (c) 2012, John Haddon. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import Gaffer
import GafferUI
QtCore = GafferUI._qtImport( "QtCore" )
QtGui = GafferUI._qtImport( "QtGui" )
class PopupWindow( GafferUI.Window ) :
    def __init__( self, title="GafferUI.Window", borderWidth=8, child=None, sizeMode=GafferUI.Window.SizeMode.Automatic, closeOnLeave=False, **kw ) :
        GafferUI.Window.__init__( self, title, borderWidth, child=child, sizeMode=sizeMode, **kw )
        self._qtWidget().setWindowFlags( self._qtWidget().windowFlags() | QtCore.Qt.FramelessWindowHint | QtCore.Qt.Tool )
        self._qtWidget().setAttribute( QtCore.Qt.WA_TranslucentBackground )
        self._qtWidget().setMouseTracking( True )
        self._qtWidget().paintEvent = Gaffer.WeakMethod( self.__paintEvent )
        self._qtWidget().mousePressEvent = Gaffer.WeakMethod( self.__mousePressEvent )
        self._qtWidget().mouseReleaseEvent = Gaffer.WeakMethod( self.__mouseReleaseEvent )
        self._qtWidget().mouseMoveEvent = Gaffer.WeakMethod( self.__mouseMoveEvent )
        self._qtWidget().enterEvent = Gaffer.WeakMethod( self.__enterEvent )
        self._qtWidget().leaveEvent = Gaffer.WeakMethod( self.__leaveEvent )
        # setVisible() will animate this to 1
        self._qtWidget().setWindowOpacity( 0 )
        self.__visibilityAnimation = None
        self.__dragOffset = None
        self.__cursor = None
        self.setCloseOnLeave( closeOnLeave )
    ## Reimplemented from base class to make nice opacity animations
    def setVisible( self, visible ) :
        if visible == self.getVisible() :
            return
        self.__visibilityAnimation = _VisibilityAnimation( self._qtWidget(), visible )
        self.__visibilityAnimation.start()
    ## Reimplemented from base class to account for nice opacity animations
    def getVisible( self ) :
        result = GafferUI.Window.getVisible( self )
        # account for the fact that we might be animating towards invisibility
        if self.__visibilityAnimation is not None and self.__visibilityAnimation.state() == self.__visibilityAnimation.Running :
            if GafferUI._Variant.fromVariant( self.__visibilityAnimation.endValue() ) == 0 :
                result = False
        return result
    def setCloseOnLeave( self, closeOnLeave ) :
        self.__closeOnLeave = closeOnLeave
    def getCloseOnLeave( self ) :
        return self.__closeOnLeave
    def __mousePressEvent( self, event ) :
        if event.button() == QtCore.Qt.LeftButton :
            if self.__cursor == QtCore.Qt.SizeFDiagCursor :
                size = self._qtWidget().size()
                self.__dragOffset = QtCore.QPoint( size.width(), size.height() ) - event.globalPos()
            else :
                self.__dragOffset = self._qtWidget().frameGeometry().topLeft() - event.globalPos()
    def __mouseReleaseEvent( self, event ) :
        if event.button() == QtCore.Qt.LeftButton :
            self.__dragOffset = None
        self.__setCursorFromPosition( event )
    def __mouseMoveEvent( self, event ) :
        if event.buttons() & QtCore.Qt.LeftButton and self.__dragOffset is not None :
            if self.__cursor == QtCore.Qt.SizeFDiagCursor :
                newSize = event.globalPos() + self.__dragOffset
                self._qtWidget().resize( newSize.x(), newSize.y() )
            else :
                self._qtWidget().move( event.globalPos() + self.__dragOffset )
        elif self.getResizeable() :
            self.__setCursorFromPosition( event )
    def __enterEvent( self, event ) :
        if self.__closeOnLeave and self.__visibilityAnimation is not None :
            if self.__visibilityAnimation.state() == self.__visibilityAnimation.Running :
                # we are currently visible, but we have an animation, so we must be
                # in the process of becoming invisible. reverse that.
                self.setVisible( True )
    def __leaveEvent( self, event ) :
        self.__setCursor( None )
        if self.__closeOnLeave :
            self.setVisible( False )
    def __paintEvent( self, event ) :
        painter = QtGui.QPainter( self._qtWidget() )
        painter.setRenderHint( QtGui.QPainter.Antialiasing )
        painter.setBrush( QtGui.QColor( 76, 76, 76 ) )
        painter.setPen( QtGui.QColor( 0, 0, 0, 0 ) )
        radius = self._qtWidget().layout().contentsMargins().left()
        size = self.size()
        painter.drawRoundedRect( QtCore.QRectF( 0, 0, size.x, size.y ), radius, radius )
        if self.getResizeable() :
            painter.drawRect( size.x - radius, size.y - radius, radius, radius )
    def __setCursorFromPosition( self, event ) :
        radius = self._qtWidget().layout().contentsMargins().left()
        size = self.size()
        p = event.pos()
        if p.x() > size.x - radius and p.y() > size.y - radius :
            self.__setCursor( QtCore.Qt.SizeFDiagCursor )
        else :
            self.__setCursor( None )
    def __setCursor( self, cursor ) :
        if cursor == self.__cursor :
            return
        if self.__cursor is not None :
            QtGui.QApplication.restoreOverrideCursor()
        if cursor is not None :
            QtGui.QApplication.setOverrideCursor( QtGui.QCursor( cursor ) )
        self.__cursor = cursor
    def __closeIfLeft( self ) :
        self.close()
class _VisibilityAnimation( QtCore.QVariantAnimation ) :
    def __init__( self, window, visible ) :
        QtCore.QVariantAnimation.__init__( self )
        self.__window = window
        startValue = self.__window.windowOpacity()
        endValue = 1.0 if visible else 0.0
        self.setStartValue( startValue )
        self.setEndValue( endValue )
        self.setDuration( abs( startValue - endValue ) * 500 )
    def updateCurrentValue( self, value ) :
        value = GafferUI._Variant.fromVariant( value )
        self.__window.setWindowOpacity( value )
        if value == 0 :
            self.__window.hide()
        elif not self.__window.isVisible() :
            self.__window.show()
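# --- Added usage sketch; not part of the original module ---
# A minimal illustration under assumptions: a running Gaffer UI event loop is
# available and GafferUI.Button exists; the child widget and flags are placeholders.
def _exampleUsage() :
    popup = PopupWindow( child = GafferUI.Button( "OK" ), closeOnLeave = True )
    popup.setVisible( True )   # fades in via _VisibilityAnimation
    return popup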
| [
"[email protected]"
] | |
c7867baeca22849ea7b5625a957b27b04f171214 | 3dcb9b9de4e27ee0e7ece48dcd51f920638ca14d | /api/api.py | 291f60faae09d446c7bb503a005fc97f6adb87c9 | [] | no_license | chyld/flask-postgres-react-docker | 4f4a7fb52c52df6fd005af68668a1425139613b1 | e36f36cb32ae259d6472ca7813c4dfb0cb3213da | refs/heads/master | 2021-01-20T07:02:13.625385 | 2017-05-02T06:26:40 | 2017-05-02T06:26:40 | 89,951,591 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,599 | py | from flask import Flask, jsonify, request
from flask_cors import CORS
from flask_sqlalchemy import SQLAlchemy
from marshmallow import Schema
import os
app = Flask(__name__)
CORS(app)
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://postgres:pass1234@db/animals'
db = SQLAlchemy(app)
@app.route('/hello', methods=['GET'])
def hello():
print('hello, hello, hello')
dogs = Dog.query.all()
# schema = DogSchema()
# result = schema.dump(dog)
print('running...')
for dog in dogs:
        print('dog {0}:'.format(dog))
return jsonify({'woof': 'boo'})
@app.route('/nested')
def nested():
return jsonify({"a": 3,
"b": True,
"c": None,
"d": "hello json",
"e": 3.14,
"f": [1, 2, 3],
"g": {"x":1, "y":2, "z":3}
})
@app.route('/echo', methods=['POST'])
def echo():
# import IPython
# from IPython import embed
# embed() # this call anywhere in your program will start IPython
# import pdb; pdb.set_trace()
# IPython.start_ipython()
return jsonify(request.json)
class Dog(db.Model):
__tablename__ = "dogs"
id = db.Column('id', db.Integer, primary_key=True)
name = db.Column('name', db.String(100))
age = db.Column('age', db.Integer)
def __init__(self, name, age):
self.name = name
self.age = age
class DogSchema(Schema):
class Meta:
fields = ('id', 'name', 'age')
if __name__ == '__main__':
app.run(debug=True, host='0.0.0.0', port=int(os.environ['PORT']))
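# Added usage notes (illustrative, not part of the original file): with the server
# listening on $PORT (5000 is only an assumed example value), the routes above can
# be exercised with e.g.
#   curl http://localhost:5000/hello
#   curl -X POST -H "Content-Type: application/json" -d '{"name": "rex"}' http://localhost:5000/echo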
| [
"[email protected]"
] | |
2e24bb1da5abc68896108ac8b9934925cd0b5c5e | aa0c7bb4935ff68bb4ba2be4332890b760c9dda2 | /ipcv/scalespace.py | 998699a9f9b1b62a439bf745940a9dd6c314086b | [
"MIT"
] | permissive | andersbll/ipcv | 0b4deb5f867a4fd642aa7864769e7f4c4901e809 | ea533def7967c9d3a53002ae109db8b256b51c1d | refs/heads/master | 2021-03-12T23:40:26.990304 | 2014-03-05T13:57:31 | 2014-03-05T13:57:31 | 15,453,581 | 2 | 4 | null | null | null | null | UTF-8 | Python | false | false | 3,575 | py | import numpy as np
from scipy.ndimage.filters import gaussian_filter
class ScaleSpace:
def __init__(self, img_shape, sigmas, dys, dxs):
''' Compute the scale-space of an image.
Upon initialization, this class precomputes the Gaussian windows used
to smooth images of a fixed shape to save the computations at later
points.
'''
assert(len(sigmas) == len(dys) == len(dxs))
h, w = img_shape
g_y, g_x = np.mgrid[-.5+.5/h:.5:1./h, -.5+.5/w:.5: 1./w]
self.filters = []
for sigma, dy, dx in zip(sigmas, dys, dxs):
g = np.exp(- (g_x**2 + g_y**2) * (np.pi*2*sigma)**2 / 2.)
g = np.fft.fftshift(g)
if dy > 0 or dx > 0:
#TODO change list(range to np.linspace or similar
dg_y = np.array((list(range(0, h//2))+list(range(-h//2, 0))),
dtype=float, ndmin=2) / h
dg_x = np.array((list(range(0, w//2))+list(range(-w//2, 0))),
dtype=float, ndmin=2) / w
dg = (dg_y.T**dy) * (dg_x**dx) * (1j*2*np.pi)**(dy + dx)
g = np.multiply(g, dg)
self.filters.append(g)
def compute_f(self, img_f):
''' Compute the scale space of an image in the fourier domain.'''
return [np.multiply(img_f, f) for f in self.filters]
def compute(self, img):
''' Compute the scale space of an image.'''
img_f = np.fft.fft2(img)
return [np.fft.ifft2(np.multiply(img_f, f)).real for f in self.filters]
def scalespace(img, sigma, order=(0, 0)):
'''Compute the scale-space of an image. sigma is the scale parameter. dx
and dy specify the differentiation order along the x and y axis
respectively.'''
ss = ScaleSpace(img.shape, [sigma], [order[0]], [order[1]])
return ss.compute(img)[0]
def gradient_orientation(img, scale, signed=True, fft=False):
'''Calculate gradient orientations at scale sigma.'''
normalizer = scale**2
if fft:
Ly = normalizer*scalespace(img, scale, order=(1, 0))
Lx = normalizer*scalespace(img, scale, order=(0, 1))
else:
mode = 'reflect'
Ly = normalizer*gaussian_filter(img, scale, order=(1, 0), mode=mode)
Lx = normalizer*gaussian_filter(img, scale, order=(0, 1), mode=mode)
if signed:
go = np.arctan2(Ly, Lx)
else:
go = np.arctan(Ly/(Lx + 1e-10))
go_m = np.sqrt(Lx**2+Ly**2)
return go, go_m
def shape_index(img, scale, orientations=False, fft=False):
'''Calculate the shape index at the given scale.'''
normalizer = scale**2
if fft:
Lyy = normalizer*scalespace(img, scale, order=(2, 0))
Lxy = normalizer*scalespace(img, scale, order=(1, 1))
Lxx = normalizer*scalespace(img, scale, order=(0, 2))
else:
mode = 'reflect'
Lyy = normalizer*gaussian_filter(img, scale, order=(2, 0), mode=mode)
Lxy = normalizer*gaussian_filter(img, scale, order=(1, 1), mode=mode)
Lxx = normalizer*gaussian_filter(img, scale, order=(0, 2), mode=mode)
si = np.arctan((-Lxx-Lyy) / (np.sqrt((Lxx - Lyy)**2+4*Lxy**2)+1e-10))
si_c = .5*np.sqrt(Lxx**2 + 2*Lxy**2 + Lyy**2)
if orientations:
t = Lxx + Lyy
d = Lxx*Lyy - Lxy**2
l1 = t/2.0 + np.sqrt(np.abs(t**2/4 - d))
l2 = t/2.0 - np.sqrt(np.abs(t**2/4 - d))
y = l1-Lyy
x = Lxy
si_o = np.arctan(y/(x+1e-10))
si_om = l1-l2
return si, si_c, si_o, si_om
else:
return si, si_c
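def _example_usage():
    """Hedged usage sketch added for illustration; not part of the original module."""
    img = np.random.rand(64, 64)                      # any 2-D grayscale image
    smoothed = scalespace(img, sigma=2.0)             # Gaussian scale-space at scale 2
    go, go_m = gradient_orientation(img, scale=2.0)   # orientation and magnitude maps
    si, si_c = shape_index(img, scale=2.0)            # shape index and curvedness
    return smoothed, go, go_m, si, si_c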
| [
"[email protected]"
] | |
ee82f549982587ab5b564579fb516fba6bdf691f | 22013212df1e21f29d0180f2109841177a2a8791 | /basic_addons/account_budget_report/reports/__init__.py | 08af8422c824fc2e2e1015f5bb8891ccaf05f79f | [] | no_license | butagreeza/DTDATA_A | f965236c0d7faf0ec4082d27e2a0ff8e7dafe1c6 | 90b09f89714349a3f26de671a440a979aeebd54c | refs/heads/master | 2023-06-18T00:41:02.521432 | 2021-06-14T21:17:06 | 2021-06-14T21:17:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,138 | py | # -*- coding: utf-8 -*-
##############################################################################
#
# Cybrosys Technologies Pvt. Ltd.
# Copyright (C) 2017-TODAY Cybrosys Technologies(<https://www.cybrosys.com>).
# Author: Jesni Banu(<https://www.cybrosys.com>)
# you can modify it under the terms of the GNU LESSER
# GENERAL PUBLIC LICENSE (LGPL v3), Version 3.
#
# It is forbidden to publish, distribute, sublicense, or sell copies
# of the Software or modified copies of the Software.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU LESSER GENERAL PUBLIC LICENSE (LGPL v3) for more details.
#
# You should have received a copy of the GNU LESSER GENERAL PUBLIC LICENSE
# GENERAL PUBLIC LICENSE (LGPL v3) along with this program.
# If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import budget_parser
import cross_overed_budget_report
import analytic_budget
| [
"[email protected]"
] | |
9657b3ceec8c66aed46b44498f1668e29d1b6871 | 3b09dc4623dac559c85c0333526d55b0615d79d7 | /problems/56.py | 94bcfe31fbef92738fe0088cba102cb331404cf7 | [] | no_license | Asperas13/leetcode | 5d45bd65c490ada9b3cb2c33331a728eab2ef9b4 | 7f2f1d4f221925945328a355d653d9622107fae7 | refs/heads/master | 2021-09-28T15:54:54.761873 | 2020-05-05T15:29:48 | 2020-05-05T15:30:59 | 145,767,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 605 | py | class Solution:
def merge(self, intervals: List[List[int]]) -> List[List[int]]:
if len(intervals) < 2:
return intervals
intervals.sort(key=lambda a: a[0])
prev = intervals[0]
result = []
for i in range(1, len(intervals)):
if intervals[i][0] >= prev[0] and intervals[i][0] <= prev[1]:
prev[0] = min(prev[0], intervals[i][0])
prev[1] = max(prev[1], intervals[i][1])
else:
result.append(prev)
prev = intervals[i]
result.append(prev)
return result | [
"[email protected]"
] | |
ba40efc0bfb9c633b665651d2a5df988b9473ea7 | 250d92826005352e418d9bf9f902da4f6e60b85c | /Frittie/venv-frittie/lib/python2.7/site-packages/celery/task/trace.py | 2ac1a503d98c48ce5122543e571784ff46971406 | [
"BSD-2-Clause"
] | permissive | itamsvtd/FrittieHome | 0e4133e6d64b5d2f42824ec7854b080262a49279 | a81a708bf5c7ef2347f4a3e738a46c2776dce9ed | refs/heads/master | 2020-06-06T07:37:51.622937 | 2012-09-27T03:36:05 | 2012-09-27T03:36:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,253 | py | # -*- coding: utf-8 -*-
"""
celery.task.trace
~~~~~~~~~~~~~~~~~~~~
This module defines how the task execution is traced:
errors are recorded, handlers are applied and so on.
"""
from __future__ import absolute_import
# ## ---
# This is the heart of the worker, the inner loop so to speak.
# It used to be split up into nice little classes and methods,
# but in the end it only resulted in bad performance and horrible tracebacks,
# so instead we now use one closure per task class.
import os
import socket
import sys
from warnings import warn
from kombu.utils import kwdict
from celery import current_app
from celery import states, signals
from celery._state import _task_stack, default_app
from celery.app.task import Task as BaseTask, Context
from celery.datastructures import ExceptionInfo
from celery.exceptions import RetryTaskError
from celery.utils.serialization import get_pickleable_exception
from celery.utils.log import get_logger
_logger = get_logger(__name__)
send_prerun = signals.task_prerun.send
prerun_receivers = signals.task_prerun.receivers
send_postrun = signals.task_postrun.send
postrun_receivers = signals.task_postrun.receivers
send_success = signals.task_success.send
success_receivers = signals.task_success.receivers
STARTED = states.STARTED
SUCCESS = states.SUCCESS
RETRY = states.RETRY
FAILURE = states.FAILURE
EXCEPTION_STATES = states.EXCEPTION_STATES
try:
_tasks = default_app._tasks
except AttributeError:
# Windows: will be set later by concurrency.processes.
pass
def mro_lookup(cls, attr, stop=()):
"""Returns the first node by MRO order that defines an attribute.
:keyword stop: A list of types that if reached will stop the search.
:returns None: if the attribute was not found.
"""
for node in cls.mro():
if node in stop:
return
if attr in node.__dict__:
return node
def task_has_custom(task, attr):
"""Returns true if the task or one of its bases
defines ``attr`` (excluding the one in BaseTask)."""
return mro_lookup(task.__class__, attr, stop=(BaseTask, object))
class TraceInfo(object):
__slots__ = ('state', 'retval')
def __init__(self, state, retval=None):
self.state = state
self.retval = retval
def handle_error_state(self, task, eager=False):
store_errors = not eager
if task.ignore_result:
store_errors = task.store_errors_even_if_ignored
return {
RETRY: self.handle_retry,
FAILURE: self.handle_failure,
}[self.state](task, store_errors=store_errors)
def handle_retry(self, task, store_errors=True):
"""Handle retry exception."""
# the exception raised is the RetryTaskError semi-predicate,
        # and its `exc` attribute is the original exception raised (if any).
req = task.request
type_, _, tb = sys.exc_info()
try:
pred = self.retval
einfo = ExceptionInfo((type_, pred, tb))
if store_errors:
task.backend.mark_as_retry(req.id, pred.exc, einfo.traceback)
task.on_retry(pred.exc, req.id, req.args, req.kwargs, einfo)
return einfo
finally:
del(tb)
def handle_failure(self, task, store_errors=True):
"""Handle exception."""
req = task.request
type_, _, tb = sys.exc_info()
try:
exc = self.retval
einfo = ExceptionInfo((type_, get_pickleable_exception(exc), tb))
if store_errors:
task.backend.mark_as_failure(req.id, exc, einfo.traceback)
task.on_failure(exc, req.id, req.args, req.kwargs, einfo)
signals.task_failure.send(sender=task, task_id=req.id,
exception=exc, args=req.args,
kwargs=req.kwargs,
traceback=einfo.traceback,
einfo=einfo)
return einfo
finally:
del(tb)
def build_tracer(name, task, loader=None, hostname=None, store_errors=True,
Info=TraceInfo, eager=False, propagate=False):
"""Builts a function that tracing the tasks execution; catches all
exceptions, and saves the state and result of the task execution
to the result backend.
If the call was successful, it saves the result to the task result
backend, and sets the task status to `"SUCCESS"`.
If the call raises :exc:`~celery.exceptions.RetryTaskError`, it extracts
the original exception, uses that as the result and sets the task status
to `"RETRY"`.
If the call results in an exception, it saves the exception as the task
result, and sets the task status to `"FAILURE"`.
Returns a function that takes the following arguments:
:param uuid: The unique id of the task.
:param args: List of positional args to pass on to the function.
:param kwargs: Keyword arguments mapping to pass on to the function.
:keyword request: Request dict.
"""
# If the task doesn't define a custom __call__ method
# we optimize it away by simply calling the run method directly,
# saving the extra method call and a line less in the stack trace.
fun = task if task_has_custom(task, '__call__') else task.run
loader = loader or current_app.loader
backend = task.backend
ignore_result = task.ignore_result
track_started = task.track_started
track_started = not eager and (task.track_started and not ignore_result)
publish_result = not eager and not ignore_result
hostname = hostname or socket.gethostname()
loader_task_init = loader.on_task_init
loader_cleanup = loader.on_process_cleanup
task_on_success = None
task_after_return = None
if task_has_custom(task, 'on_success'):
task_on_success = task.on_success
if task_has_custom(task, 'after_return'):
task_after_return = task.after_return
store_result = backend.store_result
backend_cleanup = backend.process_cleanup
pid = os.getpid()
request_stack = task.request_stack
push_request = request_stack.push
pop_request = request_stack.pop
push_task = _task_stack.push
pop_task = _task_stack.pop
on_chord_part_return = backend.on_chord_part_return
from celery import canvas
subtask = canvas.subtask
def trace_task(uuid, args, kwargs, request=None):
R = I = None
kwargs = kwdict(kwargs)
try:
push_task(task)
task_request = Context(request or {}, args=args,
called_directly=False, kwargs=kwargs)
push_request(task_request)
try:
# -*- PRE -*-
if prerun_receivers:
send_prerun(sender=task, task_id=uuid, task=task,
args=args, kwargs=kwargs)
loader_task_init(uuid, task)
if track_started:
store_result(uuid, {'pid': pid,
'hostname': hostname}, STARTED)
# -*- TRACE -*-
try:
R = retval = fun(*args, **kwargs)
state = SUCCESS
except RetryTaskError, exc:
I = Info(RETRY, exc)
state, retval = I.state, I.retval
R = I.handle_error_state(task, eager=eager)
except Exception, exc:
if propagate:
raise
I = Info(FAILURE, exc)
state, retval = I.state, I.retval
R = I.handle_error_state(task, eager=eager)
[subtask(errback).apply_async((uuid, ))
for errback in task_request.errbacks or []]
except BaseException, exc:
raise
except: # pragma: no cover
# For Python2.5 where raising strings are still allowed
# (but deprecated)
if propagate:
raise
I = Info(FAILURE, None)
state, retval = I.state, I.retval
R = I.handle_error_state(task, eager=eager)
[subtask(errback).apply_async((uuid, ))
for errback in task_request.errbacks or []]
else:
# callback tasks must be applied before the result is
# stored, so that result.children is populated.
[subtask(callback).apply_async((retval, ))
for callback in task_request.callbacks or []]
if publish_result:
store_result(uuid, retval, SUCCESS)
if task_on_success:
task_on_success(retval, uuid, args, kwargs)
if success_receivers:
send_success(sender=task, result=retval)
# -* POST *-
if task_request.chord:
on_chord_part_return(task)
if task_after_return:
task_after_return(state, retval, uuid, args, kwargs, None)
if postrun_receivers:
send_postrun(sender=task, task_id=uuid, task=task,
args=args, kwargs=kwargs,
retval=retval, state=state)
finally:
pop_task()
pop_request()
if not eager:
try:
backend_cleanup()
loader_cleanup()
except (KeyboardInterrupt, SystemExit, MemoryError):
raise
except Exception, exc:
_logger.error('Process cleanup failed: %r', exc,
exc_info=True)
except Exception, exc:
if eager:
raise
R = report_internal_error(task, exc)
return R, I
return trace_task
def trace_task(task, uuid, args, kwargs, request={}, **opts):
try:
if task.__trace__ is None:
task.__trace__ = build_tracer(task.name, task, **opts)
return task.__trace__(uuid, args, kwargs, request)[0]
except Exception, exc:
return report_internal_error(task, exc)
def trace_task_ret(task, uuid, args, kwargs, request={}):
return _tasks[task].__trace__(uuid, args, kwargs, request)[0]
def eager_trace_task(task, uuid, args, kwargs, request=None, **opts):
opts.setdefault('eager', True)
return build_tracer(task.name, task, **opts)(
uuid, args, kwargs, request)
def report_internal_error(task, exc):
_type, _value, _tb = sys.exc_info()
try:
_value = task.backend.prepare_exception(exc)
exc_info = ExceptionInfo((_type, _value, _tb), internal=True)
warn(RuntimeWarning(
'Exception raised outside body: %r:\n%s' % (
exc, exc_info.traceback)))
return exc_info
finally:
del(_tb)
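def _example_mro_lookup():
    """Hedged illustration added for this dump; not part of the original module."""
    class A(object):
        attr = 1
    class B(A):
        pass
    # mro_lookup walks B's MRO and returns the first class whose __dict__
    # defines 'attr', i.e. A here; with stop=(A,) it would return None instead.
    return mro_lookup(B, 'attr')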
| [
"[email protected]"
] | |
bacc780f56a918e21b35b9fecc1d2a15d95159bf | 5d1a348e11ad652e6cc8f894d4ca774429f335f9 | /Prob-and-Stats/_Calculators/confidence_intervals.py | 014691dd6abedfa0a271ad2b36d1498a30b5a843 | [] | no_license | anhnguyendepocen/UCSanDiegoX | 5332fe0780540038c0cde70af70d67544a3e7725 | 053a1fae52f9b46188a9fcf10729f70d10b3db63 | refs/heads/master | 2022-04-18T03:23:27.636938 | 2020-03-30T23:29:40 | 2020-03-30T23:29:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 990 | py | import numpy as np
from scipy.stats import norm, t, sem
from math import sqrt
# list = [60, 56, 61, 68, 51, 53, 69, 54, 80, 90, 55, 35, 45]
list = np.random.randint(low=35,high=71, size=20)
print(list)
n = len(list)
mu = np.mean(list)
sigma = np.std(list)
var = np.var(list)
bounds = t.interval(0.90, len(list)-1, loc=np.mean(list), scale=sem(list))
print('The Mean Is =', mu)
print('The Raw Variance ("S^2") Is =', var)
print('The Standard Deviation Is =', sigma)
print('Lower Bounds =', bounds[0])
print('Upper Bounds =', bounds[1])
# the number of tweets a random user is a random variable with sigma=2
# in a sample of 121 users, the sample mean was 3.7
# find the 95% confidence interval for the distribtuion mean.
ci = 0.95
sig = .15
mean = 17.65
users = 50
inv_theta = norm.ppf((1+ci)/2)
std_error = sig/sqrt(users)
tweets_lower = mean - (inv_theta*std_error)
tweets_upper = mean + (inv_theta*std_error)
print('the bounds of number of tweets is =', tweets_lower, tweets_upper)
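# Worked check added for illustration: with ci = 0.95 the two-sided z value is
# norm.ppf(0.975) ~= 1.96, so the half-width is 1.96 * 0.15 / sqrt(50) ~= 0.0416
# and the printed interval is roughly (17.608, 17.692).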
| [
"[email protected]"
] | |
7caaba5f6d3bc82752e4d751b5c5e178037ab7f7 | 74951991a9e1dbe92d4999da9060409a9492bdc3 | /palindrome-number/palindrome-number.py | 3155e839eb464780b9d419ca27c1b6a61a2bf6d4 | [] | no_license | drpuig/Leetcode-1 | fd800ee2f13c7ce03fa57c8a1d10b3aa6976d7c0 | 4ee104f3069c380e1756dd65f6ff6004554e6c0e | refs/heads/main | 2023-07-15T08:57:32.971194 | 2021-08-21T08:29:24 | 2021-08-21T08:29:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 245 | py | class Solution:
def isPalindrome(self, num: int) -> bool:
if num < 0: return False
r, x = 0, num
while x > 0:
r = r * 10 + x % 10
x //= 10
return r == num
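if __name__ == "__main__":
    # Added illustration; not part of the original solution file.
    s = Solution()
    print(s.isPalindrome(121))    # True: reversing the digits rebuilds 121
    print(s.isPalindrome(-121))   # False: negatives are rejected up front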
| [
"[email protected]"
] | |
7eb9e41beacc32274f19363e57b7522cb3378335 | a59d1faced9fe7348ca7143d2a8643e0ebad2132 | /pyvisdk/do/invalid_profile_reference_host.py | 8adf31269e62041bb94e93f6a596abce09a3a869 | [
"MIT"
] | permissive | Infinidat/pyvisdk | c55d0e363131a8f35d2b0e6faa3294c191dba964 | f2f4e5f50da16f659ccc1d84b6a00f397fa997f8 | refs/heads/master | 2023-05-27T08:19:12.439645 | 2014-07-20T11:49:16 | 2014-07-20T11:49:16 | 4,072,898 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,295 | py |
import logging
from pyvisdk.exceptions import InvalidArgumentError
########################################
# Automatically generated, do not edit.
########################################
log = logging.getLogger(__name__)
def InvalidProfileReferenceHost(vim, *args, **kwargs):
'''A InvalidProfileReferenceHost fault is thrown when a valid host is not
associated with a profile in the Virtual Center inventory. This could be
    because there is no host associated with the profile or because the associated
host is incompatible with the profile.'''
obj = vim.client.factory.create('{urn:vim25}InvalidProfileReferenceHost')
# do some validation checking...
if (len(args) + len(kwargs)) < 7:
raise IndexError('Expected at least 8 arguments got: %d' % len(args))
required = [ 'host', 'profile', 'reason', 'dynamicProperty', 'dynamicType', 'faultCause',
'faultMessage' ]
optional = [ ]
for name, arg in zip(required+optional, args):
setattr(obj, name, arg)
for name, value in kwargs.items():
if name in required + optional:
setattr(obj, name, value)
else:
raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))
return obj
| [
"[email protected]"
] | |
a9aae5af5a7cacba668bf8b9da5cef2adec167b3 | a29c7e363026111276e94b96d39b1b4ab48dbca8 | /sdk/test/test_authorisation_request_response.py | d58f0334a7f10dfe20737e184735fd187ad09325 | [
"MIT"
] | permissive | matteo-kalogirou/yapily-sdk-python | a56bf6f9b1b308efda38f081f6237ebd8c8f8ad5 | f10d2d14383f551eeb59aa893d328ffa5080da22 | refs/heads/master | 2022-12-16T22:24:18.026765 | 2020-09-18T13:59:26 | 2020-09-18T13:59:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,523 | py | # coding: utf-8
"""
Yapily API
To access endpoints that require authentication, use your application key and secret created in the Dashboard (https://dashboard.yapily.com) # noqa: E501
The version of the OpenAPI document: 0.0.242
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import datetime
import yapily
from yapily.models.authorisation_request_response import AuthorisationRequestResponse # noqa: E501
from yapily.rest import ApiException
class TestAuthorisationRequestResponse(unittest.TestCase):
"""AuthorisationRequestResponse unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def make_instance(self, include_optional):
"""Test AuthorisationRequestResponse
            include_optional is a boolean, when False only required
params are included, when True both required and
optional params are included """
# model = yapily.models.authorisation_request_response.AuthorisationRequestResponse() # noqa: E501
if include_optional :
return AuthorisationRequestResponse(
id = '0',
user_uuid = '0',
application_user_id = '0',
reference_id = '0',
institution_id = '0',
status = 'AWAITING_AUTHORIZATION',
created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
transaction_from = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
transaction_to = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
expires_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
time_to_expire_in_millis = 56,
time_to_expire = '0',
feature_scope = [
'INITIATE_PRE_AUTHORISATION'
],
authorisation_url = '0',
consent_token = '0',
qr_code_url = '0'
)
else :
return AuthorisationRequestResponse(
)
def testAuthorisationRequestResponse(self):
"""Test AuthorisationRequestResponse"""
inst_req_only = self.make_instance(include_optional=False)
inst_req_and_optional = self.make_instance(include_optional=True)
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
a6b04f1468e584f07faee05a9f0038e74e17f645 | e527efa21057bdab7aff6a6b8c753171e75c6bfe | /quetzalcoatl/settings/celery/prod.py | b940474cf5d70d73314d43a75ecee8d4c48b1ffa | [
"WTFPL"
] | permissive | dem4ply/quetzalcoatl | cb49d6cbf0260ebdb127f6d95d39d299684291c6 | f9f72dc72f0da4f264c33128dc01b79f9fda5f2a | refs/heads/master | 2022-01-10T14:37:08.985767 | 2020-03-14T07:47:27 | 2020-03-14T07:47:27 | 235,905,684 | 0 | 0 | WTFPL | 2021-09-22T18:28:28 | 2020-01-23T23:19:15 | Python | UTF-8 | Python | false | false | 1,131 | py | import os
# from kombu import Exchange, Queue
# from celery.schedules import crontab
# from datetime import timedelta
url_key = os.environ[ 'QUETZALCOATL__RABBITMQ__KEY__URL' ]
celery_url = os.environ[ url_key ]
BROKER_URL = celery_url
RESULT_BACKEND = celery_url
CELERY_RESULT_BACKEND = celery_url
'''
task_annotations = {
'*': {
'rate_limit': '5/s'
}
}
'''
# beat_schedule = 'djcelery.schedulers.DatabaseScheduler'
# TASK_QUEUES = (
# Queue( 'default', Exchange( 'task', 'topic' ), routing_key='default' ),
# Queue(
# 'debug', Exchange( 'task_debug', 'topic' ), routing_key='*.debug.*' ),
# )
#
# TASK_DEFAULT_QUEUE = 'default'
# TASK_DEFAULT_EXCHANGE = "tasks"
# TASK_DEFAULT_EXCHANGE_TYPE = "topic"
# TASK_DEFAULT_ROUTING_KEY = "task.default"
#
# TASK_ROUTES = {
# 'default': {
# 'binding_key': 'task.#',
# },
# 'reader_moe.tasks.debug_task': {
# 'queue': 'debug',
# 'binding_key': 'task.debug.*',
# 'exchange': 'task_debug'
# }
# }
#
# beat_schedule = { }
RESULT_SERIALIZER = 'json'
TASK_SERIALIZER = 'json'
CELERY_ALWAYS_EAGER = False
| [
"[email protected]"
] | |
54320cc144accbbc19a2366c523173264961565a | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02922/s040859855.py | c93462013926489db291dd42664757224e2579ba | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 245 | py | import sys
import math
from collections import deque
def input():
return sys.stdin.readline().rstrip()
def main():
A, B = map(int, input().split())
a = math.ceil((B-1)/(A-1))
print(a)
if __name__ == "__main__":
main()
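# Worked example added for illustration: for input "4 10" the formula gives
# ceil((10 - 1) / (4 - 1)) = ceil(3.0) = 3, so main() would print 3.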
| [
"[email protected]"
] | |
41aab887f5b4c35a78397323e316aa412cbfc975 | da934e0010380fdc6894063540f61b0ebc2c9ded | /vendor/lockfile/lockfile/linklockfile.py | f8aeaefcfc16578a51a1d2fb4c86a762e01c4047 | [
"Apache-2.0",
"MIT"
] | permissive | bopopescu/cc-2 | ed4f1dfe3c98f476ff619058d99855a16272d36b | 37444fb16b36743c439b0d6c3cac2347e0cc0a94 | refs/heads/master | 2022-11-23T03:57:12.255817 | 2014-10-02T06:10:46 | 2014-10-02T06:10:46 | 282,512,589 | 0 | 0 | Apache-2.0 | 2020-07-25T19:36:05 | 2020-07-25T19:36:05 | null | UTF-8 | Python | false | false | 2,419 | py | from __future__ import absolute_import
import time
import os
from . import (LockBase, LockFailed, NotLocked, NotMyLock, LockTimeout,
AlreadyLocked)
class LinkLockFile(LockBase):
"""Lock access to a file using atomic property of link(2).
>>> lock = LinkLockFile('somefile')
>>> lock = LinkLockFile('somefile', threaded=False)
"""
def acquire(self, timeout=None):
try:
open(self.unique_name, "wb").close()
except IOError:
raise LockFailed("failed to create %s" % self.unique_name)
end_time = time.time()
if timeout is not None and timeout > 0:
end_time += timeout
while True:
# Try and create a hard link to it.
try:
print 'making a hard link %s to %s' % (self.unique_name,
self.lock_file)
os.link(self.unique_name, self.lock_file)
except OSError:
# Link creation failed. Maybe we've double-locked?
nlinks = os.stat(self.unique_name).st_nlink
if nlinks == 2:
# The original link plus the one I created == 2. We're
# good to go.
return
else:
# Otherwise the lock creation failed.
if timeout is not None and time.time() > end_time:
os.unlink(self.unique_name)
if timeout > 0:
raise LockTimeout
else:
raise AlreadyLocked
time.sleep(timeout is not None and timeout/10 or 0.1)
else:
# Link creation succeeded. We're good to go.
return
def release(self):
if not self.is_locked():
raise NotLocked
elif not os.path.exists(self.unique_name):
raise NotMyLock
os.unlink(self.unique_name)
os.unlink(self.lock_file)
def is_locked(self):
return os.path.exists(self.lock_file)
def i_am_locking(self):
return (self.is_locked() and
os.path.exists(self.unique_name) and
os.stat(self.unique_name).st_nlink == 2)
def break_lock(self):
if os.path.exists(self.lock_file):
os.unlink(self.lock_file)
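def _example_usage():
    """Hedged usage sketch added for illustration; not part of the original module."""
    lock = LinkLockFile('somefile', threaded=False)
    lock.acquire(timeout=5)   # hard-links the per-process unique file onto the lock file
    try:
        pass                  # ... critical section ...
    finally:
        lock.release()        # removes both the link and the unique file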
| [
"[email protected]"
] | |
498b161763e04089ca2bc69b627c2c265422a62b | e23b28fc3ed196866a04af4e790c1c16b1b5183e | /django/portfolio/apps/portfolio_app/urls.py | 73a949e99ec0e9b82a53e892a13c8fb1443a2aa5 | [] | no_license | diazmc/Python | 6f47e7fcfb8c263eb154d59a5a9b3866e2c9d6a8 | 89e3d54eeb2b0ed7dc7af24103ace6fb6e45d65e | refs/heads/master | 2021-01-20T01:18:23.954877 | 2017-08-24T10:39:19 | 2017-08-24T10:39:19 | 101,283,627 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 146 | py | from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.index),
url(r'^testimonials$', views.testimonial)
]
| [
"[email protected]"
] | |
c451c1854b8bfd9dc2aa1c81ff03ee27356279ce | 7822e004b9697e451a9345589a411133ca12d74e | /scripts/createGradientImage.py | 54fb723a814d6d173509a46a8a6458d07aa24bec | [] | no_license | tomwright01/SLOAntsRegistration | 0e6335feff3f97e59728fdca0f174165df582f4a | 5ff0eb100d40604feae62500c5b8e6cd07c00017 | refs/heads/master | 2021-01-04T14:14:11.212043 | 2014-12-09T20:39:12 | 2014-12-09T20:39:12 | 26,826,192 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 621 | py | import subprocess
import logging
import os
def main(dims,outputName,inputName,Sigma,antsPath):
"""Use the ANTs ImageMath to create a gradient image"""
imPath = os.path.join(antsPath,'ImageMath')
cmd = '{0} {1} {2} Grad {3} {4}'.format(imPath,dims,outputName,inputName,Sigma)
logging.info('Creating Gradient Image with command:')
logging.info('=======================')
logging.info(cmd)
logging.info('=======================')
try:
subprocess.check_call(cmd,shell=True,executable='/bin/bash')
return True
except subprocess.CalledProcessError:
return False
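# Hedged usage sketch added for illustration; the file names and ANTs path below
# are placeholder assumptions, not values from the original script:
#   main(3, 'gradient.nii.gz', 'input.nii.gz', 1.0, '/opt/ANTs/bin')
# would run: /opt/ANTs/bin/ImageMath 3 gradient.nii.gz Grad input.nii.gz 1.0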
| [
"[email protected]"
] | |
f6a87a9dedd704b40464a5040ddb2d851e703ba9 | a9b31181ad6f695a2809018167a52a6d9847c0df | /Chap05-funcoes-frutiferas/compara.py | 2de8e1459dc0075b09a80469e8aaee81d6d62fa9 | [] | no_license | frclasso/Aprendendo_computacao_com_Python | 21cdecdebcdbafad35a48d8425d06e4ec2ba1259 | 40276f396c90d25b301e15e855942a607efd895b | refs/heads/master | 2020-03-12T17:38:04.886153 | 2018-10-11T14:17:13 | 2018-10-11T14:17:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 197 | py | #!/usr/bin/env python3
def compara(x, y):
if x > y:
return 1
elif x == y:
return 0
else:
return -1
print(compara(1,2))
print(compara(3,2))
print(compara(3,3)) | [
"[email protected]"
] | |
1d7fcddad197b9c1e5b50b8573b0b569e645370a | 35a1593fbd15c8ef1a20971055774a1cdcd41bce | /test/test_rpc_fork.py | 5e2432f60183e5c5213ef1772931d6b7939ae669 | [
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"Apache-2.0",
"BSD-2-Clause"
] | permissive | sahils2607/pytorch | 884a2da2a613b525522a1615c8c5ecef013e4fb1 | 16454095e09eab2e737d99ad569cd44bb7910f03 | refs/heads/master | 2020-08-07T15:13:53.319183 | 2019-10-07T21:22:42 | 2019-10-07T21:24:34 | 213,499,886 | 1 | 0 | NOASSERTION | 2019-10-07T22:39:43 | 2019-10-07T22:39:43 | null | UTF-8 | Python | false | false | 421 | py | #!/usr/bin/env python3
from __future__ import absolute_import, division, print_function, unicode_literals
from rpc_test import RpcTest
from common_distributed import MultiProcessTestCase
from common_utils import run_tests
class RpcTestWithFork(MultiProcessTestCase, RpcTest):
def setUp(self):
super(RpcTestWithFork, self).setUp()
self._fork_processes()
if __name__ == '__main__':
run_tests()
| [
"[email protected]"
] | |
3472469d1a6567b5c42751cad45681f14a096b86 | b9bc60cca34c6b4f8a750af6062f357f18dfcae2 | /tensorflow/contrib/ndlstm/python/lstm2d.py | 3907046ddad48c43fe12f40301240acae3703489 | [
"Apache-2.0"
] | permissive | lidenghui1110/tensorflow-0.12.0-fpga | 7c96753aafab5fe79d5d0c500a0bae1251a3d21b | f536d3d0b91f7f07f8e4a3978d362cd21bad832c | refs/heads/master | 2022-11-20T11:42:11.461490 | 2017-07-28T09:28:37 | 2017-07-28T09:28:37 | 98,633,565 | 3 | 2 | Apache-2.0 | 2022-11-15T05:22:07 | 2017-07-28T09:29:01 | C++ | UTF-8 | Python | false | false | 5,639 | py | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A small library of functions dealing with LSTMs applied to images.
Tensors in this library generally have the shape (num_images, height, width,
depth).
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorflow.contrib.ndlstm.python import lstm1d
def _shape(tensor):
"""Get the shape of a tensor as an int list."""
return tensor.get_shape().as_list()
def images_to_sequence(tensor):
"""Convert a batch of images into a batch of sequences.
Args:
tensor: a (num_images, height, width, depth) tensor
Returns:
(width, num_images*height, depth) sequence tensor
"""
num_image_batches, height, width, depth = _shape(tensor)
transposed = tf.transpose(tensor, [2, 0, 1, 3])
return tf.reshape(transposed, [width, num_image_batches * height, depth])
def sequence_to_images(tensor, num_image_batches):
"""Convert a batch of sequences into a batch of images.
Args:
tensor: (num_steps, num_batches, depth) sequence tensor
num_image_batches: the number of image batches
Returns:
(num_images, height, width, depth) tensor
"""
width, num_batches, depth = _shape(tensor)
height = num_batches // num_image_batches
reshaped = tf.reshape(tensor, [width, num_image_batches, height, depth])
return tf.transpose(reshaped, [1, 2, 0, 3])
def horizontal_lstm(images, num_filters_out, scope=None):
"""Run an LSTM bidirectionally over all the rows of each image.
Args:
images: (num_images, height, width, depth) tensor
num_filters_out: output depth
scope: optional scope name
Returns:
(num_images, height, width, num_filters_out) tensor, where
num_steps is width and new num_batches is num_image_batches * height
"""
with tf.variable_scope(scope, "HorizontalLstm", [images]):
batch_size, _, _, _ = _shape(images)
sequence = images_to_sequence(images)
with tf.variable_scope("lr"):
hidden_sequence_lr = lstm1d.ndlstm_base(sequence, num_filters_out // 2)
with tf.variable_scope("rl"):
hidden_sequence_rl = (
lstm1d.ndlstm_base(sequence,
num_filters_out - num_filters_out // 2,
reverse=1))
output_sequence = tf.concat(2, [hidden_sequence_lr, hidden_sequence_rl])
output = sequence_to_images(output_sequence, batch_size)
return output
def separable_lstm(images, num_filters_out, nhidden=None, scope=None):
"""Run bidirectional LSTMs first horizontally then vertically.
Args:
images: (num_images, height, width, depth) tensor
num_filters_out: output layer depth
nhidden: hidden layer depth
scope: optional scope name
Returns:
(num_images, height, width, num_filters_out) tensor
"""
with tf.variable_scope(scope, "SeparableLstm", [images]):
if nhidden is None:
nhidden = num_filters_out
hidden = horizontal_lstm(images, nhidden)
with tf.variable_scope("vertical"):
transposed = tf.transpose(hidden, [0, 2, 1, 3])
output_transposed = horizontal_lstm(transposed, num_filters_out)
output = tf.transpose(output_transposed, [0, 2, 1, 3])
return output
def reduce_to_sequence(images, num_filters_out, scope=None):
"""Reduce an image to a sequence by scanning an LSTM vertically.
Args:
images: (num_images, height, width, depth) tensor
num_filters_out: output layer depth
scope: optional scope name
Returns:
A (width, num_images, num_filters_out) sequence.
"""
with tf.variable_scope(scope, "ReduceToSequence", [images]):
batch_size, height, width, depth = _shape(images)
transposed = tf.transpose(images, [1, 0, 2, 3])
reshaped = tf.reshape(transposed, [height, batch_size * width, depth])
reduced = lstm1d.sequence_to_final(reshaped, num_filters_out)
output = tf.reshape(reduced, [batch_size, width, num_filters_out])
return output
def reduce_to_final(images, num_filters_out, nhidden=None, scope=None):
"""Reduce an image to a final state by running two LSTMs.
Args:
images: (num_images, height, width, depth) tensor
num_filters_out: output layer depth
nhidden: hidden layer depth (defaults to num_filters_out)
scope: optional scope name
Returns:
A (num_images, num_filters_out) batch.
"""
with tf.variable_scope(scope, "ReduceToFinal", [images]):
nhidden = nhidden or num_filters_out
batch_size, height, width, depth = _shape(images)
transposed = tf.transpose(images, [1, 0, 2, 3])
reshaped = tf.reshape(transposed, [height, batch_size * width, depth])
with tf.variable_scope("reduce1"):
reduced = lstm1d.sequence_to_final(reshaped, nhidden)
transposed_hidden = tf.reshape(reduced, [batch_size, width, nhidden])
hidden = tf.transpose(transposed_hidden, [1, 0, 2])
with tf.variable_scope("reduce2"):
output = lstm1d.sequence_to_final(hidden, num_filters_out)
return output
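def _example_usage():
  """Hedged usage sketch added for illustration; not part of the original module.
  Assumes the graph-mode TF API used throughout this file."""
  images = tf.placeholder(tf.float32, [8, 32, 32, 3])   # (num_images, height, width, depth)
  hidden = separable_lstm(images, num_filters_out=16)   # -> (8, 32, 32, 16)
  final = reduce_to_final(images, num_filters_out=10)   # -> (8, 10)
  return hidden, final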
| [
"[email protected]"
] | |
e6b0e6837166020928a9bfbdf5bc302fa4f86ad8 | 7dfa21d74dae975082c6d5deaa01248bac1dcc26 | /.circleci/cimodel/data/pytorch_build_data.py | 09476a970b40045f3d53a7de2f01f11f71d683ae | [
"BSD-3-Clause",
"BSD-2-Clause",
"LicenseRef-scancode-generic-cla",
"Apache-2.0"
] | permissive | mruberry/pytorch | 88cf536ed58d20a409c1e5119be4ec04ec960082 | 19f73180cfb39eb67110d2a1d541975a49211453 | refs/heads/master | 2022-02-03T16:25:31.070089 | 2019-04-22T17:52:28 | 2019-04-22T17:58:15 | 130,132,886 | 4 | 1 | NOASSERTION | 2020-01-16T16:51:39 | 2018-04-18T23:24:38 | C++ | UTF-8 | Python | false | false | 3,956 | py | #!/usr/bin/env python3
from cimodel.lib.conf_tree import ConfigNode, X
CONFIG_TREE_DATA = [
("trusty", [
(None, [
X("2.7.9"),
X("2.7"),
X("3.5"),
X("nightly"),
]),
("gcc", [
("4.8", [X("3.6")]),
("5.4", [("3.6", [X(False), X(True)])]),
("7", [X("3.6")]),
]),
]),
("xenial", [
("clang", [
("5", [X("3.6")]),
]),
("cuda", [
("8", [X("3.6")]),
("9", [
# Note there are magic strings here
# https://github.com/pytorch/pytorch/blob/master/.jenkins/pytorch/build.sh#L21
# and
# https://github.com/pytorch/pytorch/blob/master/.jenkins/pytorch/build.sh#L143
# and
# https://github.com/pytorch/pytorch/blob/master/.jenkins/pytorch/build.sh#L153
# (from https://github.com/pytorch/pytorch/pull/17323#discussion_r259453144)
X("2.7"),
X("3.6"),
]),
("9.2", [X("3.6")]),
("10", [X("3.6")]),
]),
("android", [
("r19c", [X("3.6")]),
]),
]),
]
def get_major_pyver(dotted_version):
parts = dotted_version.split(".")
return "py" + parts[0]
class TreeConfigNode(ConfigNode):
def __init__(self, parent, node_name, subtree):
super(TreeConfigNode, self).__init__(parent, self.modify_label(node_name))
self.subtree = subtree
self.init2(node_name)
def modify_label(self, label):
return label
def init2(self, node_name):
pass
def get_children(self):
return [self.child_constructor()(self, k, v) for (k, v) in self.subtree]
class TopLevelNode(TreeConfigNode):
def __init__(self, node_name, subtree):
super(TopLevelNode, self).__init__(None, node_name, subtree)
# noinspection PyMethodMayBeStatic
def child_constructor(self):
return DistroConfigNode
class DistroConfigNode(TreeConfigNode):
def init2(self, node_name):
self.props["distro_name"] = node_name
def child_constructor(self):
distro = self.find_prop("distro_name")
next_nodes = {
"trusty": TrustyCompilerConfigNode,
"xenial": XenialCompilerConfigNode,
}
return next_nodes[distro]
class TrustyCompilerConfigNode(TreeConfigNode):
def modify_label(self, label):
return label or "<unspecified>"
def init2(self, node_name):
self.props["compiler_name"] = node_name
def child_constructor(self):
return TrustyCompilerVersionConfigNode if self.props["compiler_name"] else PyVerConfigNode
class TrustyCompilerVersionConfigNode(TreeConfigNode):
def init2(self, node_name):
self.props["compiler_version"] = node_name
# noinspection PyMethodMayBeStatic
def child_constructor(self):
return PyVerConfigNode
class PyVerConfigNode(TreeConfigNode):
def init2(self, node_name):
self.props["pyver"] = node_name
self.props["abbreviated_pyver"] = get_major_pyver(node_name)
# noinspection PyMethodMayBeStatic
def child_constructor(self):
return XlaConfigNode
class XlaConfigNode(TreeConfigNode):
def modify_label(self, label):
return "XLA=" + str(label)
def init2(self, node_name):
self.props["is_xla"] = node_name
class XenialCompilerConfigNode(TreeConfigNode):
def init2(self, node_name):
self.props["compiler_name"] = node_name
# noinspection PyMethodMayBeStatic
def child_constructor(self):
return XenialCompilerVersionConfigNode
class XenialCompilerVersionConfigNode(TreeConfigNode):
def init2(self, node_name):
self.props["compiler_version"] = node_name
# noinspection PyMethodMayBeStatic
def child_constructor(self):
return PyVerConfigNode
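def _example_usage():
    """Hedged illustration added for this dump; not part of the original file.
    The root label string is arbitrary; find_prop/get_children come from
    cimodel.lib.conf_tree.ConfigNode."""
    root = TopLevelNode("PyTorch Builds", CONFIG_TREE_DATA)
    # Each top-level child is a DistroConfigNode ("trusty", "xenial").
    return [child.find_prop("distro_name") for child in root.get_children()]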
| [
"[email protected]"
] | |
71dafe2db4bc761973d6704dc92903b815a5d803 | df7f13ec34591fe1ce2d9aeebd5fd183e012711a | /hata/discord/channel/channel_metadata/tests/test__parse_video_quality_mode.py | 1d27462067dbc38950831d7cb97ceae62bdabb9d | [
"LicenseRef-scancode-warranty-disclaimer"
] | permissive | HuyaneMatsu/hata | 63e2f6a2d7a7539fd8f18498852d9d3fe5c41d2e | 53f24fdb38459dc5a4fd04f11bdbfee8295b76a4 | refs/heads/master | 2023-08-20T15:58:09.343044 | 2023-08-20T13:09:03 | 2023-08-20T13:09:03 | 163,677,173 | 3 | 3 | Apache-2.0 | 2019-12-18T03:46:12 | 2018-12-31T14:59:47 | Python | UTF-8 | Python | false | false | 594 | py | import vampytest
from ..preinstanced import VideoQualityMode
from ..fields import parse_video_quality_mode
def test__parse_video_quality_mode():
"""
Tests whether ``parse_video_quality_mode`` works as intended.
"""
for input_data, expected_output in (
({}, VideoQualityMode.auto),
({'video_quality_mode': VideoQualityMode.auto.value}, VideoQualityMode.auto),
({'video_quality_mode': VideoQualityMode.full.value}, VideoQualityMode.full),
):
output = parse_video_quality_mode(input_data)
vampytest.assert_eq(output, expected_output)
| [
"[email protected]"
] | |
02870225cf065083ba4335fd8a97915249b45f48 | cf50ea39bfd5a7dee49f10c5889637131bb40c74 | /11-CHAPTER/3-multiple-inheritance.py | f44b08ef42b3cd67a92e4e03882b0df37fad6336 | [] | no_license | Rishi05051997/Python-Notes | 4878b1760731d7b7f5060f320ec9758fc5946536 | 1c7c1d927e1c78be430d7131f569e3272f8e81ad | refs/heads/main | 2023-07-15T03:00:06.498240 | 2021-08-24T05:27:46 | 2021-08-24T05:27:46 | 377,142,221 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 307 | py | class Employee:
company = "Visa"
eCode = 120
class Freelancer:
company = "Fiverr"
level = 0
def upgradeLevel(self):
self.level = self.level + 1
class Programmer(Employee, Freelancer):
name = "Vrushabh"
p = Programmer()
p.upgradeLevel()
print(p.level)
print(p.company)
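# Added illustration: with multiple inheritance the attribute lookup follows the
# MRO from left to right, which is why p.company resolves to Employee's "Visa".
print(Programmer.__mro__)  # (Programmer, Employee, Freelancer, object)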
| [
"[email protected]"
] | |
8e15123ac1006ef3d53de1573baf06184dd75c95 | 48832d27da16256ee62c364add45f21b968ee669 | /res_bw/scripts/common/lib/plat-mac/carbon/lists.py | 4fc2252e450153f6e01beee179948af9cb5f9698 | [] | no_license | webiumsk/WOT-0.9.15.1 | 0752d5bbd7c6fafdd7f714af939ae7bcf654faf7 | 17ca3550fef25e430534d079876a14fbbcccb9b4 | refs/heads/master | 2021-01-20T18:24:10.349144 | 2016-08-04T18:08:34 | 2016-08-04T18:08:34 | 64,955,694 | 0 | 0 | null | null | null | null | WINDOWS-1250 | Python | false | false | 1,082 | py | # 2016.08.04 20:01:15 Střední Evropa (letní čas)
# Embedded file name: scripts/common/Lib/plat-mac/Carbon/Lists.py
def FOUR_CHAR_CODE(x):
return x
listNotifyNothing = FOUR_CHAR_CODE('nada')
listNotifyClick = FOUR_CHAR_CODE('clik')
listNotifyDoubleClick = FOUR_CHAR_CODE('dblc')
listNotifyPreClick = FOUR_CHAR_CODE('pclk')
lDrawingModeOffBit = 3
lDoVAutoscrollBit = 1
lDoHAutoscrollBit = 0
lDrawingModeOff = 8
lDoVAutoscroll = 2
lDoHAutoscroll = 1
lOnlyOneBit = 7
lExtendDragBit = 6
lNoDisjointBit = 5
lNoExtendBit = 4
lNoRectBit = 3
lUseSenseBit = 2
lNoNilHiliteBit = 1
lOnlyOne = -128
lExtendDrag = 64
lNoDisjoint = 32
lNoExtend = 16
lNoRect = 8
lUseSense = 4
lNoNilHilite = 2
lInitMsg = 0
lDrawMsg = 1
lHiliteMsg = 2
lCloseMsg = 3
kListDefProcPtr = 0
kListDefUserProcType = kListDefProcPtr
kListDefStandardTextType = 1
kListDefStandardIconType = 2
# okay decompyling c:\Users\PC\wotsources\files\originals\res_bw\scripts\common\lib\plat-mac\carbon\lists.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2016.08.04 20:01:15 Střední Evropa (letní čas)
| [
"[email protected]"
] | |
1dbbd38333e4bdfa695a265eab97dede7839959c | 893a2fea722b77148f1fb1cac066ce476f1afa0a | /codeforces/cf_beta_85/problem2.py | 8c260d049262d5c1c10a34300dcf43695e7bd3a3 | [] | no_license | the-brainiac/contests | feb9f1ee1abdfb3cc9dccd5a69623192b4ec09ed | b95426aa3e54e703f7924fe0f222c2915e07c8f7 | refs/heads/main | 2023-05-12T13:10:11.765678 | 2021-06-03T04:05:50 | 2021-06-03T04:05:50 | 373,376,225 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 838 | py | N = 10**5
is_prime = [1]*N
# We know 0 and 1 are composites
is_prime[0] = 0
is_prime[1] = 0
def sieve():
"""
We cross out all composites from 2 to sqrt(N)
"""
i = 2
# This will loop from 2 to int(sqrt(x))
while i*i <= N:
# If we already crossed out this number, then continue
if is_prime[i] == 0:
i += 1
continue
j = 2*i
while j < N:
# Cross out this as it is composite
is_prime[j] = 0
# j is incremented by i, because we want to cover all multiples of i
j += i
i += 1
sieve()
def nextPrime(n):
i = n
while True:
if is_prime[i]:
return i
i += 1
for _ in range(int(input())):
d = int(input())
k1 = nextPrime(d+1)
k2 = nextPrime(d+k1)
print(k1*k2) | [
"[email protected]"
] | |
ab512419d61466446b1eb72ac84831498db20e06 | 50948d4cb10dcb1cc9bc0355918478fb2841322a | /azure-mgmt-network/azure/mgmt/network/v2018_02_01/operations/express_route_circuits_operations.py | 3f54bb957557bb672da6c0a1511c65b6114abde7 | [
"MIT"
] | permissive | xiafu-msft/azure-sdk-for-python | de9cd680b39962702b629a8e94726bb4ab261594 | 4d9560cfd519ee60667f3cc2f5295a58c18625db | refs/heads/master | 2023-08-12T20:36:24.284497 | 2019-05-22T00:55:16 | 2019-05-22T00:55:16 | 187,986,993 | 1 | 0 | MIT | 2020-10-02T01:17:02 | 2019-05-22T07:33:46 | Python | UTF-8 | Python | false | false | 46,037 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from msrest.polling import LROPoller, NoPolling
from msrestazure.polling.arm_polling import ARMPolling
from .. import models
class ExpressRouteCircuitsOperations(object):
"""ExpressRouteCircuitsOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
:ivar api_version: Client API version. Constant value: "2018-02-01".
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.api_version = "2018-02-01"
self.config = config
def _delete_initial(
self, resource_group_name, circuit_name, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.delete.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.delete(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200, 202, 204]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def delete(
self, resource_group_name, circuit_name, custom_headers=None, raw=False, polling=True, **operation_config):
"""Deletes the specified express route circuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns None or
ClientRawResponse<None> if raw==True
:rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}'}
def get(
self, resource_group_name, circuit_name, custom_headers=None, raw=False, **operation_config):
"""Gets information about the specified express route circuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of express route circuit.
:type circuit_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: ExpressRouteCircuit or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.network.v2018_02_01.models.ExpressRouteCircuit or
~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = self.get.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ExpressRouteCircuit', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}'}
def _create_or_update_initial(
self, resource_group_name, circuit_name, parameters, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.create_or_update.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
body_content = self._serialize.body(parameters, 'ExpressRouteCircuit')
# Construct and send request
request = self._client.put(url, query_parameters, header_parameters, body_content)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200, 201]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ExpressRouteCircuit', response)
if response.status_code == 201:
deserialized = self._deserialize('ExpressRouteCircuit', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def create_or_update(
self, resource_group_name, circuit_name, parameters, custom_headers=None, raw=False, polling=True, **operation_config):
"""Creates or updates an express route circuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the circuit.
:type circuit_name: str
:param parameters: Parameters supplied to the create or update express
route circuit operation.
:type parameters:
~azure.mgmt.network.v2018_02_01.models.ExpressRouteCircuit
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns ExpressRouteCircuit or
ClientRawResponse<ExpressRouteCircuit> if raw==True
:rtype:
~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.network.v2018_02_01.models.ExpressRouteCircuit]
or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.network.v2018_02_01.models.ExpressRouteCircuit]]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
parameters=parameters,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
deserialized = self._deserialize('ExpressRouteCircuit', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}'}
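    # Hypothetical usage sketch (illustration only, not part of the generated SDK code),
    # assuming an already configured NetworkManagementClient exposed as `network_client`
    # and an ExpressRouteCircuit model instance named `circuit_model`:
    #
    #     poller = network_client.express_route_circuits.create_or_update(
    #         'my-resource-group', 'my-circuit', circuit_model)
    #     circuit = poller.result()  # block until the long-running operation finishes
    #
    # With raw=True the poller yields a ClientRawResponse wrapping the same model, and
    # polling=False skips ARM polling entirely, as described in the docstring above.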
def _update_tags_initial(
self, resource_group_name, circuit_name, tags=None, custom_headers=None, raw=False, **operation_config):
parameters = models.TagsObject(tags=tags)
# Construct URL
url = self.update_tags.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
body_content = self._serialize.body(parameters, 'TagsObject')
# Construct and send request
request = self._client.patch(url, query_parameters, header_parameters, body_content)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ExpressRouteCircuit', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def update_tags(
self, resource_group_name, circuit_name, tags=None, custom_headers=None, raw=False, polling=True, **operation_config):
"""Updates an express route circuit tags.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the circuit.
:type circuit_name: str
:param tags: Resource tags.
:type tags: dict[str, str]
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns ExpressRouteCircuit or
ClientRawResponse<ExpressRouteCircuit> if raw==True
:rtype:
~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.network.v2018_02_01.models.ExpressRouteCircuit]
or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.network.v2018_02_01.models.ExpressRouteCircuit]]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._update_tags_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
tags=tags,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
deserialized = self._deserialize('ExpressRouteCircuit', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}'}
def _list_arp_table_initial(
self, resource_group_name, circuit_name, peering_name, device_path, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.list_arp_table.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'devicePath': self._serialize.url("device_path", device_path, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200, 202]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ExpressRouteCircuitsArpTableListResult', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def list_arp_table(
self, resource_group_name, circuit_name, peering_name, device_path, custom_headers=None, raw=False, polling=True, **operation_config):
"""Gets the currently advertised ARP table associated with the express
route circuit in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:param device_path: The path of the device.
:type device_path: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns
ExpressRouteCircuitsArpTableListResult or
ClientRawResponse<ExpressRouteCircuitsArpTableListResult> if raw==True
:rtype:
~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.network.v2018_02_01.models.ExpressRouteCircuitsArpTableListResult]
or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.network.v2018_02_01.models.ExpressRouteCircuitsArpTableListResult]]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._list_arp_table_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
peering_name=peering_name,
device_path=device_path,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
deserialized = self._deserialize('ExpressRouteCircuitsArpTableListResult', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
list_arp_table.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/arpTables/{devicePath}'}
def _list_routes_table_initial(
self, resource_group_name, circuit_name, peering_name, device_path, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.list_routes_table.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'devicePath': self._serialize.url("device_path", device_path, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200, 202]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ExpressRouteCircuitsRoutesTableListResult', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def list_routes_table(
self, resource_group_name, circuit_name, peering_name, device_path, custom_headers=None, raw=False, polling=True, **operation_config):
"""Gets the currently advertised routes table associated with the express
route circuit in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:param device_path: The path of the device.
:type device_path: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns
ExpressRouteCircuitsRoutesTableListResult or
ClientRawResponse<ExpressRouteCircuitsRoutesTableListResult> if
raw==True
:rtype:
~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.network.v2018_02_01.models.ExpressRouteCircuitsRoutesTableListResult]
or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.network.v2018_02_01.models.ExpressRouteCircuitsRoutesTableListResult]]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._list_routes_table_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
peering_name=peering_name,
device_path=device_path,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
deserialized = self._deserialize('ExpressRouteCircuitsRoutesTableListResult', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
list_routes_table.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/routeTables/{devicePath}'}
def _list_routes_table_summary_initial(
self, resource_group_name, circuit_name, peering_name, device_path, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.list_routes_table_summary.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'devicePath': self._serialize.url("device_path", device_path, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200, 202]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ExpressRouteCircuitsRoutesTableSummaryListResult', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def list_routes_table_summary(
self, resource_group_name, circuit_name, peering_name, device_path, custom_headers=None, raw=False, polling=True, **operation_config):
"""Gets the currently advertised routes table summary associated with the
express route circuit in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:param device_path: The path of the device.
:type device_path: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns
ExpressRouteCircuitsRoutesTableSummaryListResult or
ClientRawResponse<ExpressRouteCircuitsRoutesTableSummaryListResult> if
raw==True
:rtype:
~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.network.v2018_02_01.models.ExpressRouteCircuitsRoutesTableSummaryListResult]
or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.network.v2018_02_01.models.ExpressRouteCircuitsRoutesTableSummaryListResult]]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._list_routes_table_summary_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
peering_name=peering_name,
device_path=device_path,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
deserialized = self._deserialize('ExpressRouteCircuitsRoutesTableSummaryListResult', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
list_routes_table_summary.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/routeTablesSummary/{devicePath}'}
def get_stats(
self, resource_group_name, circuit_name, custom_headers=None, raw=False, **operation_config):
"""Gets all the stats from an express route circuit in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: ExpressRouteCircuitStats or ClientRawResponse if raw=true
:rtype:
~azure.mgmt.network.v2018_02_01.models.ExpressRouteCircuitStats or
~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = self.get_stats.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ExpressRouteCircuitStats', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get_stats.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/stats'}
def get_peering_stats(
self, resource_group_name, circuit_name, peering_name, custom_headers=None, raw=False, **operation_config):
"""Gets all stats from an express route circuit in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: ExpressRouteCircuitStats or ClientRawResponse if raw=true
:rtype:
~azure.mgmt.network.v2018_02_01.models.ExpressRouteCircuitStats or
~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = self.get_peering_stats.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ExpressRouteCircuitStats', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get_peering_stats.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/stats'}
def list(
self, resource_group_name, custom_headers=None, raw=False, **operation_config):
"""Gets all the express route circuits in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of ExpressRouteCircuit
:rtype:
~azure.mgmt.network.v2018_02_01.models.ExpressRouteCircuitPaged[~azure.mgmt.network.v2018_02_01.models.ExpressRouteCircuit]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = self.list.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
return response
# Deserialize response
deserialized = models.ExpressRouteCircuitPaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.ExpressRouteCircuitPaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits'}
def list_all(
self, custom_headers=None, raw=False, **operation_config):
"""Gets all the express route circuits in a subscription.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of ExpressRouteCircuit
:rtype:
~azure.mgmt.network.v2018_02_01.models.ExpressRouteCircuitPaged[~azure.mgmt.network.v2018_02_01.models.ExpressRouteCircuit]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = self.list_all.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
return response
# Deserialize response
deserialized = models.ExpressRouteCircuitPaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.ExpressRouteCircuitPaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/expressRouteCircuits'}
| [
"[email protected]"
] | |
61f2b2619c96c01b5dda1b6c9aeb86457872c271 | 0ee8350bedb5c8ac575ee0b634fece214a06646a | /poezio/asyncio.py | 2b02a91ffd66a8b6a9a97f66ca7342e5dcce6026 | [
"Zlib",
"CC-BY-2.0"
] | permissive | LukeMarlin/poezio | 3fcad784d37aa665850b649622d6f8d75cc1fa3f | 884aae28a24d65951cc7d57f6044098f236f52bc | refs/heads/master | 2021-01-09T20:12:18.234842 | 2016-10-28T15:26:02 | 2016-10-28T15:57:38 | 72,423,788 | 1 | 0 | null | 2016-10-31T09:54:48 | 2016-10-31T09:54:48 | null | UTF-8 | Python | false | false | 1,353 | py | """
A module that monkey patches the standard asyncio module to add an
idle_call() method to the main loop. This method is used to execute a
callback whenever the loop is not busy handling anything else. This means
that it is a callback with lower priority than IO, timer, or even
call_soon() ones. These callback are called only once each.
"""
import asyncio
import functools
import collections
from asyncio import events
import slixmpp
def monkey_patch_asyncio_slixmpp():
def idle_call(self, callback):
if asyncio.iscoroutinefunction(callback):
raise TypeError("coroutines cannot be used with idle_call()")
handle = events.Handle(callback, [], self)
self._idle.append(handle)
def my_run_once(self):
if self._idle:
self._ready.append(events.Handle(lambda: None, (), self))
real_run_once(self)
if self._idle:
handle = self._idle.popleft()
handle._run()
cls = asyncio.get_event_loop().__class__
cls._idle = collections.deque()
cls.idle_call = idle_call
real_run_once = cls._run_once
cls._run_once = my_run_once
spawn_event = slixmpp.xmlstream.XMLStream._spawn_event
def patchy(self, xml):
self.loop.idle_call(functools.partial(spawn_event, self, xml))
slixmpp.xmlstream.XMLStream._spawn_event = patchy
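# Minimal usage sketch (an addition for illustration, not part of the original
# module): after monkey_patch_asyncio_slixmpp() is applied, the event loop class
# gains idle_call(), which runs a plain (non-coroutine) callback once, only when
# the loop has nothing more urgent to do.
if __name__ == '__main__':
    monkey_patch_asyncio_slixmpp()
    loop = asyncio.get_event_loop()
    loop.idle_call(lambda: print('loop is idle'))  # lower priority than IO or timers
    loop.call_later(0.5, loop.stop)  # stop the demo loop shortly afterwards
    loop.run_forever()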
| [
"[email protected]"
] | |
36119431fd312a3e8902674067afbe6396c63da9 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/59/usersdata/219/29883/submittedfiles/testes.py | 4f4b4f48b93942c5a8eddaabeee18acfd3de9bd6 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 301 | py | # -*- coding: utf-8 -*-
#COMECE AQUI ABAIXO
a=int(input('Digite a:'))
b=int(input('Digite b:'))
c=int(input('Digite c:'))
d=int(input('Digite d:'))
if a>=b and a>=c and a>=d:
print(a)
if b<=c and b<=d:
print(b)
elif b>=a and b>=c and b>=d:
    print(b)
| [
"[email protected]"
] | |
4455b3a1b142bedf192ae2f451c4ff35db376820 | 176c59cf09d42c66d4101eca52beb9c3ea7362a1 | /pyramid_authsanity/tests/test_includeme.py | ad635c82cfa576e918a2c5fbe5ec15a7f88c8027 | [
"ISC"
] | permissive | stevepiercy/pyramid_authsanity | 146d90abcf7622e1d509eb069bfbbf80ed61acc8 | daf7188a8ab1a8bd215d9e1e1cb6682e87fa8ac7 | refs/heads/master | 2021-07-16T08:33:46.683994 | 2016-01-10T05:48:32 | 2016-01-10T05:48:32 | 51,718,108 | 0 | 0 | null | 2016-02-14T22:53:13 | 2016-02-14T22:53:13 | null | UTF-8 | Python | false | false | 3,043 | py | import pytest
from pyramid.authorization import ACLAuthorizationPolicy
import pyramid.testing
from zope.interface import (
Interface,
implementedBy,
providedBy,
)
from zope.interface.verify import (
verifyClass,
verifyObject
)
from pyramid_services import IServiceClassifier
from pyramid_authsanity.interfaces import (
IAuthSourceService,
)
class TestAuthServicePolicyIntegration(object):
@pytest.fixture(autouse=True)
def pyramid_config(self, request):
from pyramid.interfaces import IDebugLogger
self.config = pyramid.testing.setUp()
self.config.set_authorization_policy(ACLAuthorizationPolicy())
def finish():
del self.config
pyramid.testing.tearDown()
request.addfinalizer(finish)
def _makeOne(self, settings):
self.config.registry.settings.update(settings)
self.config.include('pyramid_authsanity')
def test_include_me(self):
from pyramid_authsanity.policy import AuthServicePolicy
self._makeOne({})
self.config.commit()
introspector = self.config.registry.introspector
auth_policy = introspector.get('authentication policy', None)
assert isinstance(auth_policy['policy'], AuthServicePolicy)
with pytest.raises(ValueError):
find_service_factory(self.config, IAuthSourceService)
def test_include_me_cookie_no_secret(self):
settings = {'authsanity.source': 'cookie'}
with pytest.raises(RuntimeError):
self._makeOne(settings)
def test_include_me_cookie_with_secret(self):
from pyramid_authsanity.policy import AuthServicePolicy
settings = {'authsanity.source': 'cookie', 'authsanity.secret': 'sekrit'}
self._makeOne(settings)
self.config.commit()
introspector = self.config.registry.introspector
auth_policy = introspector.get('authentication policy', None)
assert isinstance(auth_policy['policy'], AuthServicePolicy)
assert verifyClass(IAuthSourceService, find_service_factory(self.config, IAuthSourceService))
def test_include_me_session(self):
from pyramid_authsanity.policy import AuthServicePolicy
settings = {'authsanity.source': 'session'}
self._makeOne(settings)
self.config.commit()
introspector = self.config.registry.introspector
auth_policy = introspector.get('authentication policy', None)
assert isinstance(auth_policy['policy'], AuthServicePolicy)
assert verifyClass(IAuthSourceService, find_service_factory(self.config, IAuthSourceService))
def find_service_factory(
config,
iface=Interface,
):
context_iface = providedBy(None)
svc_types = (IServiceClassifier, context_iface)
adapters = config.registry.adapters
svc_factory = adapters.lookup(svc_types, iface, name='')
if svc_factory is None:
raise ValueError('could not find registered service')
return svc_factory
| [
"[email protected]"
] | |
0a0504b0bc50786ad6319cc72a59f6bd7ed5d613 | 8f7c595f2b9d075a89417760b7fbf9abb1fecb72 | /try_enricher.py | e94463b5038fad8d0620db03b893bab816739527 | [
"MIT"
] | permissive | MainakMaitra/trading-utils | 555ed240a20b26d4876f1490fc8a2d9273231fc5 | 3e73091b4d3432e74c385a9677b7f7ca4192c67f | refs/heads/main | 2023-07-04T09:19:40.122188 | 2021-08-08T09:01:37 | 2021-08-08T09:01:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 804 | py | import json
import matplotlib.pyplot as plt
import pandas as pd
from common.analyst import fetch_data_from_cache
plt.ioff()
pd.set_option("display.max_columns", None)
pd.set_option("display.width", None)
ticker = "AAPL"
# weekly_options.set_index('Symbol', inplace=True)
# cboe_options = pd.read_csv(f"data/cboesymboldirequityindex.csv")
# print(has_options('AAPL'))
# data, ticker_df = fetch_data_on_demand(ticker)
data = fetch_data_from_cache(ticker, is_etf=False)
key_values = list([(k, data[k]) for k in data.keys() if "month_" in k])
for kv in key_values:
print(kv)
# weekly_ticker_candles = convert_to_weekly(df)
#
# for wp in [4, 8]:
# df[["max_weekly_{}".format(wp), "max_weekly_{}_at".format(wp)]] = max_weekly(
# weekly_ticker_candles, week_until=wp
# )
# print(df)
| [
"[email protected]"
] | |
0ab91b8fc7a8f722176caa772e62d86f3f98bab8 | ecbf6a7c04b068a4f5606bbab46b974e53bd28d8 | /src/replace_localparam.py | debb116037d08c9f96c454b86e2bebe117057dc9 | [
"MIT"
] | permissive | jamesjiang52/V2SV | 970357be757ba068111645fd6964e8672a72f69f | 4b6109d16482131785b9dfec13fd66452078ae17 | refs/heads/main | 2022-12-30T06:47:27.814305 | 2020-10-18T00:07:39 | 2020-10-18T00:07:39 | 300,928,285 | 1 | 2 | MIT | 2020-10-18T00:06:56 | 2020-10-03T16:41:38 | Python | UTF-8 | Python | false | false | 4,354 | py | def __remove_extra_declarations(module_string, replaced_wires, debug=False):
buffer = module_string[:module_string.index(");") + 2] + "\n"
body_string = module_string[module_string.index(");") + 2:]
statements = body_string.split(";")
# remove the previous declarations of any new enums
for statement in statements:
words = statement.split()
if not words:
continue
if words[0] in ["reg", "wire", "logic"]:
if ":" in words[1]:
# wire is an array
signals = statement[statement.index("]") + 1:].split()
else:
signals = words[1:]
signals = [signal[:-1] if signal[-1] == "," else signal for signal in signals]
signals_remaining = signals[:]
for signal in signals:
if signal in replaced_wires:
signals_remaining.remove(signal)
if signals_remaining == signals:
# none of these signals were changed to enums
buffer += "{};\n".format(" ".join(words))
elif signals_remaining == []:
# all signals are declared as new enums now, so don't write anything
if debug:
print("Removed:\n{}\n\n".format(" ".join(words)))
else:
new_statement = "logic " # might as well do this
if ":" in words[1]:
# wire is an array
new_statement += words[1] + " "
for signal in signals_remaining:
new_statement += signal + ", "
# remove trailing comma from last wire
if new_statement[-2] == ",":
new_statement = new_statement[:-2]
buffer += "{};\n".format(new_statement)
if debug:
print("Replaced:\n{}\nwith\n{}\n\n".format(" ".join(words), new_statement))
else:
# don't care
buffer += "{};\n".format(" ".join(words))
# remove trailing semicolon from endmodule
if buffer[-2] == ";":
buffer = buffer[:-2] + "\n"
return buffer
def replace_localparam(module_string, debug=False):
buffer = module_string[:module_string.index(");") + 2] + "\n"
body_string = module_string[module_string.index(");") + 2:]
statements = body_string.split(";")
replaced_wires = []
for statement in statements:
words = statement.split()
if not words:
continue
if words[0] == "localparam":
new_statement = "enum int unsigned {\n"
params = []
pair_strings = "".join(words[1:]).split(",")
# get all localparam names
for pair_string in pair_strings:
param = pair_string.split("=")[0]
new_statement += param + ",\n"
params.append(param)
# remove trailing comma from last param
if new_statement[-2] == ",":
new_statement = new_statement[:-2] + "\n} "
# need to search for wires that are being assigned to these localparams,
# and declare these as the new enums
for statement_i in statements:
if "=" in statement_i or "<=" in statement_i:
statement_i = statement_i.replace("<=", "=")
words_i = statement_i.split()
if words_i[-1] in params:
wire = statement_i[:statement_i.index("=")].split()[-1]
if wire not in replaced_wires:
new_statement += wire + ", "
replaced_wires.append(wire)
else:
# don't care
pass
# remove trailing comma from last wire
if new_statement[-2] == ",":
new_statement = new_statement[:-2]
buffer += "{};\n".format(new_statement)
if debug:
print("Replaced:\n{}\nwith\n{}\n\n".format(" ".join(words), new_statement))
else:
# don't care at all about anything else
buffer += "{};\n".format(" ".join(words))
buffer = __remove_extra_declarations(buffer, replaced_wires, debug=debug)
return buffer
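# Hypothetical usage sketch (illustration only, not part of the original module):
# given the text of a Verilog module that encodes FSM states with localparam and
# assigns those states to a register, the localparam block and the register
# declaration are rewritten as a SystemVerilog enum.
if __name__ == '__main__':
    example_module = (
        "module fsm(input clk);\n"
        "reg [1:0] state;\n"
        "localparam IDLE = 2'd0, RUN = 2'd1;\n"
        "always @(posedge clk) state <= IDLE;\n"
        "endmodule\n"
    )
    print(replace_localparam(example_module, debug=True))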
| [
"[email protected]"
] | |
d63408c2d9adafeadf3ac5e64efccfc40b438cae | 025fa245d4cbffdaa422287ed2f31c4d0442ee28 | /menus/models.py | 27649dd15508def3b9d933b9bfa95ba0bc8eb771 | [
"MIT"
] | permissive | elcolie/zero-to-deploy | 01f346ca50b8ccb271faef23934abe6a487baca6 | 6191a33ef55af7c550c0e529a4e373bfe40bc014 | refs/heads/master | 2022-02-08T23:22:17.008555 | 2018-06-15T19:39:06 | 2018-06-15T19:39:06 | 137,083,690 | 0 | 0 | MIT | 2022-01-21T19:35:33 | 2018-06-12T14:28:01 | Python | UTF-8 | Python | false | false | 653 | py | from django.db import models
from djchoices import DjangoChoices, ChoiceItem
from commons.models import AbstractTimestamp
class Menu(AbstractTimestamp):
class BackType(DjangoChoices):
food = ChoiceItem(f"Food")
drink = ChoiceItem(f"Drink")
menu_type = models.CharField(max_length=15, choices=BackType.choices, default=BackType.food)
name = models.CharField(max_length=20)
image = models.ImageField(default='sr.png', upload_to='menus')
take_home = models.BooleanField(default=False)
price = models.DecimalField(max_digits=6, decimal_places=2)
def __str__(self):
return f"{self.name} {self.price}"
| [
"[email protected]"
] | |
976cddf10f6864ba5c9a7a761545d47337c3af20 | 4789ee577801e55bb6209345df6ddd1adff58aa9 | /skyline/boundary/boundary_alerters.py | 0f63c5c74e6d30f89ea03ffca79842f2fafdab45 | [
"MIT"
] | permissive | bastienboutonnet/skyline | 76767fdad5eb9b9ee9bb65bfcee05e2551061fbe | 7f19fcc7ac1177b4a0a4663d6e645be63ceea452 | refs/heads/master | 2023-04-25T01:57:17.955874 | 2021-04-11T09:20:30 | 2021-04-11T09:20:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 62,389 | py | from __future__ import division
import logging
import traceback
# @modified 20201207 - Task #3878: Add metric_trigger and alert_threshold to Boundary alerts
# hashlib not used
# import hashlib
from smtplib import SMTP
# @added 20200122: Feature #3396: http_alerter
from ast import literal_eval
import requests
import boundary_alerters
try:
import urllib2
except ImportError:
import urllib.request
import urllib.error
# @added 20191023 - Task #3290: Handle urllib2 in py3
# Branch #3262: py3
# Use urlretrieve
try:
import urllib2 as urllib
except ImportError:
from urllib import request as urllib
import re
from requests.utils import quote
from time import time
import datetime
import os.path
import sys
# @added 20181126 - Task #2742: Update Boundary
# Feature #2618: alert_slack
# Added dt, redis, gmtime and strftime
import datetime as dt
# import redis
from time import (gmtime, strftime)
# @added 20201127 - Feature #3820: HORIZON_SHARDS
from os import uname
python_version = int(sys.version_info[0])
# @modified 20201207 - Task #3878: Add metric_trigger and alert_threshold to Boundary alerts
# charset no longer used
# from email import charset
if python_version == 2:
from email.MIMEMultipart import MIMEMultipart
from email.MIMEText import MIMEText
from email.MIMEImage import MIMEImage
if python_version == 3:
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.mime.image import MIMEImage
sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir))
sys.path.insert(0, os.path.dirname(__file__))
if True:
import settings
# @added 20181126 - Task #2742: Update Boundary
# Feature #2034: analyse_derivatives
# Feature #2618: alert_slack
from skyline_functions import (
write_data_to_file, in_list,
is_derivative_metric, get_graphite_graph_image,
# @added 20191030 - Bug #3266: py3 Redis binary objects not strings
# Branch #3262: py3
# Added a single functions to deal with Redis connection and the
# charset='utf-8', decode_responses=True arguments required in py3
get_redis_conn_decoded,
# @modified 20191105 - Branch #3002: docker
# Branch #3262: py3
get_graphite_port, get_graphite_render_uri, get_graphite_custom_headers,
# @added 20200122: Feature #3396: http_alerter
get_redis_conn,
# @added 20200825 - Feature #3704: Add alert to anomalies
add_panorama_alert,
# @added 20201013 - Feature #3780: skyline_functions - sanitise_graphite_url
encode_graphite_metric_name)
# @added 20201127 - Feature #3820: HORIZON_SHARDS
try:
HORIZON_SHARDS = settings.HORIZON_SHARDS.copy()
except:
HORIZON_SHARDS = {}
this_host = str(uname()[1])
HORIZON_SHARD = 0
if HORIZON_SHARDS:
HORIZON_SHARD = HORIZON_SHARDS[this_host]
skyline_app = 'boundary'
skyline_app_logger = '%sLog' % skyline_app
logger = logging.getLogger(skyline_app_logger)
skyline_app_logfile = '%s/%s.log' % (settings.LOG_PATH, skyline_app)
"""
Create any alerter you want here. The function is invoked from trigger_alert.
7 arguments will be passed in as strings:
datapoint, metric_name, expiration_time, metric_trigger, algorithm, metric_timestamp, alert_threshold
(alert_hipchat is passed the first six of these only)
"""
# FULL_DURATION to hours so that Boundary surfaces the relevant timeseries data
# in the graph
try:
full_duration_seconds = int(settings.FULL_DURATION)
except:
full_duration_seconds = 86400
full_duration_in_hours = full_duration_seconds / 60 / 60
try:
graphite_previous_hours = int(settings.BOUNDARY_SMTP_OPTS['graphite_previous_hours'])
except:
graphite_previous_hours = full_duration_in_hours
try:
graphite_graph_line_color = int(settings.BOUNDARY_SMTP_OPTS['graphite_graph_line_color'])
except:
graphite_graph_line_color = 'pink'
# @added 20200122 - Branch #3002: docker
try:
DOCKER_FAKE_EMAIL_ALERTS = settings.DOCKER_FAKE_EMAIL_ALERTS
except:
DOCKER_FAKE_EMAIL_ALERTS = False
def alert_smtp(datapoint, metric_name, expiration_time, metric_trigger, algorithm, metric_timestamp, alert_threshold):
sender = settings.BOUNDARY_SMTP_OPTS['sender']
matched_namespaces = []
for namespace in settings.BOUNDARY_SMTP_OPTS['recipients']:
CHECK_MATCH_PATTERN = namespace
check_match_pattern = re.compile(CHECK_MATCH_PATTERN)
pattern_match = check_match_pattern.match(metric_name)
if pattern_match:
matched_namespaces.append(namespace)
matched_recipients = []
for namespace in matched_namespaces:
for recipients in settings.BOUNDARY_SMTP_OPTS['recipients'][namespace]:
matched_recipients.append(recipients)
def unique_noHash(seq):
seen = set()
return [x for x in seq if str(x) not in seen and not seen.add(str(x))]
recipients = unique_noHash(matched_recipients)
# Backwards compatibility
if type(recipients) is str:
recipients = [recipients]
# @added 20180524 - Task #2384: Change alerters to cc other recipients
# The alerters did send an individual email to each recipient. This would be
# more useful if one email was sent with the first smtp recipient being the
# to recipient and the subsequent recipients were add in cc.
primary_recipient = False
cc_recipients = False
if recipients:
for i_recipient in recipients:
if not primary_recipient:
primary_recipient = str(i_recipient)
if primary_recipient != i_recipient:
if not cc_recipients:
cc_recipients = str(i_recipient)
else:
new_cc_recipients = '%s,%s' % (str(cc_recipients), str(i_recipient))
cc_recipients = str(new_cc_recipients)
logger.info(
'alert_smtp - will send to primary_recipient :: %s, cc_recipients :: %s' %
(str(primary_recipient), str(cc_recipients)))
alert_algo = str(algorithm)
alert_context = alert_algo.upper()
# @added 20191008 - Feature #3194: Add CUSTOM_ALERT_OPTS to settings
try:
main_alert_title = settings.CUSTOM_ALERT_OPTS['main_alert_title']
except:
main_alert_title = 'Skyline'
try:
app_alert_context = settings.CUSTOM_ALERT_OPTS['boundary_alert_heading']
except:
app_alert_context = 'Boundary'
# @modified 20191002 - Feature #3194: Add CUSTOM_ALERT_OPTS to settings
# Use alert_context
# unencoded_graph_title = 'Skyline Boundary - %s at %s hours - %s - %s' % (
# alert_context, graphite_previous_hours, metric_name, datapoint)
# @modified 20201207 - Task #3878: Add metric_trigger and alert_threshold to Boundary alerts
# unencoded_graph_title = '%s %s - %s at %s hours - %s - %s' % (
# main_alert_title, app_alert_context, alert_context, graphite_previous_hours, metric_name, datapoint)
unencoded_graph_title = '%s %s - %s %s %s times - %s' % (
main_alert_title, app_alert_context, alert_context, str(metric_trigger),
str(alert_threshold), str(datapoint))
# @added 20181126 - Task #2742: Update Boundary
# Feature #2034: analyse_derivatives
# Added deriative functions to convert the values of metrics strictly
# increasing monotonically to their deriative products in alert graphs and
# specify it in the graph_title
known_derivative_metric = False
try:
# @modified 20180519 - Feature #2378: Add redis auth to Skyline and rebrow
# @modified 20191030 - Bug #3266: py3 Redis binary objects not strings
# Branch #3262: py3
# Use get_redis_conn_decoded
# if settings.REDIS_PASSWORD:
# # @modified 20191022 - Bug #3266: py3 Redis binary objects not strings
# # Branch #3262: py3
# # REDIS_ALERTER_CONN = redis.StrictRedis(password=settings.REDIS_PASSWORD, unix_socket_path=settings.REDIS_SOCKET_PATH)
# REDIS_ALERTER_CONN = redis.StrictRedis(password=settings.REDIS_PASSWORD, unix_socket_path=settings.REDIS_SOCKET_PATH, charset='utf-8', decode_responses=True)
# else:
# # REDIS_ALERTER_CONN = redis.StrictRedis(unix_socket_path=settings.REDIS_SOCKET_PATH)
# REDIS_ALERTER_CONN = redis.StrictRedis(unix_socket_path=settings.REDIS_SOCKET_PATH, charset='utf-8', decode_responses=True)
REDIS_ALERTER_CONN = get_redis_conn_decoded(skyline_app)
except:
logger.error('error :: alert_smtp - redis connection failed')
# @modified 20191022 - Bug #3266: py3 Redis binary objects not strings
# Branch #3262: py3
try:
derivative_metrics = list(REDIS_ALERTER_CONN.smembers('derivative_metrics'))
except:
derivative_metrics = []
redis_metric_name = '%s%s' % (settings.FULL_NAMESPACE, str(metric_name))
if redis_metric_name in derivative_metrics:
known_derivative_metric = True
if known_derivative_metric:
try:
non_derivative_monotonic_metrics = settings.NON_DERIVATIVE_MONOTONIC_METRICS
except:
non_derivative_monotonic_metrics = []
skip_derivative = in_list(redis_metric_name, non_derivative_monotonic_metrics)
if skip_derivative:
known_derivative_metric = False
known_derivative_metric = is_derivative_metric(skyline_app, metric_name)
if known_derivative_metric:
# @modified 20191002 - Feature #3194: Add CUSTOM_ALERT_OPTS to settings
# unencoded_graph_title = 'Skyline Boundary - %s at %s hours - derivative graph - %s - %s' % (
# alert_context, graphite_previous_hours, metric_name, datapoint)
# @modified 20201207 - Task #3878: Add metric_trigger and alert_threshold to Boundary alerts
# unencoded_graph_title = '%s %s - %s at %s hours - derivative graph - %s - %s' % (
# main_alert_title, app_alert_context, alert_context, graphite_previous_hours, metric_name, datapoint)
unencoded_graph_title = '%s %s - %s %s %s times - derivative graph - %s' % (
main_alert_title, app_alert_context, alert_context, str(metric_trigger),
str(alert_threshold), str(datapoint))
graph_title_string = quote(unencoded_graph_title, safe='')
graph_title = '&title=%s' % graph_title_string
# @added 20181126 - Bug #2498: Incorrect scale in some graphs
# Task #2742: Update Boundary
# If -xhours is used the scale is incorrect if x hours > than first
# retention period, passing from and until renders the graph with the
# correct scale.
graphite_port = '80'
if settings.GRAPHITE_PORT != '':
graphite_port = str(settings.GRAPHITE_PORT)
until_timestamp = int(time())
from_seconds_ago = graphite_previous_hours * 3600
from_timestamp = until_timestamp - from_seconds_ago
graphite_from = dt.datetime.fromtimestamp(int(from_timestamp)).strftime('%H:%M_%Y%m%d')
logger.info('graphite_from - %s' % str(graphite_from))
graphite_until = dt.datetime.fromtimestamp(int(until_timestamp)).strftime('%H:%M_%Y%m%d')
logger.info('graphite_until - %s' % str(graphite_until))
# @modified 20191022 - Task #3294: py3 - handle system parameter in Graphite cactiStyle
# graphite_target = 'target=cactiStyle(%s)'
# @added 20201013 - Feature #3780: skyline_functions - sanitise_graphite_url
encoded_graphite_metric_name = encode_graphite_metric_name(skyline_app, metric_name)
# @modified 20201013 - Feature #3780: skyline_functions - sanitise_graphite_url
# graphite_target = 'target=cactiStyle(%s,%%27si%%27)' % metric_name
graphite_target = 'target=cactiStyle(%s,%%27si%%27)' % encoded_graphite_metric_name
if known_derivative_metric:
# @modified 20191022 - Task #3294: py3 - handle system parameter in Graphite cactiStyle
# graphite_target = 'target=cactiStyle(nonNegativeDerivative(%s))'
# @modified 20201013 - Feature #3780: skyline_functions - sanitise_graphite_url
# graphite_target = 'target=cactiStyle(nonNegativeDerivative(%s),%%27si%%27)' % metric_name
graphite_target = 'target=cactiStyle(nonNegativeDerivative(%s),%%27si%%27)' % encoded_graphite_metric_name
# @modified 20190520 - Branch #3002: docker
# Use GRAPHITE_RENDER_URI
# link = '%s://%s:%s/render/?from=%s&until=%s&%s%s%s&colorList=%s' % (
# settings.GRAPHITE_PROTOCOL, settings.GRAPHITE_HOST, graphite_port,
# str(graphite_from), str(graphite_until), graphite_target,
# settings.GRAPHITE_GRAPH_SETTINGS, graph_title,
# graphite_graph_line_color)
link = '%s://%s:%s/%s/?from=%s&until=%s&%s%s%s&colorList=%s' % (
settings.GRAPHITE_PROTOCOL, settings.GRAPHITE_HOST,
graphite_port, settings.GRAPHITE_RENDER_URI, str(graphite_from),
str(graphite_until), graphite_target, settings.GRAPHITE_GRAPH_SETTINGS,
graph_title, graphite_graph_line_color)
content_id = metric_name
image_data = None
image_file = '%s/%s.%s.%s.alert_smtp.png' % (
settings.SKYLINE_TMP_DIR, skyline_app, str(until_timestamp),
metric_name)
if settings.BOUNDARY_SMTP_OPTS.get('embed-images'):
image_data = get_graphite_graph_image(skyline_app, link, image_file)
if settings.BOUNDARY_SMTP_OPTS.get('embed-images_disabled3290'):
# @modified 20191021 - Task #3290: Handle urllib2 in py3
# Branch #3262: py3
if python_version == 2:
try:
# @modified 20170913 - Task #2160: Test skyline with bandit
# Added nosec to exclude from bandit tests
# image_data = urllib2.urlopen(link).read() # nosec
image_data = None
except urllib2.URLError:
image_data = None
if python_version == 3:
try:
# image_data = urllib.request.urlopen(link).read() # nosec
image_data = None
except:
logger.error(traceback.format_exc())
logger.error('error :: boundary_alerters :: alert_smtp :: failed to urlopen %s' % str(link))
image_data = None
# If we failed to get the image or if it was explicitly disabled,
# use the image URL instead of the content.
if image_data is None:
img_tag = '<img src="%s"/>' % link
else:
img_tag = '<img src="cid:%s"/>' % content_id
# @modified 20191002 - Feature #3194: Add CUSTOM_ALERT_OPTS to settings
# body = '%s :: %s <br> Next alert in: %s seconds <br> skyline Boundary alert - %s <br><a href="%s">%s</a>' % (
# datapoint, metric_name, expiration_time, alert_context, link, img_tag)
body = '%s :: %s <br> Next alert in: %s seconds <br> %s %s alert - %s <br><a href="%s">%s</a>' % (
main_alert_title, app_alert_context, expiration_time, datapoint, metric_name, alert_context, link, img_tag)
# @added 20200122 - Branch #3002: docker
# Do not try to alert if the settings are default
send_email_alert = True
if 'your_domain.com' in str(sender):
logger.info('alert_smtp - sender is not configured, not sending alert')
send_email_alert = False
if 'your_domain.com' in str(primary_recipient):
logger.info('alert_smtp - sender is not configured, not sending alert')
send_email_alert = False
if 'example.com' in str(sender):
logger.info('alert_smtp - sender is not configured, not sending alert')
send_email_alert = False
if 'example.com' in str(primary_recipient):
logger.info('alert_smtp - sender is not configured, not sending alert')
send_email_alert = False
if DOCKER_FAKE_EMAIL_ALERTS:
logger.info('alert_smtp - DOCKER_FAKE_EMAIL_ALERTS is set to %s, not executing SMTP command' % str(DOCKER_FAKE_EMAIL_ALERTS))
send_email_alert = False
# @added 20200122 - Feature #3406: Allow for no_email SMTP_OPTS
no_email = False
if str(sender) == 'no_email':
send_email_alert = False
no_email = True
if str(primary_recipient) == 'no_email':
send_email_alert = False
no_email = True
if no_email:
logger.info('alert_smtp - no_email is set in BOUNDARY_SMTP_OPTS, not executing SMTP command')
# @modified 20180524 - Task #2384: Change alerters to cc other recipients
# Do not send to each recipient, send to primary_recipient and cc the other
# recipients, thereby sending only one email
# for recipient in recipients:
# @modified 20200122 - Feature #3406: Allow for no_email SMTP_OPTS
# if primary_recipient:
if primary_recipient and send_email_alert:
logger.info(
'alert_smtp - will send to primary_recipient :: %s, cc_recipients :: %s' %
(str(primary_recipient), str(cc_recipients)))
msg = MIMEMultipart('alternative')
# @modified 20191002 - Feature #3194: Add CUSTOM_ALERT_OPTS to settings
# msg['Subject'] = '[Skyline alert] ' + 'Boundary ALERT - ' + alert_context + ' - ' + datapoint + ' - ' + metric_name
# @modified 20201207 - Task #3878: Add metric_trigger and alert_threshold to Boundary alerts
# msg['Subject'] = '[' + main_alert_title + ' alert] ' + app_alert_context + ' ALERT - ' + alert_context + ' - ' + datapoint + ' - ' + metric_name
email_subject = '[%s alert] %s ALERT - %s - %s' % (
main_alert_title, app_alert_context, alert_context, metric_name)
msg['Subject'] = email_subject
msg['From'] = sender
# @modified 20180524 - Task #2384: Change alerters to cc other recipients
# msg['To'] = recipient
msg['To'] = primary_recipient
# @added 20180524 - Task #2384: Change alerters to cc other recipients
# Added Cc
if cc_recipients:
msg['Cc'] = cc_recipients
msg.attach(MIMEText(body, 'html'))
if image_data is not None:
# msg_attachment = MIMEImage(image_data)
fp = open(image_file, 'rb')
msg_attachment = MIMEImage(fp.read())
fp.close()
msg_attachment.add_header('Content-ID', '<%s>' % content_id)
msg.attach(msg_attachment)
s = SMTP('127.0.0.1')
# @modified 20180524 - Task #2384: Change alerters to cc other recipients
# Send to primary_recipient and cc_recipients
# s.sendmail(sender, recipient, msg.as_string())
try:
if cc_recipients:
s.sendmail(sender, [primary_recipient, cc_recipients], msg.as_string())
else:
s.sendmail(sender, primary_recipient, msg.as_string())
except:
logger.error(traceback.format_exc())
logger.error(
'error :: alert_smtp - could not send email to primary_recipient :: %s, cc_recipients :: %s' %
(str(primary_recipient), str(cc_recipients)))
s.quit()
# @added 20200825 - Feature #3704: Add alert to anomalies
if settings.PANORAMA_ENABLED:
added_panorama_alert_event = add_panorama_alert(skyline_app, int(metric_timestamp), metric_name)
if not added_panorama_alert_event:
logger.error(
'error :: failed to add Panorama alert event - panorama.alert.%s.%s' % (
str(metric_timestamp), metric_name))
def alert_pagerduty(datapoint, metric_name, expiration_time, metric_trigger, algorithm, metric_timestamp, alert_threshold):
if settings.PAGERDUTY_ENABLED:
import pygerduty
pager = pygerduty.PagerDuty(settings.BOUNDARY_PAGERDUTY_OPTS['subdomain'], settings.BOUNDARY_PAGERDUTY_OPTS['auth_token'])
# @modified 20201207 - Task #3878: Add metric_trigger and alert_threshold to Boundary alerts
# pager.trigger_incident(settings.BOUNDARY_PAGERDUTY_OPTS['key'], 'Anomalous metric: %s (value: %s) - %s' % (metric_name, datapoint, algorithm))
pager.trigger_incident(settings.BOUNDARY_PAGERDUTY_OPTS['key'], 'Anomalous metric: %s (value: %s) - %s %s %s times' % (
metric_name, str(datapoint), algorithm, str(metric_trigger),
str(alert_threshold)))
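# Illustrative sketch only (placeholder values): the BOUNDARY_PAGERDUTY_OPTS
# keys read above, which would normally be defined in settings.py.
# BOUNDARY_PAGERDUTY_OPTS = {
#     'subdomain': 'example-subdomain',
#     'auth_token': 'your_pagerduty_auth_token',
#     'key': 'your_pagerduty_service_api_key',
# }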
# @added 20200825 - Feature #3704: Add alert to anomalies
if settings.PANORAMA_ENABLED:
added_panorama_alert_event = add_panorama_alert(skyline_app, int(metric_timestamp), metric_name)
if not added_panorama_alert_event:
logger.error(
'error :: failed to add Panorama alert event - panorama.alert.%s.%s' % (
str(metric_timestamp), metric_name))
else:
return False
def alert_hipchat(datapoint, metric_name, expiration_time, metric_trigger, algorithm, metric_timestamp):
if settings.HIPCHAT_ENABLED:
sender = settings.BOUNDARY_HIPCHAT_OPTS['sender']
import hipchat
hipster = hipchat.HipChat(token=settings.BOUNDARY_HIPCHAT_OPTS['auth_token'])
# Allow for absolute path metric namespaces but also allow for and match
# wildcard namespaces if there is not an absolute path metric namespace
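# Illustrative sketch only (placeholder metric names and room ids): the
# 'rooms' dict can be keyed by an absolute metric name or by a regex pattern,
# each mapping to an iterable of room ids, e.g.
# BOUNDARY_HIPCHAT_OPTS['rooms'] = {
#     'stats.web01.cpu.user': (12345,),
#     r'stats\..*': (12345, 67890),
# }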
rooms = 'unknown'
notify_rooms = []
matched_rooms = []
try:
rooms = settings.BOUNDARY_HIPCHAT_OPTS['rooms'][metric_name]
notify_rooms.append(rooms)
except:
for room in settings.BOUNDARY_HIPCHAT_OPTS['rooms']:
CHECK_MATCH_PATTERN = room
check_match_pattern = re.compile(CHECK_MATCH_PATTERN)
pattern_match = check_match_pattern.match(metric_name)
if pattern_match:
matched_rooms.append(room)
if matched_rooms != []:
for i_metric_name in matched_rooms:
rooms = settings.BOUNDARY_HIPCHAT_OPTS['rooms'][i_metric_name]
notify_rooms.append(rooms)
alert_algo = str(algorithm)
alert_context = alert_algo.upper()
unencoded_graph_title = 'Skyline Boundary - %s at %s hours - %s - %s' % (
alert_context, graphite_previous_hours, metric_name, datapoint)
graph_title_string = quote(unencoded_graph_title, safe='')
graph_title = '&title=%s' % graph_title_string
# @modified 20170706 - Support #2072: Make Boundary hipchat alerts show fixed timeframe
graphite_now = int(time())
target_seconds = int((graphite_previous_hours * 60) * 60)
from_timestamp = str(graphite_now - target_seconds)
until_timestamp = str(graphite_now)
graphite_from = datetime.datetime.fromtimestamp(int(from_timestamp)).strftime('%H:%M_%Y%m%d')
graphite_until = datetime.datetime.fromtimestamp(int(until_timestamp)).strftime('%H:%M_%Y%m%d')
if settings.GRAPHITE_PORT != '':
# link = '%s://%s:%s/render/?from=-%shours&target=cactiStyle(%s)%s%s&colorList=%s' % (
# settings.GRAPHITE_PROTOCOL, settings.GRAPHITE_HOST, settings.GRAPHITE_PORT,
# graphite_previous_hours, metric_name, settings.GRAPHITE_GRAPH_SETTINGS,
# @modified 20190520 - Branch #3002: docker
# Use GRAPHITE_RENDER_URI
# link = '%s://%s:%s/render/?from=%s&until=%s&target=cactiStyle(%s)%s%s&colorList=%s' % (
# settings.GRAPHITE_PROTOCOL, settings.GRAPHITE_HOST, settings.GRAPHITE_PORT,
# graphite_from, graphite_until, metric_name, settings.GRAPHITE_GRAPH_SETTINGS,
# graph_title, graphite_graph_line_color)
# @modified 20200417 - Task #3294: py3 - handle system parameter in Graphite cactiStyle
# link = '%s://%s:%s/%s/?from=%s&until=%s&target=cactiStyle(%s)%s%s&colorList=%s' % (
link = '%s://%s:%s/%s/?from=%s&until=%s&target=cactiStyle(%s,%%27si%%27)%s%s&colorList=%s' % (
settings.GRAPHITE_PROTOCOL, settings.GRAPHITE_HOST, settings.GRAPHITE_PORT,
settings.GRAPHITE_RENDER_URI, graphite_from, graphite_until,
metric_name, settings.GRAPHITE_GRAPH_SETTINGS, graph_title,
graphite_graph_line_color)
else:
# link = '%s://%s/render/?from=-%shour&target=cactiStyle(%s)%s%s&colorList=%s' % (
# settings.GRAPHITE_PROTOCOL, settings.GRAPHITE_HOST, graphite_previous_hours,
# @modified 20190520 - Branch #3002: docker
# Use GRAPHITE_RENDER_URI
# link = '%s://%s/render/?from=%s&until=%s&target=cactiStyle(%s)%s%s&colorList=%s' % (
# settings.GRAPHITE_PROTOCOL, settings.GRAPHITE_HOST, graphite_from, graphite_until,
# metric_name, settings.GRAPHITE_GRAPH_SETTINGS, graph_title,
# graphite_graph_line_color)
# @modified 20200417 - Task #3294: py3 - handle system parameter in Graphite cactiStyle
# link = '%s://%s/%s/?from=%s&until=%s&target=cactiStyle(%s)%s%s&colorList=%s' % (
link = '%s://%s/%s/?from=%s&until=%s&target=cactiStyle(%s,%%27si%%27)%s%s&colorList=%s' % (
settings.GRAPHITE_PROTOCOL, settings.GRAPHITE_HOST,
settings.GRAPHITE_RENDER_URI, graphite_from, graphite_until,
metric_name, settings.GRAPHITE_GRAPH_SETTINGS, graph_title,
graphite_graph_line_color)
embed_graph = "<a href='" + link + "'><img height='308' src='" + link + "'>" + metric_name + "</a>"
for rooms in notify_rooms:
for room in rooms:
hipster.method('rooms/message', method='POST', parameters={'room_id': room, 'from': 'skyline', 'color': settings.BOUNDARY_HIPCHAT_OPTS['color'], 'message': '%s - Boundary - %s - Anomalous metric: %s (value: %s) at %s hours %s' % (sender, algorithm, metric_name, datapoint, graphite_previous_hours, embed_graph)})
else:
return False
def alert_syslog(datapoint, metric_name, expiration_time, metric_trigger, algorithm, metric_timestamp, alert_threshold):
if settings.SYSLOG_ENABLED:
import sys
import syslog
syslog_ident = settings.SYSLOG_OPTS['ident']
# @modified 20201207 - Task #3878: Add metric_trigger and alert_threshold to Boundary alerts
# message = str('Boundary - Anomalous metric: %s (value: %s) - %s' % (metric_name, datapoint, algorithm))
message = 'Boundary - Anomalous metric: %s (value: %s) - %s with %s %s times' % (
metric_name, str(datapoint), algorithm, str(metric_trigger),
str(alert_threshold))
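# For illustration only, with placeholder values, the message built above
# renders as something like:
# Boundary - Anomalous metric: stats.web01.cpu.user (value: 95.0) - greater_than with 90 1 times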
if sys.version_info[:2] == (2, 6):
syslog.openlog(syslog_ident, syslog.LOG_PID, syslog.LOG_LOCAL4)
elif sys.version_info[:2] == (2, 7):
syslog.openlog(ident='skyline', logoption=syslog.LOG_PID, facility=syslog.LOG_LOCAL4)
elif sys.version_info[:1] == (3,):
syslog.openlog(ident='skyline', logoption=syslog.LOG_PID, facility=syslog.LOG_LOCAL4)
else:
syslog.openlog(syslog_ident, syslog.LOG_PID, syslog.LOG_LOCAL4)
syslog.syslog(4, message)
else:
return False
# @added 20181126 - Task #2742: Update Boundary
# Feature #2618: alert_slack
def alert_slack(datapoint, metric_name, expiration_time, metric_trigger, algorithm, metric_timestamp, alert_threshold):
if not settings.SLACK_ENABLED:
return False
# @modified 20200701 - Task #3612: Upgrade to slack v2
# Task #3608: Update Skyline to Python 3.8.3 and deps
# Task #3556: Update deps
# slackclient v2 has a version function, < v2 does not
# from slackclient import SlackClient
try:
from slack import version as slackVersion
slack_version = slackVersion.__version__
except:
slack_version = '1.3'
if slack_version == '1.3':
from slackclient import SlackClient
else:
from slack import WebClient
metric = metric_name
logger.info('alert_slack - anomalous metric :: metric: %s - %s' % (metric, algorithm))
base_name = metric
alert_algo = str(algorithm)
alert_context = alert_algo.upper()
# The known_derivative_metric state is determined in case we need to surface
# the png image from Graphite if the Ionosphere image is not available for
# some reason. This will result in Skyline at least still sending an alert
# to slack, even if some gear fails in Ionosphere or slack alerting is used
# without Ionosphere enabled. Yes not DRY but multiprocessing and spawn
# safe.
known_derivative_metric = False
# try:
# if settings.REDIS_PASSWORD:
# # @modified 20191022 - Bug #3266: py3 Redis binary objects not strings
# # Branch #3262: py3
# # REDIS_ALERTER_CONN = redis.StrictRedis(password=settings.REDIS_PASSWORD, unix_socket_path=settings.REDIS_SOCKET_PATH)
# REDIS_ALERTER_CONN = redis.StrictRedis(password=settings.REDIS_PASSWORD, unix_socket_path=settings.REDIS_SOCKET_PATH, charset='utf-8', decode_responses=True)
# else:
# # REDIS_ALERTER_CONN = redis.StrictRedis(unix_socket_path=settings.REDIS_SOCKET_PATH)
# REDIS_ALERTER_CONN = redis.StrictRedis(unix_socket_path=settings.REDIS_SOCKET_PATH, charset='utf-8', decode_responses=True)
# except:
# logger.error('error :: alert_slack - redis connection failed')
# try:
# derivative_metrics = list(REDIS_ALERTER_CONN.smembers('derivative_metrics'))
# except:
# derivative_metrics = []
# @modified 20201207 - Task #3878: Add metric_trigger and alert_threshold to Boundary alerts
# redis_metric_name not used
# redis_metric_name = '%s%s' % (settings.FULL_NAMESPACE, str(base_name))
# if redis_metric_name in derivative_metrics:
# known_derivative_metric = True
known_derivative_metric = is_derivative_metric(skyline_app, str(base_name))
# if known_derivative_metric:
# try:
# non_derivative_monotonic_metrics = settings.NON_DERIVATIVE_MONOTONIC_METRICS
# except:
# non_derivative_monotonic_metrics = []
# skip_derivative = in_list(redis_metric_name, non_derivative_monotonic_metrics)
# if skip_derivative:
# known_derivative_metric = False
# @added 20191008 - Feature #3194: Add CUSTOM_ALERT_OPTS to settings
try:
main_alert_title = settings.CUSTOM_ALERT_OPTS['main_alert_title']
except:
main_alert_title = 'Skyline'
try:
app_alert_context = settings.CUSTOM_ALERT_OPTS['boundary_alert_heading']
except:
app_alert_context = 'Boundary'
if known_derivative_metric:
# @modified 20191008 - Feature #3194: Add CUSTOM_ALERT_OPTS to settings
# unencoded_graph_title = 'Skyline Boundary - ALERT %s at %s hours - derivative graph - %s' % (
# alert_context, str(graphite_previous_hours), metric)
# slack_title = '*Skyline Boundary - ALERT* %s on %s at %s hours - derivative graph - %s' % (
# alert_context, metric, str(graphite_previous_hours), datapoint)
# @modified 20201207 - Task #3878: Add metric_trigger and alert_threshold to Boundary alerts
# unencoded_graph_title = '%s %s - ALERT %s at %s hours - derivative graph - %s' % (
# main_alert_title, app_alert_context, alert_context, str(graphite_previous_hours), metric)
# slack_title = '*%s %s - ALERT* %s on %s at %s hours - derivative graph - %s' % (
# main_alert_title, app_alert_context, alert_context, metric, str(graphite_previous_hours), datapoint)
unencoded_graph_title = '%s %s - ALERT %s %s %s times - derivative graph - %s' % (
main_alert_title, app_alert_context, alert_context,
str(metric_trigger), str(alert_threshold), metric)
slack_title = '*%s %s - ALERT* %s %s %s times on %s - derivative graph - %s' % (
main_alert_title, app_alert_context, alert_context,
str(metric_trigger), str(alert_threshold), metric, str(datapoint))
else:
# unencoded_graph_title = 'Skyline Boundary - ALERT %s at %s hours - %s' % (
# alert_context, str(graphite_previous_hours), metric)
# slack_title = '*Skyline Boundary - ALERT* %s on %s at %s hours - %s' % (
# alert_context, metric, str(graphite_previous_hours), datapoint)
# @modified 20201207 - Task #3878: Add metric_trigger and alert_threshold to Boundary alerts
# unencoded_graph_title = '%s %s - ALERT %s at %s hours - %s' % (
# main_alert_title, app_alert_context, alert_context, str(graphite_previous_hours), metric)
# slack_title = '*%s %s - ALERT* %s on %s at %s hours - %s' % (
# main_alert_title, app_alert_context, alert_context, metric, str(graphite_previous_hours), datapoint)
unencoded_graph_title = '%s %s - ALERT %s %s %s times - %s' % (
main_alert_title, app_alert_context, alert_context,
str(metric_trigger), str(alert_threshold), metric)
slack_title = '*%s %s - ALERT* %s %s %s times on %s - %s' % (
main_alert_title, app_alert_context, alert_context,
str(metric_trigger), str(alert_threshold), metric, str(datapoint))
graph_title_string = quote(unencoded_graph_title, safe='')
graph_title = '&title=%s' % graph_title_string
until_timestamp = int(time())
target_seconds = int((graphite_previous_hours * 60) * 60)
from_timestamp = str(until_timestamp - target_seconds)
graphite_from = dt.datetime.fromtimestamp(int(from_timestamp)).strftime('%H:%M_%Y%m%d')
logger.info('graphite_from - %s' % str(graphite_from))
graphite_until = dt.datetime.fromtimestamp(int(until_timestamp)).strftime('%H:%M_%Y%m%d')
logger.info('graphite_until - %s' % str(graphite_until))
# @added 20181025 - Feature #2618: alert_slack
# Added date and time info so you do not have to mouseover the slack
# message to determine the time at which the alert came in
timezone = strftime("%Z", gmtime())
# @modified 20181029 - Feature #2618: alert_slack
# Use the standard UNIX data format
# human_anomaly_time = dt.datetime.fromtimestamp(int(until_timestamp)).strftime('%Y-%m-%d %H:%M:%S')
human_anomaly_time = dt.datetime.fromtimestamp(int(until_timestamp)).strftime('%c')
slack_time_string = '%s %s' % (human_anomaly_time, timezone)
# @added 20191106 - Branch #3262: py3
# Branch #3002: docker
graphite_port = get_graphite_port(skyline_app)
graphite_render_uri = get_graphite_render_uri(skyline_app)
graphite_custom_headers = get_graphite_custom_headers(skyline_app)
# @added 20201013 - Feature #3780: skyline_functions - sanitise_graphite_url
encoded_graphite_metric_name = encode_graphite_metric_name(skyline_app, metric_name)
if settings.GRAPHITE_PORT != '':
if known_derivative_metric:
# @modified 20190520 - Branch #3002: docker
# Use GRAPHITE_RENDER_URI
# link = '%s://%s:%s/render/?from=%s&until=%s&target=cactiStyle(nonNegativeDerivative(%s))%s%s&colorList=orange' % (
# settings.GRAPHITE_PROTOCOL, settings.GRAPHITE_HOST,
# settings.GRAPHITE_PORT, str(graphite_from), str(graphite_until),
# metric, settings.GRAPHITE_GRAPH_SETTINGS, graph_title)
# @modified 20191022 - Task #3294: py3 - handle system parameter in Graphite cactiStyle
# link = '%s://%s:%s/%s/?from=%s&until=%s&target=cactiStyle(nonNegativeDerivative(%s))%s%s&colorList=orange' % (
link = '%s://%s:%s/%s/?from=%s&until=%s&target=cactiStyle(nonNegativeDerivative(%s),%%27si%%27)%s%s&colorList=orange' % (
settings.GRAPHITE_PROTOCOL, settings.GRAPHITE_HOST,
# @modified 20201207 - Task #3878: Add metric_trigger and alert_threshold to Boundary alerts
# Branch #3262: py3
# Branch #3002: docker
# settings.GRAPHITE_PORT, settings.GRAPHITE_RENDER_URI,
graphite_port, graphite_render_uri,
str(graphite_from), str(graphite_until),
# @modified 20201013 - Feature #3780: skyline_functions - sanitise_graphite_url
# metric, settings.GRAPHITE_GRAPH_SETTINGS, graph_title)
encoded_graphite_metric_name, settings.GRAPHITE_GRAPH_SETTINGS, graph_title)
else:
# @modified 20190520 - Branch #3002: docker
# Use GRAPHITE_RENDER_URI
# link = '%s://%s:%s/render/?from=%s&until=%s&target=cactiStyle(%s)%s%s&colorList=orange' % (
# settings.GRAPHITE_PROTOCOL, settings.GRAPHITE_HOST,
# settings.GRAPHITE_PORT, str(graphite_from), str(graphite_until),
# metric, settings.GRAPHITE_GRAPH_SETTINGS, graph_title)
# @modified 20191022 - Task #3294: py3 - handle system parameter in Graphite cactiStyle
# link = '%s://%s:%s/%s/?from=%s&until=%s&target=cactiStyle(%s)%s%s&colorList=orange' % (
link = '%s://%s:%s/%s/?from=%s&until=%s&target=cactiStyle(%s,%%27si%%27)%s%s&colorList=orange' % (
settings.GRAPHITE_PROTOCOL, settings.GRAPHITE_HOST,
# @modified 20201207 - Task #3878: Add metric_trigger and alert_threshold to Boundary alerts
# Branch #3262: py3
# Branch #3002: docker
# settings.GRAPHITE_PORT, settings.GRAPHITE_RENDER_URI,
graphite_port, graphite_render_uri,
# str(graphite_from), str(graphite_until), metric,
# @modified 20201013 - Feature #3780: skyline_functions - sanitise_graphite_url
str(graphite_from), str(graphite_until), encoded_graphite_metric_name,
settings.GRAPHITE_GRAPH_SETTINGS, graph_title)
else:
if known_derivative_metric:
# @modified 20190520 - Branch #3002: docker
# Use GRAPHITE_RENDER_URI
# link = '%s://%s/render/?from=%s&until=%s&target=cactiStyle(nonNegativeDerivative(%s))%s%s&colorList=orange' % (
# settings.GRAPHITE_PROTOCOL, settings.GRAPHITE_HOST,
# str(graphite_from), str(graphite_until), metric,
# settings.GRAPHITE_GRAPH_SETTINGS, graph_title)
# @modified 20191022 - Task #3294: py3 - handle system parameter in Graphite cactiStyle
# link = '%s://%s/%s/?from=%s&until=%s&target=cactiStyle(nonNegativeDerivative(%s))%s%s&colorList=orange' % (
link = '%s://%s/%s/?from=%s&until=%s&target=cactiStyle(nonNegativeDerivative(%s),%%27si%%27)%s%s&colorList=orange' % (
settings.GRAPHITE_PROTOCOL, settings.GRAPHITE_HOST,
settings.GRAPHITE_RENDER_URI, str(graphite_from),
# @modified 20201013 - Feature #3780: skyline_functions - sanitise_graphite_url
# str(graphite_until), metric, settings.GRAPHITE_GRAPH_SETTINGS,
str(graphite_until), encoded_graphite_metric_name, settings.GRAPHITE_GRAPH_SETTINGS,
graph_title)
else:
# @modified 20190520 - Branch #3002: docker
# Use GRAPHITE_RENDER_URI
# link = '%s://%s/render/?from=%s&until=%s&target=cactiStyle(%s)%s%s&colorList=orange' % (
# settings.GRAPHITE_PROTOCOL, settings.GRAPHITE_HOST,
# str(graphite_from), str(graphite_until), metric,
# settings.GRAPHITE_GRAPH_SETTINGS, graph_title)
# @modified 20191022 - Task #3294: py3 - handle system parameter in Graphite cactiStyle
# link = '%s://%s/%s/?from=%s&until=%s&target=cactiStyle(%s)%s%s&colorList=orange' % (
link = '%s://%s/%s/?from=%s&until=%s&target=cactiStyle(%s,%%27si%%27)%s%s&colorList=orange' % (
settings.GRAPHITE_PROTOCOL, settings.GRAPHITE_HOST,
settings.GRAPHITE_RENDER_URI, str(graphite_from),
# @modified 20201013 - Feature #3780: skyline_functions - sanitise_graphite_url
# str(graphite_until), metric, settings.GRAPHITE_GRAPH_SETTINGS,
str(graphite_until), encoded_graphite_metric_name, settings.GRAPHITE_GRAPH_SETTINGS,
graph_title)
# slack does not allow embedded images, nor will it fetch links behind
# authentication so Skyline uploads a png graphite image with the message
image_file = None
# Fetch the png from Graphite
# @modified 20191021 - Task #3290: Handle urllib2 in py3
# Branch #3262: py3
image_file = '%s/%s.%s.graphite.%sh.png' % (
settings.SKYLINE_TMP_DIR, base_name, skyline_app,
str(int(graphite_previous_hours)))
if python_version == 2:
try:
# image_data = urllib2.urlopen(link).read() # nosec
image_data = None
# except urllib2.URLError:
except:
logger.error(traceback.format_exc())
logger.error('error :: alert_slack - failed to get image graph')
logger.error('error :: alert_slack - %s' % str(link))
image_data = None
if python_version == 3:
try:
image_file = '%s/%s.%s.graphite.%sh.png' % (
settings.SKYLINE_TMP_DIR, base_name, skyline_app,
str(int(graphite_previous_hours)))
# urllib.request.urlretrieve(link, image_file)
image_data = 'retrieved'
image_data = None
except:
try:
# @added 20191022 - Task #3294: py3 - handle system parameter in Graphite cactiStyle
image_data = None
original_traceback = traceback.format_exc()
# Keep a reference to the original link so the except handler below can
# always log it, even if no cactiStyle substitution is made
original_link = link
if 'cactiStyle' in link:
metric_replace = '%s,%%27si%%27' % metric
link = link.replace(metric, metric_replace)
logger.info('link replaced with cactiStyle system parameter added - %s' % str(link))
urllib.request.urlretrieve(link, image_file)
image_data = 'retrieved'
except:
new_traceback = traceback.format_exc()
logger.error(original_traceback)
logger.error('error :: boundary_alerters :: alert_slack :: failed to urlopen %s' % str(original_link))
logger.error(new_traceback)
logger.error('error :: boundary_alerters :: alert_slack :: failed to urlopen with system parameter added %s' % str(link))
image_data = None
# @added 20191025 -
image_data = get_graphite_graph_image(skyline_app, link, image_file)
if image_data == 'disabled_for_testing':
image_file = '%s/%s.%s.graphite.%sh.png' % (
settings.SKYLINE_TMP_DIR, base_name, skyline_app,
str(int(graphite_previous_hours)))
if image_data != 'retrieved':
try:
write_data_to_file(skyline_app, image_file, 'w', image_data)
logger.info('alert_slack - added Graphite image :: %s' % (
image_file))
except:
logger.info(traceback.format_exc())
logger.error(
'error :: alert_slack - failed to add %s Graphite image' % (
image_file))
image_file = None
try:
filename = os.path.basename(image_file)
except:
filename = None
try:
bot_user_oauth_access_token = settings.BOUNDARY_SLACK_OPTS['bot_user_oauth_access_token']
except:
logger.error('error :: alert_slack - could not determine bot_user_oauth_access_token')
return False
# Allow for absolute path metric namespaces but also allow for and match
# wildcard namespaces if there is not an absolute path metric namespace
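# Illustrative sketch only (placeholder values): the BOUNDARY_SLACK_OPTS keys
# used in this function, with 'channels' keyed by an absolute metric name or
# a regex pattern, each mapping to an iterable of channel names.
# BOUNDARY_SLACK_OPTS = {
#     'bot_user_oauth_access_token': 'xoxb-placeholder-token',
#     'channels': {
#         'stats.web01.cpu.user': ('#skyline',),
#         r'stats\..*': ('#skyline', '#alerts'),
#     },
#     'icon_emoji': ':chart_with_upwards_trend:',
# }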
channels = 'unknown'
notify_channels = []
matched_channels = []
try:
channels = settings.BOUNDARY_SLACK_OPTS['channels'][metric_name]
notify_channels.append(channels)
except:
for channel in settings.BOUNDARY_SLACK_OPTS['channels']:
CHECK_MATCH_PATTERN = channel
check_match_pattern = re.compile(CHECK_MATCH_PATTERN)
pattern_match = check_match_pattern.match(metric_name)
if pattern_match:
matched_channels.append(channel)
if matched_channels != []:
for i_metric_name in matched_channels:
channels = settings.BOUNDARY_SLACK_OPTS['channels'][i_metric_name]
notify_channels.append(channels)
if not notify_channels:
logger.error('error :: alert_slack - could not determine channel')
return False
else:
channels = notify_channels
try:
icon_emoji = settings.BOUNDARY_SLACK_OPTS['icon_emoji']
except:
icon_emoji = ':chart_with_upwards_trend:'
try:
# @modified 20200701 - Task #3612: Upgrade to slack v2
# Task #3608: Update Skyline to Python 3.8.3 and deps
# Task #3556: Update deps
# sc = SlackClient(bot_user_oauth_access_token)
if slack_version == '1.3':
sc = SlackClient(bot_user_oauth_access_token)
else:
sc = WebClient(bot_user_oauth_access_token, timeout=10)
except:
logger.info(traceback.format_exc())
logger.error('error :: alert_slack - could not initiate SlackClient')
return False
# @added 20200815 - Bug #3676: Boundary slack alert errors
# Task #3608: Update Skyline to Python 3.8.3 and deps
# Task #3612: Upgrade to slack v2
# Strange only Boundary slack messages are erroring on a tuple or part
# thereof, mirage_alerters using the same method are fine???
# The server responded with: {'ok': False, 'error': 'invalid_channel', 'channel': "('#skyline'"}
# This fix handles converting tuple items into list items where the channel
# is a tuple.
channels_list = []
for channel in channels:
if type(channel) == tuple:
for ichannel in channel:
channels_list.append(str(ichannel))
else:
channels_list.append(str(channel))
if channels_list:
channels = channels_list
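# For example (illustrative value), a channels entry defined as the tuple
# ('#skyline',) is flattened to the list ['#skyline'] here, so the loop below
# posts to a channel name rather than to a stringified tuple.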
for channel in channels:
initial_comment = slack_title + ' :: <' + link + '|graphite image link>\nFor anomaly at ' + slack_time_string
# @added 20201127 - Feature #3820: HORIZON_SHARDS
# Add the origin and shard for debugging purposes
if HORIZON_SHARDS:
initial_comment = initial_comment + ' - from ' + this_host + ' (shard ' + str(HORIZON_SHARD) + ')'
try:
# slack does not allow embedded images, nor links behind authentication
# or color text, so we have to jump through all the API hoops to end up
# having to upload an image with a very basic message.
if os.path.isfile(image_file):
# @modified 20200701 - Task #3612: Upgrade to slack v2
# Task #3608: Update Skyline to Python 3.8.3 and deps
# Task #3556: Update deps
if slack_version == '1.3':
slack_file_upload = sc.api_call(
'files.upload', filename=filename, channels=channel,
initial_comment=initial_comment, file=open(image_file, 'rb'))
else:
slack_file_upload = sc.files_upload(
filename=filename, channels=channel,
initial_comment=initial_comment, file=open(image_file, 'rb'))
if not slack_file_upload['ok']:
logger.error('error :: alert_slack - failed to send slack message with file upload')
logger.error('error :: alert_slack - slack_file_upload - %s' % str(slack_file_upload))
try:
os.remove(image_file)
except OSError:
logger.error('error - failed to remove %s, continuing' % image_file)
pass
else:
send_text = initial_comment + ' :: error :: there was no graph image to upload'
send_message = sc.api_call(
'chat.postMessage',
channel=channel,
icon_emoji=icon_emoji,
text=send_text)
if not send_message['ok']:
logger.error('error :: alert_slack - failed to send slack message')
else:
logger.info('alert_slack - sent slack message')
except:
logger.info(traceback.format_exc())
logger.error('error :: alert_slack - could not upload file')
return False
# @added 20200825 - Feature #3704: Add alert to anomalies
if settings.PANORAMA_ENABLED:
added_panorama_alert_event = add_panorama_alert(skyline_app, int(metric_timestamp), metric_name)
if not added_panorama_alert_event:
logger.error(
'error :: failed to add Panorama alert event - panorama.alert.%s.%s' % (
str(metric_timestamp), metric_name))
# @added 20200122: Feature #3396: http_alerter
def alert_http(alerter, datapoint, metric_name, expiration_time, metric_trigger, algorithm, metric_timestamp, alert_threshold):
"""
Called by :func:`~trigger_alert` and sends and resends anomalies to an HTTP
endpoint.
"""
if settings.HTTP_ALERTERS_ENABLED:
alerter_name = alerter
alerter_enabled = False
try:
alerter_enabled = settings.HTTP_ALERTERS_OPTS[alerter_name]['enabled']
except:
logger.error(traceback.format_exc())
logger.error('error :: alert_http failed to determine whether alerter %s is enabled in settings.HTTP_ALERTERS_OPTS for metric %s with algorithm %s' % (
str(alerter), str(metric_name), algorithm))
if not alerter_enabled:
logger.info('alert_http - %s enabled %s, not alerting' % (
str(alerter_name), str(alerter_enabled)))
return
alerter_endpoint = False
try:
alerter_endpoint = settings.HTTP_ALERTERS_OPTS[alerter_name]['endpoint']
except:
logger.error(traceback.format_exc())
logger.error('error :: alert_http failed to determine the endpoint from settings.HTTP_ALERTERS_OPTS for alerter - %s and metric %s with algorithm %s' % (
str(alerter), str(metric_name), algorithm))
if not alerter_endpoint:
logger.error('alert_http - no endpoint set for %s, not alerting' % (
str(alerter_name)))
return
alerter_token = None
try:
alerter_token = settings.HTTP_ALERTERS_OPTS[alerter_name]['token']
except:
pass
source = 'boundary'
metric_alert_dict = {}
alert_data_dict = {}
try:
timestamp_str = str(metric_timestamp)
value_str = str(datapoint)
full_duration_str = str(int(full_duration_seconds))
expiry_str = str(expiration_time)
metric_alert_dict = {
"metric": metric_name,
"algorithm": algorithm,
"timestamp": timestamp_str,
"value": value_str,
"full_duration": full_duration_str,
"expiry": expiry_str,
# @added 20201207 - Task #3878: Add metric_trigger and alert_threshold to Boundary alerts
"metric_trigger": metric_trigger,
"alert_threshold": alert_threshold,
"source": str(source),
"token": str(alerter_token)
}
# @modified 20200302: Feature #3396: http_alerter
# Add the token as an independent entity from the alert
# alert_data_dict = {"status": {}, "data": {"alert": metric_alert_dict}}
alerter_token_str = str(alerter_token)
# @modified 20201127 - Feature #3820: HORIZON_SHARDS
# Add the origin and shard to status for debugging purposes
if not HORIZON_SHARDS:
alert_data_dict = {"status": {}, "data": {"token": alerter_token_str, "alert": metric_alert_dict}}
else:
alert_data_dict = {"status": {"origin": this_host, "shard": HORIZON_SHARD}, "data": {"token": alerter_token_str, "alert": metric_alert_dict}}
logger.info('alert_http :: alert_data_dict to send - %s' % str(alert_data_dict))
except:
logger.error(traceback.format_exc())
logger.error('error :: alert_http failed to construct the alert data for %s from alert - %s and metric - %s' % (
str(alerter_name), str(algorithm), str(metric_name)))
return
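# For illustration only (placeholder values, no HORIZON_SHARDS), the JSON
# body posted to the alerter endpoint has this shape:
# {
#     "status": {},
#     "data": {
#         "token": "None",
#         "alert": {
#             "metric": "stats.web01.cpu.user", "algorithm": "greater_than",
#             "timestamp": "1607000000", "value": "95.0",
#             "full_duration": "86400", "expiry": "3600",
#             "metric_trigger": 90, "alert_threshold": 1,
#             "source": "boundary", "token": "None"
#         }
#     }
# }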
in_resend_queue = False
redis_set = '%s.http_alerter.queue' % str(source)
resend_queue = None
previous_attempts = 0
REDIS_HTTP_ALERTER_CONN_DECODED = get_redis_conn_decoded(skyline_app)
try:
resend_queue = REDIS_HTTP_ALERTER_CONN_DECODED.smembers(redis_set)
except Exception as e:
logger.error('error :: alert_http :: could not query Redis for %s - %s' % (redis_set, e))
if REDIS_HTTP_ALERTER_CONN_DECODED:
try:
del REDIS_HTTP_ALERTER_CONN_DECODED
except:
pass
if resend_queue:
try:
for index, resend_item in enumerate(resend_queue):
resend_item_list = literal_eval(resend_item)
# resend_alert = literal_eval(resend_item_list[0])
# resend_metric = literal_eval(resend_item_list[1])
resend_metric_alert_dict = literal_eval(resend_item_list[2])
if resend_metric_alert_dict['metric'] == metric_name:
if int(resend_metric_alert_dict['timestamp']) == int(metric_timestamp):
previous_attempts = int(resend_metric_alert_dict['attempts'])
in_resend_queue = True
break
except:
logger.error(traceback.format_exc())
logger.error('error :: alert_http failed to iterate the resend_queue')
# REDIS_HTTP_ALERTER_CONN = None
# if in_resend_queue:
# REDIS_HTTP_ALERTER_CONN = get_redis_conn(skyline_app)
REDIS_HTTP_ALERTER_CONN = get_redis_conn(skyline_app)
add_to_resend_queue = False
fail_alerter = False
if alert_data_dict and alerter_endpoint:
# @modified 20200403 - Feature #3396: http_alerter
# Changed timeouts from 2, 2 to 5, 20
connect_timeout = 5
read_timeout = 20
if requests.__version__ >= '2.4.0':
use_timeout = (int(connect_timeout), int(read_timeout))
else:
use_timeout = int(connect_timeout)
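# Hedged note: for requests >= 2.4.0 a timeout tuple is interpreted as
# (connect timeout, read timeout) seconds, equivalent to e.g.
# requests.post(alerter_endpoint, json=alert_data_dict, timeout=(5, 20))
# Older versions only accept a single timeout value. Also note the string
# comparison of requests.__version__ above is lexicographic, so a version
# such as 2.10.0 would compare as less than 2.4.0.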
if settings.ENABLE_DEBUG:
logger.debug('debug :: use_timeout - %s' % (str(use_timeout)))
response = None
try:
response = requests.post(alerter_endpoint, json=alert_data_dict, timeout=use_timeout)
except:
logger.error(traceback.format_exc())
logger.error('error :: failed to post alert to %s - %s' % (
str(alerter_name), str(alert_data_dict)))
add_to_resend_queue = True
response = None
if in_resend_queue:
try:
REDIS_HTTP_ALERTER_CONN.srem(redis_set, str(resend_item))
logger.info('alert_http :: alert removed from %s' % (
str(redis_set)))
except:
logger.error(traceback.format_exc())
logger.error('error :: alert_http :: failed to remove %s from Redis set %s' % (
str(resend_item), redis_set))
# @added 20200310 - Feature #3396: http_alerter
# When the response code is 401 the response object appears to be
# False, although the response.code and response.reason are set
try:
if response.status_code != 200:
logger.error('error :: alert_http :: %s %s responded with status code %s and reason %s' % (
str(alerter_name), str(alerter_endpoint),
str(response.status_code), str(response.reason)))
add_to_resend_queue = True
fail_alerter = True
except:
logger.error(traceback.format_exc())
logger.error('error :: alert_http :: failed determine response.status_code')
if response:
if response.status_code == 200:
logger.info('alert_http :: alert sent to %s - %s' % (
str(alerter_endpoint), str(alert_data_dict)))
if in_resend_queue:
logger.info('alert_http :: alert removed from %s after %s attempts to send' % (
str(redis_set), str(previous_attempts)))
try:
del REDIS_HTTP_ALERTER_CONN
except:
pass
# @added 20200825 - Feature #3704: Add alert to anomalies
if settings.PANORAMA_ENABLED:
added_panorama_alert_event = add_panorama_alert(skyline_app, int(metric_timestamp), metric_name)
if not added_panorama_alert_event:
logger.error(
'error :: failed to add Panorama alert event - panorama.alert.%s.%s' % (
str(metric_timestamp), metric_name))
return
else:
logger.error('error :: alert_http :: %s %s responded with status code %s and reason %s' % (
str(alerter_name), str(alerter_endpoint),
str(response.status_code), str(response.reason)))
add_to_resend_queue = True
fail_alerter = True
else:
logger.error('error :: alert_http :: %s %s did not respond' % (
str(alerter_name), str(alerter_endpoint)))
add_to_resend_queue = True
fail_alerter = True
number_of_send_attempts = previous_attempts + 1
metric_alert_dict['attempts'] = number_of_send_attempts
if add_to_resend_queue:
data = [alerter, datapoint, metric_name, expiration_time, metric_trigger, algorithm, metric_timestamp, str(metric_alert_dict)]
logger.info('alert_http :: adding alert to %s after %s attempts to send - %s' % (
str(redis_set), str(number_of_send_attempts), str(metric_alert_dict)))
try:
# redis_conn.sadd(redis_set, str(metric_alert_dict))
REDIS_HTTP_ALERTER_CONN.sadd(redis_set, str(data))
except:
logger.error(traceback.format_exc())
logger.error('error :: alert_http :: failed to add %s to Redis set %s' % (
str(metric_alert_dict), redis_set))
# Create a Redis key if there was a bad or no response from the
# alerter_endpoint, to ensure that Boundary does not loop through
# every alert in the queue for an alerter_endpoint, if the
# alerter_endpoint is down
if fail_alerter:
alerter_endpoint_cache_key = 'http_alerter.down.%s' % str(alerter_name)
logger.error('error :: alert_http :: alerter_endpoint %s failed adding Redis key %s' % (
str(alerter_endpoint), str(alerter_endpoint_cache_key)))
if REDIS_HTTP_ALERTER_CONN:
try:
failed_timestamp = int(time())
REDIS_HTTP_ALERTER_CONN.setex(alerter_endpoint_cache_key, 60, failed_timestamp)
except:
logger.error(traceback.format_exc())
logger.error('error :: failed to set Redis key %s' % alerter_endpoint_cache_key)
try:
del REDIS_HTTP_ALERTER_CONN
except:
pass
else:
logger.info('alert_http :: settings.HTTP_ALERTERS_ENABLED is not enabled, nothing to do')
return
# @modified 20201207 - Task #3878: Add metric_trigger and alert_threshold to Boundary alerts
# def trigger_alert(alerter, datapoint, metric_name, expiration_time, metric_trigger, algorithm, metric_timestamp):
def trigger_alert(alerter, datapoint, metric_name, expiration_time, metric_trigger, algorithm, metric_timestamp, alert_threshold):
if alerter == 'smtp':
strategy = 'alert_smtp'
# @added 20200122: Feature #3396: http_alerter
# Added http_alerter
elif 'http_alerter' in alerter:
strategy = 'alert_http'
else:
strategy = 'alert_%s' % alerter
try:
if strategy == 'alert_http':
# @modified 20201207 - Task #3878: Add metric_trigger and alert_threshold to Boundary alerts
getattr(boundary_alerters, strategy)(alerter, datapoint, metric_name, expiration_time, metric_trigger, algorithm, metric_timestamp, alert_threshold)
else:
getattr(boundary_alerters, strategy)(datapoint, metric_name, expiration_time, metric_trigger, algorithm, metric_timestamp, alert_threshold)
except:
logger.error(traceback.format_exc())
logger.error('error :: alerters - %s - getattr error' % strategy)
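# Illustrative usage only (placeholder values): how a caller would invoke
# trigger_alert with the modified signature above, e.g. for an smtp alert
# on a metric that breached a greater_than trigger of 90, 1 times.
# trigger_alert(
#     'smtp', 95.0, 'stats.web01.cpu.user', 3600, 90, 'greater_than',
#     1607000000, 1)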