blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
sequencelengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
sequencelengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f564700cdfd0e1bf8abdaccfd4573018373190c7 | 64bf39b96a014b5d3f69b3311430185c64a7ff0e | /intro-ansible/venv3/lib/python3.8/site-packages/ansible_collections/fortinet/fortimanager/plugins/modules/fmgr_system_locallog_syslogd_setting.py | a18bf7256e012bbdd51940685d4d6cc86acf3d10 | [
"MIT"
] | permissive | SimonFangCisco/dne-dna-code | 7072eba7da0389e37507b7a2aa5f7d0c0735a220 | 2ea7d4f00212f502bc684ac257371ada73da1ca9 | refs/heads/master | 2023-03-10T23:10:31.392558 | 2021-02-25T15:04:36 | 2021-02-25T15:04:36 | 342,274,373 | 0 | 0 | MIT | 2021-02-25T14:39:22 | 2021-02-25T14:39:22 | null | UTF-8 | Python | false | false | 12,332 | py | #!/usr/bin/python
from __future__ import absolute_import, division, print_function
# Copyright 2019-2020 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fmgr_system_locallog_syslogd_setting
short_description: Settings for remote syslog server.
description:
- This module is able to configure a FortiManager device.
- Examples include all parameters and values which need to be adjusted to data sources before usage.
version_added: "2.10"
author:
- Link Zheng (@chillancezen)
- Jie Xue (@JieX19)
- Frank Shen (@fshen01)
- Hongbin Lu (@fgtdev-hblu)
notes:
- Running in workspace locking mode is supported in this FortiManager module, the top
level parameters workspace_locking_adom and workspace_locking_timeout help do the work.
- To create or update an object, use state present directive.
- To delete an object, use state absent directive.
- Normally, running one module can fail when a non-zero rc is returned. you can also override
the conditions to fail or succeed with parameters rc_failed and rc_succeeded
options:
bypass_validation:
description: only set to True when module schema diffs with FortiManager API structure, module continues to execute without validating parameters
required: false
type: bool
default: false
workspace_locking_adom:
description: the adom to lock for FortiManager running in workspace mode, the value can be global and others including root
required: false
type: str
workspace_locking_timeout:
description: the maximum time in seconds to wait for other user to release the workspace lock
required: false
type: int
default: 300
state:
description: the directive to create, update or delete an object
type: str
required: true
choices:
- present
- absent
rc_succeeded:
description: the rc codes list with which the conditions to succeed will be overriden
type: list
required: false
rc_failed:
description: the rc codes list with which the conditions to fail will be overriden
type: list
required: false
system_locallog_syslogd_setting:
description: the top level parameters set
required: false
type: dict
suboptions:
csv:
type: str
default: 'disable'
description:
- 'CSV format.'
- 'disable - Disable CSV format.'
- 'enable - Enable CSV format.'
choices:
- 'disable'
- 'enable'
facility:
type: str
default: 'local7'
description:
- 'Remote syslog facility.'
- 'kernel - Kernel messages.'
- 'user - Random user-level messages.'
- 'ntp - NTP daemon.'
- 'audit - Log audit.'
- 'alert - Log alert.'
- 'clock - Clock daemon.'
- 'mail - Mail system.'
- 'daemon - System daemons.'
- 'auth - Security/authorization messages.'
- 'syslog - Messages generated internally by syslog daemon.'
- 'lpr - Line printer subsystem.'
- 'news - Network news subsystem.'
- 'uucp - Network news subsystem.'
- 'cron - Clock daemon.'
- 'authpriv - Security/authorization messages (private).'
- 'ftp - FTP daemon.'
- 'local0 - Reserved for local use.'
- 'local1 - Reserved for local use.'
- 'local2 - Reserved for local use.'
- 'local3 - Reserved for local use.'
- 'local4 - Reserved for local use.'
- 'local5 - Reserved for local use.'
- 'local6 - Reserved for local use.'
- 'local7 - Reserved for local use.'
choices:
- 'kernel'
- 'user'
- 'ntp'
- 'audit'
- 'alert'
- 'clock'
- 'mail'
- 'daemon'
- 'auth'
- 'syslog'
- 'lpr'
- 'news'
- 'uucp'
- 'cron'
- 'authpriv'
- 'ftp'
- 'local0'
- 'local1'
- 'local2'
- 'local3'
- 'local4'
- 'local5'
- 'local6'
- 'local7'
severity:
type: str
default: 'notification'
description:
- 'Least severity level to log.'
- 'emergency - Emergency level.'
- 'alert - Alert level.'
- 'critical - Critical level.'
- 'error - Error level.'
- 'warning - Warning level.'
- 'notification - Notification level.'
- 'information - Information level.'
- 'debug - Debug level.'
choices:
- 'emergency'
- 'alert'
- 'critical'
- 'error'
- 'warning'
- 'notification'
- 'information'
- 'debug'
status:
type: str
default: 'disable'
description:
- 'Remote syslog log.'
- 'disable - Do not log to remote syslog server.'
- 'enable - Log to remote syslog server.'
choices:
- 'disable'
- 'enable'
syslog-name:
type: str
description: 'Remote syslog server name.'
'''
EXAMPLES = '''
- hosts: fortimanager-inventory
collections:
- fortinet.fortimanager
connection: httpapi
vars:
ansible_httpapi_use_ssl: True
ansible_httpapi_validate_certs: False
ansible_httpapi_port: 443
tasks:
- name: Settings for remote syslog server.
fmgr_system_locallog_syslogd_setting:
bypass_validation: False
workspace_locking_adom: <value in [global, custom adom including root]>
workspace_locking_timeout: 300
rc_succeeded: [0, -2, -3, ...]
rc_failed: [-2, -3, ...]
system_locallog_syslogd_setting:
csv: <value in [disable, enable]>
facility: <value in [kernel, user, ntp, ...]>
severity: <value in [emergency, alert, critical, ...]>
status: <value in [disable, enable]>
syslog-name: <value of string>
'''
RETURN = '''
request_url:
description: The full url requested
returned: always
type: str
sample: /sys/login/user
response_code:
description: The status of api request
returned: always
type: int
sample: 0
response_message:
description: The descriptive message of the api response
type: str
returned: always
sample: OK.
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible_collections.fortinet.fortimanager.plugins.module_utils.napi import NAPIManager
from ansible_collections.fortinet.fortimanager.plugins.module_utils.napi import check_galaxy_version
from ansible_collections.fortinet.fortimanager.plugins.module_utils.napi import check_parameter_bypass
def main():
    """Ansible module entry point.

    Builds the argument spec for fmgr_system_locallog_syslogd_setting,
    validates the supplied parameters and delegates the FortiManager API
    interaction to NAPIManager over the httpapi connection plugin.
    """
    # Collection-level endpoint for the locallog syslogd settings object.
    jrpc_urls = [
        '/cli/global/system/locallog/syslogd/setting'
    ]
    # Per-object endpoint template used for object-scoped operations.
    perobject_jrpc_urls = [
        '/cli/global/system/locallog/syslogd/setting/{setting}'
    ]
    url_params = []
    # The settings object is a singleton, so there is no primary key.
    module_primary_key = None
    # Argument spec mirroring the DOCUMENTATION block above.
    module_arg_spec = {
        'bypass_validation': {
            'type': 'bool',
            'required': False,
            'default': False
        },
        'workspace_locking_adom': {
            'type': 'str',
            'required': False
        },
        'workspace_locking_timeout': {
            'type': 'int',
            'required': False,
            'default': 300
        },
        'rc_succeeded': {
            'required': False,
            'type': 'list'
        },
        'rc_failed': {
            'required': False,
            'type': 'list'
        },
        'system_locallog_syslogd_setting': {
            'required': False,
            'type': 'dict',
            'options': {
                'csv': {
                    'required': False,
                    'choices': [
                        'disable',
                        'enable'
                    ],
                    'type': 'str'
                },
                'facility': {
                    'required': False,
                    'choices': [
                        'kernel',
                        'user',
                        'ntp',
                        'audit',
                        'alert',
                        'clock',
                        'mail',
                        'daemon',
                        'auth',
                        'syslog',
                        'lpr',
                        'news',
                        'uucp',
                        'cron',
                        'authpriv',
                        'ftp',
                        'local0',
                        'local1',
                        'local2',
                        'local3',
                        'local4',
                        'local5',
                        'local6',
                        'local7'
                    ],
                    'type': 'str'
                },
                'severity': {
                    'required': False,
                    'choices': [
                        'emergency',
                        'alert',
                        'critical',
                        'error',
                        'warning',
                        'notification',
                        'information',
                        'debug'
                    ],
                    'type': 'str'
                },
                'status': {
                    'required': False,
                    'choices': [
                        'disable',
                        'enable'
                    ],
                    'type': 'str'
                },
                'syslog-name': {
                    'required': False,
                    'type': 'str'
                }
            }
        }
    }
    params_validation_blob = []
    check_galaxy_version(module_arg_spec)
    # check_parameter_bypass strips schema validation when the user sets
    # bypass_validation=true (see option documentation above).
    module = AnsibleModule(argument_spec=check_parameter_bypass(module_arg_spec, 'system_locallog_syslogd_setting'),
                           supports_check_mode=False)
    fmgr = None
    if module._socket_path:
        connection = Connection(module._socket_path)
        fmgr = NAPIManager(jrpc_urls, perobject_jrpc_urls, module_primary_key, url_params, module, connection, top_level_schema_name='data')
        fmgr.validate_parameters(params_validation_blob)
        fmgr.process_partial_curd()
    else:
        # No persistent connection socket means the module was not run
        # through the httpapi connection plugin, which is required.
        module.fail_json(msg='MUST RUN IN HTTPAPI MODE')
    module.exit_json(meta=module.params)
| [
"[email protected]"
] | |
9957cfa6dcae5f7c12edbbbee6687a70d54e3523 | 0fc2b99fd8414dbce5f1f6057b9b800c968d5d05 | /lpbio/swarm/__init__.py | 293bff76afe1cd97181eb75f0eed192cfe8d24a1 | [
"MIT"
] | permissive | widdowquinn/lpbio | 9df898cb9580f62da1f66d5736cbf7a984633561 | 8b95642396d05a56c1c54389e3de6d88d7cbffb5 | refs/heads/master | 2020-03-29T02:08:56.675473 | 2019-11-07T14:27:44 | 2019-11-07T14:27:44 | 149,422,654 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,395 | py | # -*- coding: utf-8 -*-
"""Code for interaction with the Swarm clustering tool."""
import os
import shlex
import shutil
import subprocess
from collections import namedtuple
from lpbio import LPBioNotExecutableError, is_exe
class SwarmError(Exception):
    """Exception raised when swarm fails."""

    def __init__(self, msg):
        """Instantiate with a description of the failure.

        - msg - human-readable description of what went wrong
        """
        # Pass the message to the Exception base class so that str(exc)
        # and tracebacks show it; the original implementation skipped
        # this, leaving str(SwarmError(...)) empty.
        super().__init__(msg)
        # Kept for backward compatibility with callers reading .message.
        self.message = msg
# Factory class for Swarm run return values: the command line executed,
# the output filename produced, and the captured stdout/stderr streams.
SwarmRun = namedtuple("SwarmRun", "command outfilename stdout stderr")
# Factory class for Swarm parameter values; field names map directly to
# the single-letter swarm command-line flags -t and -d (see build_cmd).
SwarmParameters = namedtuple("SwarmParameters", "t d")
# Both parameters default to 1 when not supplied.
SwarmParameters.__new__.__defaults__ = (1, 1)
def build_cmd(infname, outfname, parameters):
    """Return the swarm command line as a list of arguments.

    - infname - path to the input sequence file
    - outfname - path for swarm's output file
    - parameters - namedtuple whose fields are single-letter swarm flags;
      fields whose value is None are omitted

    NOTE(review): each "-flag value" pair is emitted as a single list
    element (matching the original behaviour) — confirm swarm's option
    parser accepts that form when run without a shell.
    """
    options = []
    for flag, value in parameters._asdict().items():
        if value is None:
            continue
        options.append(
            "-{0} {1}".format(shlex.quote(str(flag)), shlex.quote(str(value)))
        )
    return ["swarm"] + options + ["-o", shlex.quote(outfname), shlex.quote(infname)]
class Swarm(object):
    """Class for working with SWARM."""

    def __init__(self, exe_path):
        """Instantiate with the location (or bare name) of the executable.

        - exe_path - path to, or name of, the swarm binary

        Raises LPBioNotExecutableError when the program cannot be found
        on the PATH or is not executable.
        """
        located = shutil.which(exe_path)
        if located is None:
            # shutil.which returns None when the program is absent; the
            # original code passed that None to shlex.quote and crashed
            # with a TypeError instead of raising a meaningful error.
            msg = "{0} was not found on the PATH".format(exe_path)
            raise LPBioNotExecutableError(msg)
        exe_path = shlex.quote(located)
        if not os.access(exe_path, os.X_OK):
            msg = "{0} is not an executable".format(exe_path)
            raise LPBioNotExecutableError(msg)
        # NOTE(review): the stored path is only validated here; build_cmd
        # hard-codes the "swarm" program name when composing the command.
        self._exe_path = exe_path

    def run(self, infname, outdir, parameters, dry_run=False):
        """Run swarm to cluster sequences in the passed file

        - infname    - path to sequences for clustering
        - outdir     - output directory for clustered output
        - parameters - named tuple of Swarm parameters
        - dry_run    - if True returns cmd-line but does not run

        Returns namedtuple with form:

        "command outfilename stdout stderr"

        Raises subprocess.CalledProcessError when swarm exits non-zero
        (check=True).
        """
        self.__build_cmd(infname, outdir, parameters)
        if dry_run:
            return self._cmd
        pipe = subprocess.run(
            self._cmd,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            check=True,
            shell=False,
        )
        return SwarmRun(self._cmd, self._outfname, pipe.stdout, pipe.stderr)

    def __build_cmd(self, infname, outdir, parameters):
        """Build a command-line for swarm and cache it on the instance."""
        # Output is always written to <outdir>/swarm.out.
        self._outfname = os.path.join(shlex.quote(outdir), "swarm.out")
        self._cmd = build_cmd(infname, self._outfname, parameters)
class SwarmCluster(object):
    """Describes a single Swarm cluster (one line of swarm output)."""

    def __init__(self, amplicons, parent=None):
        """Instantiate with an iterable of amplicon IDs.

        - amplicons - iterable of amplicon identifiers; each is expected
          to end in "_<abundance>" (see the abundances property)
        - parent - optional SwarmResult this cluster belongs to
        """
        self._amplicons = tuple(sorted(amplicons))
        # Always bind the attribute so that reading ._parent on a
        # parentless cluster does not raise AttributeError (the original
        # only assigned it when a parent was supplied).
        self._parent = parent

    def __len__(self):
        """Returns the number of amplicons in the cluster"""
        return len(self._amplicons)

    def __getitem__(self, item):
        """Return sequence IDs from the swarm like a list"""
        return self._amplicons[item]

    @property
    def amplicons(self):
        """The amplicons in a swarm cluster"""
        return self._amplicons

    @property
    def abundance(self):
        """Returns the total abundance of all amplicons in the cluster"""
        return sum(self.abundances)

    @property
    def abundances(self):
        """Returns a list of abundance of each amplicons in the cluster"""
        # Abundance is encoded as the final "_"-separated token of the ID.
        return [int(amp.split("_")[-1]) for amp in self._amplicons]
class SwarmResult(object):
    """Represents the clusters parsed from a single Swarm output file."""

    def __init__(self, name):
        """Instantiate with the name/path of the swarm output file."""
        self._name = name
        self._clusters = []

    def add_swarm(self, amplicons):
        """Wrap a list of amplicon IDs in a SwarmCluster and store it."""
        self._clusters.append(SwarmCluster(amplicons, self))

    def __eq__(self, other):
        """True when both results contain exactly the same swarms."""
        # Clusters store sorted amplicon tuples, so set comparison is
        # both order-independent and content-exact.
        mine = {cluster.amplicons for cluster in self._clusters}
        theirs = {cluster.amplicons for cluster in other._clusters}
        return mine == theirs

    def __len__(self):
        """Number of swarms held by this result."""
        return len(self._clusters)

    def __str__(self):
        """Human-readable summary of the result."""
        header = "\n".join(
            ["SwarmResult: {}".format(self.name), "\tSwarms: {}".format(len(self))]
        )
        lines = [
            "\t\tSwarm {}, size: {}".format(idx, len(cluster))
            for idx, cluster in enumerate(self._clusters)
        ]
        # NOTE(review): header and swarm lines are concatenated without a
        # separating newline — this reproduces the original output exactly.
        return header + "\n".join(lines)

    def __getitem__(self, item):
        """Index or slice into the stored swarm clusters."""
        return self._clusters[item]

    @property
    def swarms(self):
        """A shallow copy of the cluster list."""
        return self._clusters[:]

    @property
    def name(self):
        """The swarm result filename"""
        return self._name
class SwarmParser(object):
    """Parser for Swarm cluster output"""

    @classmethod
    def read(cls, fname):
        """Parses the passed Swarm output file into a SwarmResult.

        - fname - path to a swarm output file; one whitespace-separated
          cluster of amplicon IDs per line
        """
        # The first parameter was previously named "SwarmParser", which
        # shadowed the class itself; "cls" is the conventional name.
        result = SwarmResult(fname)
        with open(fname, "r") as swarms:
            for swarm in swarms:
                # NOTE: a blank line would yield an empty cluster; input
                # is assumed to contain one non-empty cluster per line.
                result.add_swarm(swarm.strip().split())
        return result

    def __init__(self):
        pass
| [
"[email protected]"
] | |
7f128626999fdb25a08a0b49abd7399c216ba13b | 4c83b4d7aca6bbcd15b922ad7314440fea7c9a70 | /2020-07-27_modo_horario_cp_onda1_10d/script_modo_horario_2020-03-05_fchk_1036.py | d462e860f46dceffa88ab39003f27bd308ae8ade | [] | no_license | poloplanejamento/odmatrix-joinville | 63b60a85055700698cdb590c181e7c8a4d5c7361 | be7ce0814fb9dad2d289cd836dde51baa9c0850d | refs/heads/main | 2023-01-23T11:43:45.451126 | 2020-12-10T23:17:58 | 2020-12-10T23:17:58 | 320,402,809 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,339 | py | #!/usr/bin/env python3
# Bibliotecas
from http.client import HTTPSConnection
from base64 import b64encode
import json
import csv
import pandas as pd
# Variables
projectID = "40"  # project ID, as shown in the Web frontend
c = HTTPSConnection("api.odmatrix.app")
# NOTE(review): this is an API credential committed in source — consider
# moving it to an environment variable.
userAndPass = b64encode(b"fe6b53f0280443d5bd40d5d30694f356").decode("ascii")
headers = { 'Authorization' : 'Basic %s' % userAndPass }
finall_list = []
# Date format: YYYY-MM-DD. Up to three dates in the array and the script
# seems to run without problems.
# Desired dates: 12/11, 13/11, 19/11, 20/11, 21/11, 03/03, 04/03, 05/03, 11/03 and 12/03
for date in ["2020-03-05"] :
    for ftriptype in ["microtrip","bus","private_transport"] :
        # One request per one-hour time-of-origin window over the full day.
        for ftimeorigin in ["0000_0059","0100_0159","0200_0259","0300_0359","0400_0459","0500_0559","0600_0659","0700_0759","0800_0859","0900_0959","1000_1059","1100_1159","1200_1259","1300_1359","1400_1459","1500_1559","1600_1659","1700_1759","1800_1859","1900_1959","2000_2059","2100_2159","2200_2259","2300_2359"] :
            print(ftimeorigin)
            request = "/generatematrix?format=json&project={}&date={}&ftriptype={}&ftimeorigin={}&fchk_1036=true".format(projectID, date, ftriptype, ftimeorigin)
            c.request('GET', request, headers=headers)
            res = c.getresponse()
            data = res.read()
            matrix = json.loads(data)
            # Flatten the OD matrix into one record per non-zero cell.
            for i, column in enumerate(matrix['ColumnLabels']):
                for j, row in enumerate(matrix['RowLabels']):
                    value = matrix['Data'][j][i]
                    if value == 0:
                        continue
                    full_row = {}
                    full_row['ProjectID'] = projectID
                    full_row['Date'] = date
                    full_row['TimeOrigin'] = ftimeorigin
                    full_row['Origin'] = row
                    full_row['Destination'] = column
                    full_row['Modo'] = ftriptype
                    full_row['Trips'] = value
                    finall_list.append(full_row)
                    print(full_row)
#print(finall_list)
# Pivot to one row per OD pair/mode, with one column per time window.
data = pd.DataFrame(finall_list)
final_data = pd.pivot_table(data, index=['ProjectID', 'Date', 'Origin', 'Destination', 'Modo'], columns='TimeOrigin', values='Trips')
final_data.to_csv("OD_por_modo_horario_fchk_1036_2020-03-05.csv")
"[email protected]"
] | |
10c9a1c4063a9dbc167bea682133f4b74469d7c2 | bce4a906faebfcab5db0e48ad587841d9ef3e74c | /train.py | 6b93bc7a9fa165787181e7fd0a4ad5c992af7b36 | [
"MIT"
] | permissive | scofield77/pytorch-action-recognition-toy | aecddd6da3c032ca25c2bd1facf6fc8f6d72b4e4 | de0f8820c40d09d34a61849ee572f8af37f5725d | refs/heads/master | 2020-07-20T21:39:46.453236 | 2019-03-05T00:52:39 | 2019-03-05T00:52:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,463 | py | import torch
import numpy as np
import torch.nn as nn
import torch.utils.data
from net import ActionLSTM
from data import ActionDataset
from tensorboardX import SummaryWriter
if __name__ == '__main__':
    # Training script: fits ActionLSTM on ActionDataset with SGD + MSE
    # loss, logging loss/accuracy to TensorBoard and saving the weights.
    batch_size = 8
    dataset = ActionDataset()
    data_loader = torch.utils.data.DataLoader(dataset, batch_size=batch_size, shuffle=True)
    net = ActionLSTM()
    criterion = nn.MSELoss()
    optimizer = torch.optim.SGD(net.parameters(), lr=0.01)
    writer = SummaryWriter('./log')
    # NOTE(review): this peek at the first batch is immediately shadowed
    # by the training loop variable and appears to be dead code.
    sample = iter(data_loader).__next__()
    global_step = 0
    # 40 epochs over the full data loader.
    for i in range(40):
        for j, sample in enumerate(data_loader):
            global_step += 1
            net.zero_grad()
            optimizer.zero_grad()
            out = net(sample['values'])
            loss_value = criterion(out, sample['label'])
            # Accuracy: argmax of the network output vs the raw labels.
            pred = np.argmax(out.detach().numpy(), -1)
            tags = sample['raw_label'].detach().numpy()
            accuracy = float(np.where(pred == tags, 1, 0).sum() / batch_size)
            print(
                'Epoch {}, Itertaion {}, Loss = {}, Accuracy = {:.2f} %'.format(i + 1, j + 1, loss_value, accuracy * 100))
            writer.add_scalar('loss', loss_value, global_step=global_step)
            writer.add_scalar('accuracy', accuracy, global_step=global_step)
            loss_value.backward()
            optimizer.step()
    writer.close()
    # Persist only the weights (state_dict), not the whole module.
    state_dict = net.state_dict()
    torch.save(state_dict, 'model.pth')
| [
"[email protected]"
] | |
a24a2db0228e5412adc4204bef7d76d48aec585c | 7897caaabb1a963b98e723a6abfc53b99d300a86 | /view/widgets/gallery/gallery.py | f183aba043c9138771a54a844dad6fd3c001e1d8 | [
"MIT"
] | permissive | Wajdi-Mabroukeh/CvStudio | f6983c1372c29930e23c8a0d1775193cebf045be | b798ef7b3db7505a151f2df9cf55a6ce5828baa4 | refs/heads/master | 2021-04-20T14:10:08.228504 | 2020-03-20T02:34:34 | 2020-03-20T02:34:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,327 | py | import mimetypes
import os
from enum import Enum,auto
import cv2
import dask
import imutils
import numpy as np
from PyQt5 import QtGui,QtCore
from PyQt5.QtCore import QObject,QSize,pyqtSignal,QThreadPool
from PyQt5.QtGui import QPixmap
from PyQt5.QtWidgets import QWidget,QGridLayout,QLabel,QLayoutItem,QVBoxLayout
from hurry.filesize import size,alternative
from util import GUIUtilities,MiscUtilities,Worker
from view.widgets.image_button import ImageButton
from view.widgets.loading_dialog import QLoadingDialog
from .base_gallery import Ui_Gallery
from .card import GalleryCard,ImageCard
class GalleryViewMode(Enum):
    """Display modes supported by the gallery widget."""

    # Explicit values match what enum.auto() assigned in the original.
    GRID_MODE = 1
    LIST_MODE = 2
class GalleryLayout(QGridLayout,QObject):
    """Grid layout that arranges gallery cards in a fixed number of columns,
    padding empty cells with placeholder QWidgets."""
    def __init__(self,parent=None):
        super(GalleryLayout,self).__init__(parent)
        self.setAlignment(QtCore.Qt.AlignTop)
        # Widgets currently managed by the layout.
        self._items=[]
        self._view_mode=GalleryViewMode.GRID_MODE
        # Number of columns used when arranging items.
        self._cols=8
    @property
    def view_mode(self):
        """Current GalleryViewMode (grid or list)."""
        return self._view_mode
    @view_mode.setter
    def view_mode(self,value):
        self._view_mode=value
    @property
    def items(self):
        """The widgets placed by this layout."""
        return self._items
    @items.setter
    def items(self,value):
        self._items=value
    @property
    def cols(self):
        """Number of grid columns; changing it repaints the layout."""
        return self._cols
    @cols.setter
    def cols(self,value):
        self._cols=value
        self.notify_property_changed()
    def notify_property_changed(self):
        """Request a repaint after a property change."""
        self.update()
    def arrange(self) -> None:
        """Re-place all stored items row by row, padding trailing cells
        with empty QWidgets so every column keeps equal stretch."""
        self.clear()
        if len(self.items) > 0:
            row=col=0
            # Always fill at least one full row of cells.
            n=max(len(self.items),self.cols)
            for idx in range(n):
                self.setColumnStretch(col,1)
                self.setRowStretch(row,1)
                if idx < len(self.items):
                    widget=self.items[idx]
                    self.addWidget(widget,row,col)
                else:
                    # Placeholder keeps the grid geometry uniform.
                    self.addWidget(QWidget(),row,col)
                col+=1
                if col%self.cols == 0:
                    row+=1
                    col=0
    def initialize(self,n_items):
        """Pre-fill the grid with n_items placeholder widgets (at least
        one full row) so cards can later replace them in place."""
        self.clear()
        row=col=0
        n=max(n_items,self.cols)
        for idx in range(n):
            self.setColumnStretch(col,1)
            self.setRowStretch(row,1)
            self.addWidget(QWidget(),row,col)
            col+=1
            if col%self.cols == 0:
                row+=1
                col=0
    def add_item(self,widget: QWidget):
        """Replace the first placeholder cell (a widget of a different
        type than the incoming one) with the given widget."""
        if self.rowCount() > 0:
            cols=self.columnCount()
            rows=self.rowCount()
            for r in range(rows):
                for c in range(cols):
                    item: QLayoutItem=self.itemAtPosition(r,c)
                    if not isinstance(item.widget(),type(widget)):
                        self.removeWidget(item.widget())
                        self.addWidget(widget,r,c)
                        self.items.append(widget)
                        # self.update()
                        return
    def clear(self):
        GUIUtilities.clear_layout(self) # clear the gridlayout
class Gallery(QWidget,Ui_Gallery,QObject):
    """Paginated media gallery widget with drag-and-drop ingestion.

    Signals:
        doubleClicked(card, gallery): a gallery card was double-clicked.
        filesDropped(paths): valid media files were dropped on the widget.
        cardActionClicked(name, item): a card action button was clicked.
    """
    doubleClicked=pyqtSignal(GalleryCard,QWidget)
    filesDropped=pyqtSignal(list)
    cardActionClicked=pyqtSignal(str,object)
    def __init__(self,parent=None):
        super(Gallery,self).__init__(parent)
        self.setupUi(self)
        self.setup_toolbar()
        self.setup_paginator()
        self._items: []=[]
        self._pages=[]
        self._page_size=50
        self._curr_page=0
        # Thumbnails are generated off the GUI thread via this pool.
        self._thread_pool=QThreadPool()
        self.setAcceptDrops(True)
        self.center_widget=None
        self.center_layout=None
        # Either "Images" or "Videos"; controls drop filtering and binding.
        self._content_type="Images"
        self._tag=None
        self._actions=[]
        self._loading_dialog=QLoadingDialog(parent=self)
    def setup_toolbar(self):
        """Create the check-all / uncheck-all toolbar buttons."""
        uncheck_all_icon=GUIUtilities.get_icon("uncheck_all.png")
        self.btn_uncheck_all=ImageButton(icon=uncheck_all_icon,size=QSize(20,20))
        check_all_icon=GUIUtilities.get_icon("check_all.png")
        self.btn_check_all=ImageButton(icon=check_all_icon,size=QSize(20,20))
        self.btn_check_all.setFixedWidth(40)
        self.btn_uncheck_all.setFixedWidth(40)
        self.btn_check_all.clicked.connect(self.btn_check_all_on_click_slot)
        self.btn_uncheck_all.clicked.connect(self.btn_uncheck_all_on_click_slot)
    @property
    def actions(self):
        """Actions to expose as buttons on each gallery card."""
        return self._actions
    @actions.setter
    def actions(self,value):
        self._actions=value
    @property
    def content_type(self):
        """The kind of media this gallery shows ("Images" or "Videos")."""
        return self._content_type
    @content_type.setter
    def content_type(self,value):
        self._content_type=value
    def enable_paginator(self,val):
        """Enable/disable all pagination and selection buttons at once."""
        self.btn_check_all.setEnabled(val)
        self.btn_uncheck_all.setEnabled(val)
        self.btn_next_page.setEnabled(val)
        self.btn_prev_page.setEnabled(val)
        self.btn_last_page.setEnabled(val)
        self.btn_first_page.setEnabled(val)
    def setup_paginator(self):
        """Wire up the paging buttons and style them."""
        self.grid_actions_layout.addWidget(self.btn_check_all)
        self.grid_actions_layout.addWidget(self.btn_uncheck_all)
        self.btn_next_page.clicked.connect(self.btn_next_page_on_click)
        self.btn_prev_page.clicked.connect(self.btn_prev_page_on_click)
        self.btn_last_page.clicked.connect(self.btn_last_page_on_click)
        self.btn_first_page.clicked.connect(self.btn_first_page_on_click)
        self.btn_first_page.setIcon(GUIUtilities.get_icon("first.png"))
        self.btn_prev_page.setIcon(GUIUtilities.get_icon("left.png"))
        self.btn_next_page.setIcon(GUIUtilities.get_icon("right.png"))
        self.btn_last_page.setIcon(GUIUtilities.get_icon("last.png"))
        self.btn_first_page.setStyleSheet('QPushButton{border: 0px solid;}')
        self.btn_prev_page.setStyleSheet('QPushButton{border: 0px solid;}')
        self.btn_last_page.setStyleSheet('QPushButton{border: 0px solid;}')
        self.btn_next_page.setStyleSheet('QPushButton{border: 0px solid;}')
        self.grid_actions_layout.setAlignment(QtCore.Qt.AlignCenter)
    @property
    def tag(self):
        """Arbitrary user data attached to the gallery."""
        return self._tag
    @tag.setter
    def tag(self,value):
        self._tag=value
    @property
    def items(self):
        """Full (unpaged) list of media items."""
        return self._items
    @items.setter
    def items(self,value):
        self._items=value
    @property
    def page_size(self):
        """Items per page; changing it rebuilds the page list."""
        return self._page_size
    @page_size.setter
    def page_size(self,value):
        self._page_size=value
        self.update_pager()
    @property
    def current_page(self):
        """1-based current page number."""
        return self._curr_page+1
    @current_page.setter
    def current_page(self,val):
        # Wrap around using modulo so next/prev cycle through pages.
        self._curr_page=val
        self._curr_page=self._curr_page%self.total_pages
        self.lbl_current_page.setText(str(self.current_page))
        self.bind()
    @property
    def total_pages(self):
        """Number of pages given the current items and page size."""
        return len(self._pages)
    def update_pager(self):
        """Re-chunk the items into pages and refresh the page labels."""
        self._pages=list(MiscUtilities.chunk(self._items,self._page_size))
        self.lbl_total_pages.setText("{}".format(len(self._pages)))
        self.lbl_current_page.setText(str(self.current_page))
    def btn_next_page_on_click(self):
        if len(self._pages) == 0:
            return
        self._curr_page+=1
        self.current_page=self._curr_page
    def btn_last_page_on_click(self):
        if len(self._pages) == 0:
            return
        self.current_page=len(self._pages)-1
    def btn_first_page_on_click(self):
        if len(self._pages) == 0:
            return
        self.current_page=0
    def btn_prev_page_on_click(self):
        if len(self._pages) == 0:
            return
        self._curr_page-=1
        self.current_page=self._curr_page
    def dragEnterEvent(self,event: QtGui.QDragEnterEvent) -> None:
        """Accept the drag only when it carries at least one local file."""
        data=event.mimeData()
        if data.hasUrls():
            if any(url.isLocalFile() for url in data.urls()):
                event.accept()
                return
        else:
            event.ignore()
    def dragMoveEvent(self,event: QtGui.QDragMoveEvent) -> None:
        # NOTE(review): "hasUrls" is referenced without calling it, so the
        # bound method is always truthy — confirm whether () was intended.
        if event.mimeData().hasUrls:
            event.setDropAction(QtCore.Qt.CopyAction)
            event.accept()
            return
        else:
            event.ignore()
    def dropEvent(self,event: QtGui.QDropEvent) -> None:
        """Collect dropped local files matching the gallery content type
        and emit them (sorted by basename) through filesDropped."""
        valid_files=[]
        files=[u.toLocalFile() for u in event.mimeData().urls()]
        for f in files:
            if os.path.isfile(f):
                mime_type,encoding=mimetypes.guess_type(f) # magic.from_file(f,mime=True)
                if mime_type.find("video") != -1 and self.content_type == "Videos":
                    valid_files.append(f)
                elif mime_type.find("image") != -1 and self.content_type == "Images":
                    valid_files.append(f)
        valid_files=sorted(valid_files,key=lambda v: os.path.basename(v))
        self.filesDropped.emit(valid_files)
    def load_images(self):
        """Build thumbnails for the current page on a worker thread and
        populate the grid with ImageCards when done."""
        def do_work():
            # Runs on a pool thread: create all thumbnails in parallel.
            page=self._curr_page
            items=self._pages[page]
            def create_thumbnail(item):
                # Returns (item, h, w, thumbnail, file_size, is_broken);
                # missing files get a placeholder and is_broken=True.
                file_path=item.file_path
                if os.path.isfile(file_path):
                    image=cv2.imread(file_path)
                    h,w,_=np.shape(image)
                    # Scale the longest side down to 150px.
                    if w > h:
                        thumbnail_array=imutils.resize(image,width=150)
                    else:
                        thumbnail_array=imutils.resize(image,height=150)
                    thumbnail_array=cv2.cvtColor(thumbnail_array,cv2.COLOR_BGR2RGB)
                    thumbnail=GUIUtilities.array_to_qimage(thumbnail_array)
                    thumbnail=QPixmap.fromImage(thumbnail)
                    del thumbnail_array
                    del image
                    return item,h,w,thumbnail,os.path.getsize(file_path), False
                thumbnail = GUIUtilities.get_image("placeholder.png")
                thumbnail = thumbnail.scaledToHeight(100)
                h, w = thumbnail.height(), thumbnail.width()
                return item, h, w, thumbnail, 0, True
            delayed_tasks=[dask.delayed(create_thumbnail)(item) for item in items]
            images=dask.compute(*delayed_tasks)
            return images
        def done_work(images):
            # Back on the GUI thread: build one card per thumbnail.
            for img in images:
                if img:
                    item,h,w,thumbnail,file_size, is_broken=img
                    image_card=ImageCard()
                    image_card.is_broken = is_broken
                    image_card.tag=item
                    image_card.source=thumbnail
                    image_card.file_path=item.file_path
                    image_size_str = size(file_size,system=alternative) if file_size > 0 else "0 MB"
                    image_card.label.setText("\n ({0}px / {1}px) \n {2}".format(w,h, image_size_str))
                    image_card.setFixedHeight(240)
                    image_card.doubleClicked.connect(self.gallery_card_double_click)
                    image_card.add_buttons(self.actions)
                    if self.actions:
                        image_card.actionClicked.connect(lambda name,item: self.cardActionClicked.emit(name,item))
                    self.center_layout.add_item(image_card)
        def finished_work():
            self._loading_dialog.close()
            self.enable_paginator(True)
        worker=Worker(do_work)
        worker.signals.result.connect(done_work)
        worker.signals.finished.connect(finished_work)
        self._thread_pool.start(worker)
        # Lock the UI while thumbnails are being generated.
        self.enable_paginator(False)
        self._loading_dialog.show()
    def bind(self):
        """Rebuild the central area: a populated grid when there are
        pages, otherwise a drag-and-drop hint label."""
        self.update_pager()
        if len(self._pages) > 0:
            self.center_widget=QWidget()
            self.center_layout=GalleryLayout()
            self.center_widget.setLayout(self.center_layout)
            self.center_layout.setAlignment(QtCore.Qt.AlignTop)
            self.scrollArea.setWidget(self.center_widget)
            self.center_layout.initialize(n_items=self.page_size)
            if self.content_type == "Images":
                self.load_images()
            else:
                # Only image galleries are implemented so far.
                raise NotImplementedError
        else:
            self.center_widget=QWidget()
            self.center_layout=QVBoxLayout()
            self.center_widget.setLayout(self.center_layout)
            self.center_layout.setAlignment(QtCore.Qt.AlignCenter)
            self.center_layout.addWidget(QLabel("Drag and Drop your files"))
            self.scrollArea.setWidget(self.center_widget)
    def gallery_card_double_click(self,card: GalleryCard):
        """Forward a card double-click along with this gallery instance."""
        self.doubleClicked.emit(card,self)
    def btn_check_all_on_click_slot(self):
        """Mark every visible gallery card as selected."""
        if self.items is None:
            return
        layout=self.scrollArea.widget().layout()
        for i in reversed(range(layout.count())):
            child=layout.itemAt(i)
            widget=child.widget()
            if isinstance(child.widget(),GalleryCard):
                widget.is_selected=True
    def btn_uncheck_all_on_click_slot(self):
        """Clear the selection flag on every visible gallery card."""
        if self.items is None:
            return
        layout=self.scrollArea.widget().layout()
        for i in reversed(range(layout.count())):
            child=layout.itemAt(i)
            widget=child.widget()
            if isinstance(child.widget(),GalleryCard):
                widget.is_selected=False
| [
"[email protected]"
] | |
df9f2ebddc59d18e8456ed82a9f87bc2a8b14002 | 74081581575e80b2b0f6b75ba912d58ea4f37ac6 | /maskrcnn_benchmark/modeling/rpn/inference.py | 26c6cbe68d37d714dcd22effb92a28c06445a4ba | [] | no_license | youngfly11/LCMCG-PyTorch | 5f6b9f231613b86ac7b250ca0f34229402e1615e | e95299b9a9f1b13e21750ef0dcde0941d703d009 | refs/heads/master | 2021-10-25T19:29:12.967318 | 2021-10-25T03:35:14 | 2021-10-25T03:35:14 | 221,908,808 | 56 | 12 | null | null | null | null | UTF-8 | Python | false | false | 8,755 | py | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
import torch
from maskrcnn_benchmark.modeling.box_coder import BoxCoder
from maskrcnn_benchmark.structures.bounding_box import BoxList
from maskrcnn_benchmark.structures.boxlist_ops import cat_boxlist
from maskrcnn_benchmark.structures.boxlist_ops import boxlist_nms
from maskrcnn_benchmark.structures.boxlist_ops import remove_small_boxes
from ..utils import cat
from .utils import permute_and_flatten
class RPNPostProcessor(torch.nn.Module):
"""
Performs post-processing on the outputs of the RPN boxes, before feeding the
proposals to the heads
apply the proposal regression result on the anchor boxes
"""
    def __init__(
        self,
        pre_nms_top_n,
        post_nms_top_n,
        nms_thresh,
        min_size,
        box_coder=None,
        fpn_post_nms_top_n=None,
    ):
        """
        Arguments (two nms-related selection stages are configured here):
            pre_nms_top_n (int): proposals kept per feature level before nms
            post_nms_top_n (int): proposals kept per image after nms
            nms_thresh (float): IoU threshold used by nms
            min_size (int): minimum proposal box size to keep
            box_coder (BoxCoder): box encoder/decoder; defaults to unit weights
            fpn_post_nms_top_n (int): proposals kept across all FPN levels;
                defaults to post_nms_top_n when not given
        """
        super(RPNPostProcessor, self).__init__()
        self.pre_nms_top_n = pre_nms_top_n
        self.post_nms_top_n = post_nms_top_n
        self.nms_thresh = nms_thresh
        self.min_size = min_size
        if box_coder is None:
            # Unit weights: regression deltas are applied unscaled.
            box_coder = BoxCoder(weights=(1.0, 1.0, 1.0, 1.0))
        self.box_coder = box_coder
        if fpn_post_nms_top_n is None:
            fpn_post_nms_top_n = post_nms_top_n
        self.fpn_post_nms_top_n = fpn_post_nms_top_n
def add_gt_proposals(self, proposals, targets):
"""
Arguments:
proposals: list[BoxList]
targets: list[BoxList]
"""
# Get the device we're operating on
device = proposals[0].bbox.device
gt_boxes = [target.copy_with_fields([]) for target in targets]
# later cat of bbox requires all fields to be present for all bbox
# so we need to add a dummy for objectness that's missing
for gt_box in gt_boxes:
gt_box.add_field("objectness", torch.ones(len(gt_box), device=device))
proposals = [
# put the gt to front
cat_boxlist((gt_box, proposal))
for proposal, gt_box in zip(proposals, gt_boxes)
]
return proposals
def forward_for_single_feature_map(self, anchors, objectness, box_regression):
"""
apply the RPN result on anchors generate from single feature level
from ont batch(has multiple images)
Arguments:
anchors: list[BoxList]
objectness: tensor of size N, A, H, W
box_regression: tensor of size N, A * 4, H, W
"""
device = objectness.device
N, A, H, W = objectness.shape
# put in the same format as anchors
objectness = permute_and_flatten(objectness, N, A, 1, H, W).view(N, -1)
objectness = objectness.sigmoid()
box_regression = permute_and_flatten(box_regression, N, A, 4, H, W)
num_anchors = A * H * W
# decrease the proposal anchor number before the nms
pre_nms_top_n = min(self.pre_nms_top_n, num_anchors)
# filter the proposal bboxes by objectness score,
# only left the hign objectness proposals for following operation
objectness, topk_idx = objectness.topk(pre_nms_top_n, dim=1, sorted=True)
batch_idx = torch.arange(N, device=device)[:, None]
# take out the high objectness bbox regression result
box_regression = box_regression[batch_idx, topk_idx]
# preprocess the anchors for easy to process
image_shapes = [box.size for box in anchors]
concat_anchors = torch.cat([a.bbox for a in anchors], dim=0)
concat_anchors = concat_anchors.reshape(N, -1, 4)[batch_idx, topk_idx]
# apply the regression on the anchor boxes
proposals = self.box_coder.decode(
box_regression.view(-1, 4), concat_anchors.view(-1, 4)
)
proposals = proposals.view(N, -1, 4)
result = []
# collect the processed anchor boxes in to BoxList form
# and apply the nms to generate the final proposals
for proposal, score, im_shape in zip(proposals, objectness, image_shapes):
boxlist = BoxList(proposal, im_shape, mode="xyxy")
boxlist.add_field("objectness", score)
boxlist = boxlist.clip_to_image(remove_empty=False)
boxlist = remove_small_boxes(boxlist, self.min_size)
boxlist = boxlist_nms(
boxlist,
self.nms_thresh,
max_proposals=self.post_nms_top_n,
score_field="objectness",
)
result.append(boxlist)
return result
def forward(self, anchors, objectness, box_regression, targets=None):
"""
Arguments:
rpn model output of multiple feature levels.
anchors: list[list[BoxList]]
objectness: list[tensor]
box_regression: list[tensor]
Returns:
boxlists (list[BoxList]): the post-processed anchors, after
applying box decoding and NMS
"""
sampled_boxes = []
num_levels = len(objectness)
anchors = list(zip(*anchors))
# the pre-nms first time nms one each level features
for a, o, b in zip(anchors, objectness, box_regression):
sampled_boxes.append(self.forward_for_single_feature_map(a, o, b))
boxlists = list(zip(*sampled_boxes))
boxlists = [cat_boxlist(boxlist) for boxlist in boxlists]
# collect the bounding boxes for same image into one list
# the post-nms on global image
if num_levels > 1:
boxlists = self.select_over_all_levels(boxlists)
# append ground-truth bboxes to proposals for supervision
if self.training and targets is not None:
boxlists = self.add_gt_proposals(boxlists, targets)
return boxlists
def select_over_all_levels(self, boxlists):
num_images = len(boxlists)
# different behavior during training and during testing:
# during training, post_nms_top_n is over *all* the proposals combined, while
# during testing, it is over the proposals for each image
# TODO resolve this difference and make it consistent. It should be per image,
# and not per batch
if self.training:
objectness = torch.cat(
[boxlist.get_field("objectness") for boxlist in boxlists], dim=0
)
box_sizes = [len(boxlist) for boxlist in boxlists]
post_nms_top_n = min(self.fpn_post_nms_top_n, len(objectness))
_, inds_sorted = torch.topk(objectness, post_nms_top_n, dim=0, sorted=True)
inds_mask = torch.zeros_like(objectness, dtype=torch.uint8)
inds_mask[inds_sorted] = 1
inds_mask = inds_mask.split(box_sizes)
for i in range(num_images):
boxlists[i] = boxlists[i][inds_mask[i]]
else:
for i in range(num_images):
objectness = boxlists[i].get_field("objectness")
post_nms_top_n = min(self.fpn_post_nms_top_n, len(objectness))
_, inds_sorted = torch.topk(
objectness, post_nms_top_n, dim=0, sorted=True
)
boxlist = boxlists[i][inds_sorted]
boxlist = boxlist_nms(
boxlist,
self.nms_thresh,
max_proposals=self.post_nms_top_n,
score_field="objectness",
)
boxlists[i] = boxlist
return boxlists
def make_rpn_postprocessor(config, rpn_box_coder, is_train):
    """Build an RPNPostProcessor from the MODEL.RPN section of *config*.

    Arguments:
        config: configuration node exposing MODEL.RPN.* settings
        rpn_box_coder (BoxCoder): coder used to decode box regression deltas
        is_train (bool): selects the *_TRAIN or *_TEST top-n settings
    Returns:
        RPNPostProcessor configured for the requested phase.
    """
    rpn_cfg = config.MODEL.RPN
    if is_train:
        pre_nms_top_n = rpn_cfg.PRE_NMS_TOP_N_TRAIN
        post_nms_top_n = rpn_cfg.POST_NMS_TOP_N_TRAIN
        fpn_post_nms_top_n = rpn_cfg.FPN_POST_NMS_TOP_N_TRAIN
    else:
        pre_nms_top_n = rpn_cfg.PRE_NMS_TOP_N_TEST
        post_nms_top_n = rpn_cfg.POST_NMS_TOP_N_TEST
        fpn_post_nms_top_n = rpn_cfg.FPN_POST_NMS_TOP_N_TEST
    return RPNPostProcessor(
        pre_nms_top_n=pre_nms_top_n,
        post_nms_top_n=post_nms_top_n,
        nms_thresh=rpn_cfg.NMS_THRESH,
        min_size=rpn_cfg.MIN_SIZE,
        box_coder=rpn_box_coder,
        fpn_post_nms_top_n=fpn_post_nms_top_n,
    )
| [
"[email protected]"
] | |
f13ffc91ce07bbf34c92634a9fc7e5c133c1bdbe | f337bc5f179b25969ba73e7680ffb0a0616e3b97 | /python/BOJ/1XXX/1525.py | 230082c879b52617c741e0d9d696d6414422de10 | [] | no_license | raiders032/PS | 31771c5496a70f4730402698f743bbdc501e49a3 | 08e1384655975b868e80521167ec876b96fa01c8 | refs/heads/master | 2023-06-08T10:21:00.230154 | 2023-06-04T01:38:08 | 2023-06-04T01:38:08 | 349,925,005 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 74 | py | """
https://www.acmicpc.net/problem/1525
1525.퍼즐
골드2
풀이2.
"""
| [
"[email protected]"
] | |
4a9d371ca2d986c987d3aff9a4005a5dd3da1364 | eacfc1c0b2acd991ec2cc7021664d8e79c9e58f6 | /ccpnmr2.4/python/paris/aria/AriaRunFrame.py | 389054dd281b65767174a9163b4c4474e8987a19 | [] | no_license | edbrooksbank/ccpnmr2.4 | cfecb0896dcf8978d796e6327f7e05a3f233a921 | f279ca9bb2d972b1ce075dad5fcc16e6f4a9496c | refs/heads/master | 2021-06-30T22:29:44.043951 | 2019-03-20T15:01:09 | 2019-03-20T15:01:09 | 176,757,815 | 0 | 1 | null | 2020-07-24T14:40:26 | 2019-03-20T14:59:23 | HTML | UTF-8 | Python | false | false | 11,914 | py | import os, sys
import Tkinter
from memops.gui.Button import Button
from memops.gui.DataEntry import askString
from memops.gui.Entry import Entry
from memops.gui.Frame import Frame
from memops.gui.FileSelectPopup import FileSelectPopup
from memops.gui.FileSelect import FileType
from memops.gui.Label import Label
from memops.gui.LabelFrame import LabelFrame
from memops.gui.MessageReporter import showWarning, showOkCancel
from memops.gui.PulldownList import PulldownList
from memops.gui.ScrolledMatrix import ScrolledMatrix
from ccp.util.NmrCalc import getObjBooleanParameter, toggleObjBooleanParameter, setRunParameter, getRunTextParameter
from ccpnmr.analysis.core.ExperimentBasic import getThroughSpacePeakLists
from ccpnmr.analysis.popups.EditCalculation import NmrCalcRunFrame, PEAK_DATA, CONSTRAINT_DATA
from ccpnmr.analysis.popups.BasePopup import BasePopup
# Maps a Python value type to the NmrCalc parameter attribute name that
# stores values of that type.  Keys are the type objects float, int, bool.
# NOTE(review): appears unused within this module chunk — confirm callers.
PARAM_ATTR_DICT = {type(1.0):'floatValue',
                   type(1):'intValue',
                   type(True):'booleanValue'}

# String keys used to store/retrieve per-object and per-run parameters
# through the NmrCalc get/set/toggle helper functions.
FILTER_VIOL = 'FilterViol'        # filter violated restraints for this data object
KEEP_ASSIGN = 'KeepAssign'        # keep existing peak assignments
AMBIG_PROTOCOL = 'AmbigProtocol'  # use ARIA's ambiguous-restraint protocol
USE_IN_CALC = 'UseInCalc'         # include the data object in the calculation
CNS_EXE = 'CnsExe'                # path to the CNS executable
FILE_PREFIX = 'FilePrefix'        # prefix for generated file names
WORKING_DIR = 'WorkingDir'        # directory for run output
TEMP_DIR = 'TempDir'              # directory for temporary files
class AriaRunFrame(NmrCalcRunFrame):
    """GUI frame for configuring an ARIA structure-calculation run.

    Extends NmrCalcRunFrame with ARIA-specific options: file prefix, CNS
    executable path, working/temporary directories, per-peak-list options
    (violation filtering, keeping assignments) and per-constraint-list
    options (violation filtering, ambiguous protocol).
    """
    def __init__(self, parent, project, calcStore, *args, **kw):
        """Build the widgets for the ARIA options tab and show *calcStore*."""
        NmrCalcRunFrame.__init__(self, parent, project, calcStore,
                                 inputTypes=(PEAK_DATA, CONSTRAINT_DATA),
                                 chainSelection=True, *args, **kw)
        self.calcStore = calcStore
        self.optPeakList = None
        self.optConstraintList = None
        # second tab of the inherited tabbed frame hosts the ARIA options
        frame = self.tabbedFrame.frames[1]
        headingList = ['PeakList','Filter\nViolated?',
                       'Keep\nAssignments?','Shift List']
        editWidgets = [None, None, None, None]
        editGetCallbacks = [None, self.toggleFilterViol,
                            self.toggleKeepAssign, None]
        editSetCallbacks = [None, None, None, None]
        row = 0
        # path/prefix entry fields; each pushes its value into the run's
        # parameters whenever the mouse leaves the entry (<Leave> binding)
        subFrame = Frame(frame, grid=(row,0))
        subFrame.expandGrid(0,1)
        label = Label(subFrame, text='File Name Prefix:', grid=(0,0))
        self.filePrefixEntry = Entry(subFrame, text='aria', grid=(0,1), sticky="ew")
        self.filePrefixEntry.bind('<Leave>', self.updateEntryParams)
        label = Label(subFrame, text='CNS Executable:', grid=(1,0))
        self.cnsExeEntry = Entry(subFrame, text='', grid=(1,1), sticky="ew")
        self.cnsExeEntry.bind('<Leave>', self.updateEntryParams)
        button = Button(subFrame, text='Select File',bd=1,bg='#C0E0FF',
                        command=self.selectCnsExe, grid=(1,2))
        label = Label(subFrame, text='Working Directory:', grid=(2,0))
        self.workingDirEntry = Entry(subFrame, text='', grid=(2,1), sticky="ew")
        self.workingDirEntry.bind('<Leave>', self.updateEntryParams)
        button = Button(subFrame, text='Select File',bd=1,bg='#C0E0FF',
                        command=self.selectWorkingDir, grid=(2,2))
        label = Label(subFrame, text='Temporary Directory:', grid=(3,0))
        self.tempDirEntry = Entry(subFrame, text='', grid=(3,1), sticky="ew")
        self.tempDirEntry.bind('<Leave>', self.updateEntryParams)
        button = Button(subFrame, text='Select File',bd=1,bg='#C0E0FF',
                        command=self.selectTempDir, grid=(3,2))
        row += 1
        frame.expandGrid(row,0)
        self.grid_rowconfigure(row, weight=1)
        # table of peak-list options
        self.optPeakListMatrix = ScrolledMatrix(frame, headingList=headingList,
                                                editSetCallbacks=editSetCallbacks,
                                                editGetCallbacks=editGetCallbacks,
                                                editWidgets=editWidgets,
                                                multiSelect=True, grid=(row,0),
                                                callback=self.selectOptPeakList)
        row += 1
        frame.expandGrid(row,0)
        headingList = ['Constraint List',
                       'Name',
                       'Filter\nViolated?',
                       'Ambiguous\nProtocol?',]
        editWidgets = [None, None, None, None]
        editGetCallbacks = [None, None, self.toggleFilterViol, self.toggleAmbig]
        editSetCallbacks = [None, None, None, None]
        # table of constraint-list options
        self.optConstraintMatrix = ScrolledMatrix(frame, headingList=headingList,
                                                  editSetCallbacks=editSetCallbacks,
                                                  editGetCallbacks=editGetCallbacks,
                                                  editWidgets=editWidgets,
                                                  multiSelect=True, grid=(row,0),
                                                  callback=self.selectConstraintList)
        self.optConstraintMatrix.doEditMarkExtraRules = self.doEditMarkExtraRules
        self.update(calcStore)
    def update(self, calcStore=None):
        """Refresh the whole frame, then populate entries from run parameters."""
        NmrCalcRunFrame.update(self, calcStore)
        self.updateSettings()
        # Need to run NmrCalcRunFrame.update before
        # this to get new self.run, self.project
        if self.run:
            repository = self.project.findFirstRepository(name='userData')
            defaultDir = repository.url.path
            # fall back to sensible defaults when the run has no stored value
            filePrefix = getRunTextParameter(self.run, FILE_PREFIX) or self.project.name
            cnsExe = getRunTextParameter(self.run, CNS_EXE) or '/usr/bin/cns'
            workingDir = getRunTextParameter(self.run, WORKING_DIR) or defaultDir
            tempDir = getRunTextParameter(self.run, TEMP_DIR) or defaultDir
            self.filePrefixEntry.set(filePrefix)
            self.cnsExeEntry.set(cnsExe)
            self.workingDirEntry.set(workingDir)
            self.tempDirEntry.set(tempDir)
            self.updateEntryParams()
    def updateEntryParams(self, event=None):
        """Store the current entry-field values as parameters on the run."""
        if self.run:
            repository = self.project.findFirstRepository(name='userData')
            defaultDir = repository.url.path
            filePrefix = self.filePrefixEntry.get() or self.project.name
            cnsExe = self.cnsExeEntry.get() or '/usr/bin/cns'
            workingDir = self.workingDirEntry.get() or defaultDir
            tempDir = self.tempDirEntry.get() or defaultDir
            setRunParameter(self.run, FILE_PREFIX, filePrefix)
            setRunParameter(self.run, CNS_EXE, cnsExe)
            setRunParameter(self.run, WORKING_DIR, workingDir)
            setRunParameter(self.run, TEMP_DIR, tempDir)
    def selectCnsExe(self):
        """Open a file browser to choose the CNS executable."""
        fileTypes = [ FileType("All", ["*"]), FileType("EXE", ["*.exe"]) ]
        popup = FileSelectPopup(self, fileTypes)
        file = popup.getFile()
        if file:
            self.cnsExeEntry.set( file )
        popup.destroy()
        self.updateEntryParams()
    def selectWorkingDir(self):
        """Open a directory browser to choose the working directory."""
        popup = FileSelectPopup(self, show_file=False)
        directory = popup.getDirectory()
        if directory:
            self.workingDirEntry.set( directory )
        popup.destroy()
        self.updateEntryParams()
    def selectTempDir(self):
        """Open a directory browser to choose the temporary directory."""
        popup = FileSelectPopup(self, show_file=False)
        directory = popup.getDirectory()
        if directory:
            self.tempDirEntry.set( directory )
        popup.destroy()
        self.updateEntryParams()
    def getPeakLists(self):
        # overwrites superclass: ARIA only uses through-space (NOESY-type) peak lists
        return getThroughSpacePeakLists(self.project)
    def administerNotifiers(self, notifyFunc):
        """Register updateSettings for constraint-list create/delete/rename events."""
        NmrCalcRunFrame.administerNotifiers(self, notifyFunc)
        for func in ('__init__','delete','setName'):
            #notifyFunc(self.updateSettings, 'ccp.nmr.NmrConstraint.AbstractConstraintList', func)
            notifyFunc(self.updateSettings, 'ccp.nmr.NmrConstraint.DistanceConstraintList', func)
            notifyFunc(self.updateSettings, 'ccp.nmr.NmrConstraint.DihedralConstraintList', func)
            notifyFunc(self.updateSettings, 'ccp.nmr.NmrConstraint.HBondConstraintList', func)
    def selectOptPeakList(self, obj, row, col):
        """Row-selection callback for the peak-list options table."""
        self.optPeakList = obj
    def selectConstraintList(self, obj, row, col):
        """Row-selection callback for the constraint-list options table."""
        self.optConstraintList = obj
    def doEditMarkExtraRules(self, obj, row, col):
        """Return whether the option cells of a table row may be edited."""
        if col in (2,3):
            cSet = obj.nmrConstraintStore
            if cSet:
                for cList in obj.constraintLists:
                    if cList.className[:-14] != 'Distance':
                        # i.e. ambig protocols and viol filtering
                        # is only for dist constraints, not dihedral etc
                        return False
        return True
    def toggleUseInCalc(self, dataObj):
        """Flip the 'use in calculation' flag for *dataObj* and redisplay."""
        toggleObjBooleanParameter(dataObj, USE_IN_CALC)
        self.updateSettings()
    def toggleFilterViol(self, dataObj):
        """Flip the 'filter violated' flag for *dataObj* and redisplay."""
        toggleObjBooleanParameter(dataObj, FILTER_VIOL)
        self.updateSettings()
    def toggleKeepAssign(self, dataObj):
        """Flip the 'keep assignments' flag for *dataObj* and redisplay."""
        toggleObjBooleanParameter(dataObj, KEEP_ASSIGN)
        self.updateSettings()
    def toggleAmbig(self, dataObj):
        """Flip the 'ambiguous protocol' flag for *dataObj* and redisplay."""
        toggleObjBooleanParameter(dataObj, AMBIG_PROTOCOL)
        self.updateSettings()
    def updateSettings(self, obj=None):
        """Rebuild both option tables from the run's current input data objects."""
        textMatrix = []
        objectList = []
        run = self.run
        if run:
            # partition the run's inputs by data-object class
            peakListsData = []
            constraintData = []
            molSystemData = None
            for dataObj in run.sortedInputs():
                className = dataObj.className
                if className == 'PeakListData':
                    peakList = dataObj.peakList
                    if peakList:
                        peakListsData.append((dataObj, peakList))
                elif className == 'MolSystemData':
                    molSystem = dataObj.molSystem
                    if molSystem:
                        molSystemData = (dataObj, molSystem)
                elif className == 'ConstraintStoreData':
                    nmrConstraintStore = dataObj.nmrConstraintStore
                    if nmrConstraintStore:
                        constraintLists = dataObj.constraintLists or \
                                          nmrConstraintStore.sortedConstraintLists()
                        # Should be only one
                        for constraintList in constraintLists:
                            if constraintList is None:
                                # Prob happens when serial no longer exists
                                continue
                            constraintData.append((dataObj, constraintList))
            # peak-list table rows
            for dataObj, peakList in peakListsData:
                spectrum = peakList.dataSource
                experiment = spectrum.experiment
                filterViol = getObjBooleanParameter(dataObj, FILTER_VIOL)
                keepAssign = getObjBooleanParameter(dataObj, KEEP_ASSIGN)
                shiftList = peakList.dataSource.experiment.shiftList
                if shiftList:
                    shiftListName = '%d:%s' % (shiftList.serial,shiftList.name)
                else:
                    shiftListName = None
                ident = '%s:%s:%d' % (experiment.name,spectrum.name,peakList.serial)
                textMatrix.append([ident,
                                   filterViol and 'Yes' or 'No',
                                   keepAssign and 'Yes' or 'No',
                                   shiftListName])
                objectList.append(dataObj)
            self.optPeakListMatrix.update(textMatrix=textMatrix,objectList=objectList)
            # constraint-list table rows (reuse the accumulators)
            textMatrix = []
            objectList = []
            for dataObj, constraintList in constraintData:
                #useInCalc = getObjBooleanParameter(dataObj, USE_IN_CALC)
                cSet = dataObj.constraintStoreSerial
                cType = constraintList.className[:-14]
                if cType == 'Distance':
                    filterViol = getObjBooleanParameter(dataObj, FILTER_VIOL)
                    ambigProtocol = getObjBooleanParameter(dataObj, AMBIG_PROTOCOL)
                    ambigProtocol = ambigProtocol and 'Yes' or 'No'
                    filterViol = filterViol and 'Yes' or 'No'
                else:
                    # options only apply to distance constraint lists
                    ambigProtocol = None
                    filterViol = None
                ident = '%s - %d:%d' % (cType, cSet, constraintList.serial)
                textMatrix.append([ident,
                                   constraintList.name,
                                   filterViol,
                                   ambigProtocol])
                objectList.append(dataObj)
        self.optConstraintMatrix.update(textMatrix=textMatrix,
                                        objectList=objectList)
| [
"[email protected]"
] | |
5fe28bbb5bc93700043f88a007f3546307b639c3 | c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c | /cases/synthetic/class_def_attr-big-240.py | b5bd95ff2c12b474eee61f9edfe88fbe2ca06b44 | [] | no_license | Virtlink/ccbench-chocopy | c3f7f6af6349aff6503196f727ef89f210a1eac8 | c7efae43bf32696ee2b2ee781bdfe4f7730dec3f | refs/heads/main | 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,288 | py | class A(object):
x:int = 1
class A2(object):
    # Root-level class with two int attributes (synthetic attribute-count case).
    x:int = 1
    x2:int = 1
class A3(object):
    # Root-level class with three int attributes.
    x:int = 1
    x2:int = 1
    x3:int = 1
class A4(object):
    # Root-level class with four int attributes.
    x:int = 1
    x2:int = 1
    x3:int = 1
    x4:int = 1
class A5(object):
    # Root-level class with five int attributes.
    x:int = 1
    x2:int = 1
    x3:int = 1
    x4:int = 1
    x5:int = 1
class B(A):
    # Subclass of A with an explicit no-op constructor.
    def __init__(self: "B"):
        pass
class B2(A):
    # Subclass of A with an explicit no-op constructor.
    def __init__(self: "B2"):
        pass
class B3(A):
    # Subclass of A with an explicit no-op constructor.
    def __init__(self: "B3"):
        pass
class B4(A):
    # Subclass of A with an explicit no-op constructor.
    def __init__(self: "B4"):
        pass
class B5(A):
    # NOTE(review): the original read `def $ID(...)`, which is not a valid
    # identifier (it looks like an unexpanded template placeholder).
    # Restored to __init__ for consistency with sibling classes B-B4;
    # confirm against the template that generates this case.
    def __init__(self: "B5"):
        pass
class C(B):
    # Subclass of B adding one bool attribute.
    z:bool = True
class C2(B):
    # Subclass of B adding two bool attributes.
    z:bool = True
    z2:bool = True
class C3(B):
    # Subclass of B adding three bool attributes.
    z:bool = True
    z2:bool = True
    z3:bool = True
class C4(B):
    # Subclass of B adding four bool attributes.
    z:bool = True
    z2:bool = True
    z3:bool = True
    z4:bool = True
class C5(B):
    # Subclass of B adding five bool attributes.
    z:bool = True
    z2:bool = True
    z3:bool = True
    z4:bool = True
    z5:bool = True
# Declare one variable per class defined above.
a:A = None
a2:A = None
a3:A = None
a4:A = None
a5:A = None
b:B = None
b2:B = None
b3:B = None
b4:B = None
b5:B = None
c:C = None
c2:C = None
c3:C = None
c4:C = None
c5:C = None
# Instantiate every class.
a = A()
a2 = A()
a3 = A()
a4 = A()
a5 = A()
b = B()
b2 = B()
b3 = B()
b4 = B()
b5 = B()
c = C()
c2 = C()
c3 = C()
c4 = C()
c5 = C()
# Exercise attribute writes and reads across the hierarchy.
a.x = 1
b.x = a.x
c.z = a.x == b.x
| [
"[email protected]"
] | |
35e4783fdc79bd2c34545d469bcb5e0bd5cd5ee1 | 969fed6b9f4c0daa728bda52fea73d94bda6faad | /fakeTempControl/oxford/SIM_MERCURY.py | 8052e93e5bcb55584ef3d3095d82cb5133ffb948 | [] | no_license | ess-dmsc/essiip-fakesinqhw | 7d4c0cb3e412a510db02f011fb9c20edfbd8a84f | ad65844c99e64692f07e7ea04d624154a92d57cd | refs/heads/master | 2021-01-18T22:50:50.182268 | 2020-10-01T08:39:30 | 2020-10-01T08:39:30 | 87,077,121 | 0 | 0 | null | 2018-12-07T08:43:00 | 2017-04-03T13:28:23 | Python | UTF-8 | Python | false | false | 4,643 | py | #!/usr/bin/env python
# vim: ft=python ts=8 sts=4 sw=4 expandtab autoindent smartindent nocindent
# Author: Douglas Clowes ([email protected]) 2013-06-03
from twisted.internet.task import LoopingCall
from twisted.internet import reactor
from twisted.python import log, usage
from MercurySCPI import MercurySCPI as MYBASE
from MercuryFactory import MercuryFactory
from MercuryProtocol import MercuryProtocol
import os
import sys
sys.path.insert(0, os.path.realpath(os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]),"../../util"))))
from displayscreen import Screen
class MyOptions(usage.Options):
    """Command-line options for the fake Mercury device server (twisted usage)."""
    # Boolean flags: -w/--window enables the curses display.
    optFlags = [
        ["window", "w", "Create a display window"],
        ]
    # Valued options: -l/--logfile and -p/--port.
    optParameters = [
        ["logfile", "l", None, "output logfile name"],
        ["port", "p", None, "port number to listen on"],
        ]
    def __init__(self):
        usage.Options.__init__(self)
        # collects any positional arguments (see parseArgs)
        self['files'] = []
    def parseArgs(self, *args):
        # accept arbitrary positional arguments and stash them under 'files'
        for arg in args:
            self['files'].append(arg)
class MyScreen(Screen):
    """Curses screen that feeds typed lines to the fake device and echoes replies."""
    def __init__(self, stdscr):
        Screen.__init__(self, stdscr)
    def sendLine(self, txt):
        # Route a line typed on the console into the device as if it came
        # over the wire; the device replies through this object's write().
        global myDev
        myDev.protocol = self
        myDev.dataReceived(txt)
    def write(self, txt):
        # Append the device's response to the last displayed line as
        # "<command> => <response>".  Best-effort: any display error is
        # deliberately swallowed so device processing is never disturbed.
        try:
            newLine = self.lines[-1] + " => " + txt
            del self.lines[-1]
            self.addLine(newLine)
        except:
            pass
class MYDEV(MYBASE):
    """Concrete fake Mercury device; all behaviour comes from MercurySCPI."""
    def __init__(self):
        MYBASE.__init__(self)
        # trace construction (this file is Python 2: print statement)
        print MYDEV.__name__, "ctor"
def device_display():
    """Periodic tick: advance the device simulation and redraw the curses UI.

    Called from a twisted LoopingCall; reads the module globals set up in
    the __main__ block.
    """
    global screen, myDev, myOpts, myPort, myFactory
    try:
        myDev.doIteration();
    except:
        # simulation errors are fatal: re-raise so the reactor surfaces them
        raise
    # without -w/--window there is nothing to draw
    if not myOpts["window"]:
        return
    try:
        rows, cols = screen.stdscr.getmaxyx()
        # header row: link count, random factor, identity and port
        screen.stdscr.addstr(0, 0, "Lnks:%2d" % myFactory.numProtocols)
        screen.stdscr.addstr(0, 10, "Rnd:%6.3f" % myDev.RANDOM)
        screen.stdscr.addstr(0, 22, "Identity : %s (%d)" % (myDev.IDN, myPort))
        screen.stdscr.addstr(1, 0, "Valve: %8.4f%%" % myDev.valve_open)
        screen.stdscr.addstr(1, 20, "Helium: %8.4f%%" % myDev.hlev)
        screen.stdscr.addstr(1, 40, "Nitrogen: %8.4f%%" % myDev.nlev)
        base = 1
        # row labels for the per-loop columns below
        screen.stdscr.addstr(base + 1, 0, "Sensor :")
        screen.stdscr.addstr(base + 2, 0, "PV :")
        screen.stdscr.addstr(base + 3, 0, "Setpoint :")
        screen.stdscr.addstr(base + 4, 0, "T Delta :")
        screen.stdscr.addstr(base + 5, 0, "PV Delta :")
        # one 12-character column per sensor/loop; stop before running off
        # the right edge of the terminal
        for idx in myDev.CONFIG_SNSRS:
            if 12 + (idx - 1) * 12 > cols - 1:
                break
            screen.stdscr.addstr(base + 1, 12 + (idx - 1) * 12, "%8.3f" % myDev.Loops[idx].sensor)
        for idx in myDev.CONFIG_LOOPS:
            if 12 + (idx - 1) * 12 > cols - 1:
                break
            screen.stdscr.addstr(base + 2, 12 + (idx - 1) * 12, "%8.3f" % myDev.Loops[idx].pv)
            screen.stdscr.addstr(base + 3, 12 + (idx - 1) * 12, "%8.3f" % myDev.Loops[idx].setpoint)
            screen.stdscr.addstr(base + 4, 12 + (idx - 1) * 12, "%8.3f" % (myDev.Loops[idx].setpoint - myDev.Loops[idx].sensor))
            screen.stdscr.addstr(base + 5, 12 + (idx - 1) * 12, "%8.3f" % (myDev.Loops[idx].setpoint - myDev.Loops[idx].pid_delta))
    except:
        # best-effort rendering: e.g. a too-small terminal makes addstr fail
        pass
    finally:
        try:
            screen.stdscr.refresh()
        except:
            pass
if __name__ == "__main__":
    # Entry point: parse options, configure logging, optionally start the
    # curses window, then serve the fake device over TCP via twisted.
    global screen, myDev, myOpts, myPort, myFactory
    myOpts = MyOptions()
    try:
        myOpts.parseOptions()
    except usage.UsageError, errortext:
        print '%s: %s' % (sys.argv[0], errortext)
        print '%s: Try --help for usage details.' % (sys.argv[0])
        raise SystemExit, 1
    myDev = MYDEV()
    default_port = 7020
    myPort = default_port
    logfile = None
    if myOpts["port"]:
        myPort = int(myOpts["port"])
        # reject privileged/invalid ports and fall back to the default
        if myPort < 1025 or myPort > 65535:
            myPort = default_port
    # with a curses window stdout is unusable, so default to a file log
    if myOpts["window"]:
        logfile = "/tmp/Fake_Mercury_%d.log" % (myPort)
    if myOpts["logfile"]:
        logfile = myOpts["logfile"]
    if logfile:
        log.startLogging(open(logfile, "w"))
    else:
        log.startLogging(sys.stdout)
    #log.startLogging(sys.stderr)
    if myOpts["window"]:
        import curses
        stdscr = curses.initscr()
        screen = MyScreen(stdscr)
        # add screen object as a reader to the reactor
        reactor.addReader(screen)
    myFactory = MercuryFactory(MercuryProtocol, myDev, "\r")
    # redraw/advance the simulation 4 times a second
    lc = LoopingCall(device_display)
    lc.start(0.250)
    reactor.listenTCP(myPort, myFactory) # server
    reactor.run()
"[email protected]"
] | |
47db47b32423507921839a5579f5c66157eed44b | d115cf7a1b374d857f6b094d4b4ccd8e9b1ac189 | /pyplusplus_dev/pyplusplus/_logging_/__init__.py | a888e10fb87659587d0fef6b90646c31533e1bb1 | [
"BSL-1.0"
] | permissive | gatoatigrado/pyplusplusclone | 30af9065fb6ac3dcce527c79ed5151aade6a742f | a64dc9aeeb718b2f30bd6a5ff8dcd8bfb1cd2ede | refs/heads/master | 2016-09-05T23:32:08.595261 | 2010-05-16T10:53:45 | 2010-05-16T10:53:45 | 700,369 | 4 | 2 | null | null | null | null | UTF-8 | Python | false | false | 2,227 | py | # Copyright 2004-2008 Roman Yakovenko.
# Distributed under the Boost Software License, Version 1.0. (See
# accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
#TODO: find better place for it
"""defines logger classes"""
import os
import sys
import logging
import cStringIO
from multi_line_formatter import multi_line_formatter_t
def create_handler( stream=None ):
    """Create a logging.StreamHandler with the project's multi-line formatter.

    :param stream: destination stream; when None, StreamHandler falls back
                   to sys.stderr.
    """
    # The original if/else had two identical branches; StreamHandler(None)
    # already defaults to sys.stderr, so a single call is equivalent.
    handler = logging.StreamHandler(stream)
    handler.setFormatter( multi_line_formatter_t( os.linesep + '%(levelname)s: %(message)s' ) )
    return handler
def _create_logger_( name, stream=None ):
    """Create a non-propagating, INFO-level logger (implementation details)."""
    new_logger = logging.getLogger(name)
    new_logger.propagate = False
    new_logger.setLevel(logging.INFO)
    new_logger.addHandler( create_handler(stream) )
    return new_logger
class loggers:
    """class-namespace, defines the few logger objects used in the project"""
    # shared in-memory stream installed by make_inmemory(); None means the
    # loggers write to their original streams
    stream = None
    file_writer = _create_logger_( 'pyplusplus.file_writer' )
    """logger for classes that write code to files"""
    declarations = _create_logger_( 'pyplusplus.declarations' )
    """logger for declaration classes
    This is a very important logger. All important messages: problems with declarations,
    warnings or hints are written to this logger.
    """
    module_builder = _create_logger_( 'pyplusplus.module_builder' )
    """logger that is in use by :class:`module_builder.module_builder_t` class.
    Just another logger. It exists mostly for `Py++` developers.
    """
    #root logger exists for configuration purpose only
    root = logging.getLogger( 'pyplusplus' )
    """root logger exists for your convenience only"""
    all = [ root, file_writer, module_builder, declarations ]
    """contains all logger classes, defined by the class"""
    @staticmethod
    def make_inmemory():
        # Redirect every project logger into a single in-memory buffer:
        # strip the existing handlers and attach a fresh StringIO-backed one.
        # (Python 2 module: cStringIO and eager map() are used here.)
        loggers.stream = cStringIO.StringIO()
        for logger in loggers.all:
            map( lambda h: logger.removeHandler( h ), logger.handlers[:] )
            logger.addHandler( create_handler( loggers.stream ) )
| [
"roman_yakovenko@dc5859f9-2512-0410-ae5c-dd123cda1f76"
] | roman_yakovenko@dc5859f9-2512-0410-ae5c-dd123cda1f76 |
f5fded1a9cdae582a279e092b4b999bd1c6375da | c1c39c5e9456a4c175c651ba224a53c4a76f902a | /helpers/azure.py | dd8023fe4596e2fb34fbe94615ca383e28d235ef | [] | no_license | syllogy/cloud_sizes | 5312c190c88303e78601496f3cc0206e5f7d0991 | b97b782a2e786373992ca0ca51b40625d2d2ea91 | refs/heads/master | 2023-07-11T19:00:57.057759 | 2021-08-27T03:53:08 | 2021-08-27T03:54:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 987 | py | #!/usr/bin/env python3
from netaddr import IPSet, IPNetwork
from itertools import chain
from requests import get
import re
def get_and_parse():
    """Download Microsoft's published Azure IP ranges and split them by family.

    Returns:
        ("azure", "Azure", v4, v6, True) where v4/v6 are netaddr IPSets of the
        IPv4 and IPv6 prefixes respectively.

    Raises:
        RuntimeError: if the download page no longer links to the ranges JSON.
    """
    # I'm shocked, shocked I tell you to see that MS requires you do
    # something oddball like dig into an HTML page to get the latest
    # data file.
    url = "https://www.microsoft.com/en-us/download/confirmation.aspx?id=56519"
    # timeout so a hung download page cannot stall the whole run forever
    data = get(url, timeout=60).text
    m = re.search(r'(?P<json>https://download.*?\.json)', data)
    if m is None:
        # fail loudly with context instead of an opaque AttributeError
        raise RuntimeError("could not find Azure ranges JSON link on %s" % url)
    data = get(m.group("json"), timeout=60).json()
    # Pull out all of the IPs: flatten every service tag's addressPrefixes
    azure = IPSet(IPNetwork(y) for y in chain.from_iterable(
        x['properties']['addressPrefixes'] for x in data['values']))
    # Pull out the v4 and v6 cidrs
    v4 = IPSet([x for x in azure.iter_cidrs() if x.network.version == 4])
    v6 = IPSet([x for x in azure.iter_cidrs() if x.network.version == 6])
    return "azure", "Azure", v4, v6, True
if __name__ == "__main__":
    # This file is a helper module imported by the main tool; running it
    # directly only prints a hint.
    print("This module is not meant to be run directly")
| [
"[email protected]"
] | |
9640b58c5a0cb8df3d449504d5764902b2ec7211 | 59df4e1fd50d2e81b6490bb5322084165033cefc | /seed.py | b773b252a7d0b6c0a87526060a0884f95e697567 | [] | no_license | JKinsler/ratings-lab | 0cfafd345e27f19dfcedb249a23d5bf7fc6eebb0 | 6c7196c36dd6c09074b84deca0653ae445ca7651 | refs/heads/master | 2023-02-08T11:03:30.762832 | 2020-02-06T20:12:57 | 2020-02-06T20:12:57 | 238,305,896 | 1 | 0 | null | 2023-02-02T05:13:18 | 2020-02-04T21:03:13 | Python | UTF-8 | Python | false | false | 3,561 | py | """Utility file to seed ratings database from MovieLens data in seed_data/"""
from sqlalchemy import func
from datetime import datetime
from model import User
from model import Rating
from model import Movie
from model import connect_to_db, db
from server import app
def load_users():
    """Load users from u.user into database.

    u.user is a pipe-delimited MovieLens file with fields
    user_id|age|gender|occupation|zipcode; only id, age and zipcode
    are stored on the User model.
    """
    print("Users")
    # Delete all rows in table, so if we need to run this a second time,
    # we won't be trying to add duplicate users
    User.query.delete()
    # Read u.user file and insert data; `with` guarantees the file handle
    # is closed (the original left it open).
    with open("seed_data/u.user") as user_file:
        for row in user_file:
            # gender and occupation are present in the file but unused here
            user_id, age, _gender, _occupation, zipcode = row.rstrip().split("|")
            user = User(user_id=user_id,
                        age=age,
                        zipcode=zipcode)
            # We need to add to the session or it won't ever be stored
            db.session.add(user)
    # Once we're done, we should commit our work
    db.session.commit()
def load_movies():
    """Load movies from u.item into database.

    u.item is pipe-delimited: movie_id|title (YYYY)|release_date|...|imdb_url.
    The trailing "(YYYY)" token is stripped from the title and the release
    date (e.g. "01-Jan-1995") is parsed into a datetime.
    """
    print("Movies")
    # Delete all rows in table, so if we need to run this a second time,
    # we won't be trying to add duplicate users
    Movie.query.delete()
    # Read u.item file and insert data; `with` guarantees the file handle
    # is closed (the original left it open).
    with open("seed_data/u.item") as movie_file:
        for row in movie_file:
            movie_data = row.rstrip().split("|")
            # Drop the last whitespace-separated token of the title, which is
            # the "(YYYY)" year suffix (same split/pop/join result as before,
            # but tolerant of an empty title).
            title = ' '.join(movie_data[1].split()[:-1])
            # u.item dates look like "01-Jan-1995"
            date_released = datetime.strptime(movie_data[2], "%d-%b-%Y")
            movie = Movie(movie_id=movie_data[0],
                          title=title,
                          release_at=date_released,
                          imdb_url=movie_data[4])
            # We need to add to the session or it won't ever be stored
            db.session.add(movie)
    # Once we're done, we should commit our work
    db.session.commit()
def load_ratings():
    """Load ratings from u.data into database.

    u.data is whitespace-delimited: user_id movie_id score timestamp;
    the timestamp is ignored.
    """
    print("Ratings")
    # Delete all rows in table, so if we need to run this a second time,
    # we won't be trying to add duplicate ratings
    Rating.query.delete()
    # Read u.data file and insert data; `with` guarantees the file handle
    # is closed (the original left it open).
    with open("seed_data/u.data") as rating_file:
        for row in rating_file:
            # only the first three fields are used (timestamp dropped)
            user_id, movie_id, score = row.rstrip().split()[:3]
            rating = Rating(user_id=user_id,
                            movie_id=movie_id,
                            score=score)
            # We need to add to the session or it won't ever be stored
            db.session.add(rating)
    # Once we're done, we should commit our work
    db.session.commit()
def set_val_user_id():
    """Set value for the next user_id after seeding database"""
    # Highest user_id currently present in the seeded table.
    max_id = int(db.session.query(func.max(User.user_id)).one()[0])
    # Advance the PostgreSQL sequence so newly created users get ids
    # above everything we just seeded.
    db.session.execute("SELECT setval('users_user_id_seq', :new_id)",
                       {'new_id': max_id + 1})
    db.session.commit()
if __name__ == "__main__":
    connect_to_db(app)
    # In case tables haven't been created, create them
    db.create_all()
    # Import different types of data (users/movies must exist before the
    # ratings that reference them; the sequence fix runs last)
    load_users()
    load_movies()
    load_ratings()
    set_val_user_id()
| [
"[email protected]"
] | |
d29db2d8b3506d66b6f9f90e3eb98490ae2db3e2 | bcfa02c21a73798872bbb28303233d1f0039cf00 | /server/www/packages/packages-darwin/x64/ldap3/protocol/sasl/kerberos.py | 5000ebf430968b22f94a18fd3c42a8d85112d7ea | [
"Apache-2.0"
] | permissive | zhoulhb/teleport | 6301cd50c951bcbac21cbe24017eb8421ff57adc | 54da194697898ef77537cfe7032d774555dc1335 | refs/heads/master | 2021-11-10T17:10:59.661130 | 2021-11-09T11:16:19 | 2021-11-09T11:16:19 | 192,643,069 | 0 | 0 | Apache-2.0 | 2019-06-19T02:20:53 | 2019-06-19T02:20:52 | null | UTF-8 | Python | false | false | 5,038 | py | """
"""
# Created on 2015.04.08
#
# Author: Giovanni Cannata
#
# Copyright 2015 - 2018 Giovanni Cannata
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
# original code by Hugh Cole-Baker, modified by Peter Foley
# it needs the gssapi package
import socket
from ...core.exceptions import LDAPPackageUnavailableError, LDAPCommunicationError
try:
# noinspection PyPackageRequirements,PyUnresolvedReferences
import gssapi
except ImportError:
raise LDAPPackageUnavailableError('package gssapi missing')
from .sasl import send_sasl_negotiation, abort_sasl_negotiation
# RFC 4752 security-layer bitmask values exchanged during the GSSAPI
# SASL negotiation (first octet of the final client/server tokens).
NO_SECURITY_LAYER = 1
INTEGRITY_PROTECTION = 2
CONFIDENTIALITY_PROTECTION = 4
def sasl_gssapi(connection, controls):
"""
Performs a bind using the Kerberos v5 ("GSSAPI") SASL mechanism
from RFC 4752. Does not support any security layers, only authentication!
sasl_credentials can be empty or a tuple with one or two elements.
The first element determines which service principal to request a ticket for and can be one of the following:
- None or False, to use the hostname from the Server object
- True to perform a reverse DNS lookup to retrieve the canonical hostname for the hosts IP address
- A string containing the hostname
The optional second element is what authorization ID to request.
- If omitted or None, the authentication ID is used as the authorization ID
- If a string, the authorization ID to use. Should start with "dn:" or "user:".
"""
target_name = None
authz_id = b""
if connection.sasl_credentials:
if len(connection.sasl_credentials) >= 1 and connection.sasl_credentials[0]:
if connection.sasl_credentials[0] is True:
hostname = socket.gethostbyaddr(connection.socket.getpeername()[0])[0]
target_name = gssapi.Name('ldap@' + hostname, gssapi.NameType.hostbased_service)
else:
target_name = gssapi.Name('ldap@' + connection.sasl_credentials[0], gssapi.NameType.hostbased_service)
if len(connection.sasl_credentials) >= 2 and connection.sasl_credentials[1]:
authz_id = connection.sasl_credentials[1].encode("utf-8")
if target_name is None:
target_name = gssapi.Name('ldap@' + connection.server.host, gssapi.NameType.hostbased_service)
creds = gssapi.Credentials(name=gssapi.Name(connection.user), usage='initiate') if connection.user else None
ctx = gssapi.SecurityContext(name=target_name, mech=gssapi.MechType.kerberos, creds=creds)
in_token = None
try:
while True:
out_token = ctx.step(in_token)
if out_token is None:
out_token = ''
result = send_sasl_negotiation(connection, controls, out_token)
in_token = result['saslCreds']
try:
# This raised an exception in gssapi<1.1.2 if the context was
# incomplete, but was fixed in
# https://github.com/pythongssapi/python-gssapi/pull/70
if ctx.complete:
break
except gssapi.exceptions.MissingContextError:
pass
unwrapped_token = ctx.unwrap(in_token)
if len(unwrapped_token.message) != 4:
raise LDAPCommunicationError("Incorrect response from server")
server_security_layers = unwrapped_token.message[0]
if not isinstance(server_security_layers, int):
server_security_layers = ord(server_security_layers)
if server_security_layers in (0, NO_SECURITY_LAYER):
if unwrapped_token.message[1:] != '\x00\x00\x00':
raise LDAPCommunicationError("Server max buffer size must be 0 if no security layer")
if not (server_security_layers & NO_SECURITY_LAYER):
raise LDAPCommunicationError("Server requires a security layer, but this is not implemented")
client_security_layers = bytearray([NO_SECURITY_LAYER, 0, 0, 0])
out_token = ctx.wrap(bytes(client_security_layers)+authz_id, False)
return send_sasl_negotiation(connection, controls, out_token.message)
except (gssapi.exceptions.GSSError, LDAPCommunicationError):
abort_sasl_negotiation(connection, controls)
raise
| [
"[email protected]"
] | |
242f05cd4aae7555fbe6bf5702093febdcbb83e4 | 81eff1c9bc75cd524153400cdbd7c453ee8e3635 | /zxcar_ws/devel/lib/python2.7/dist-packages/astra_camera/srv/_GetDeviceType.py | 09fd9a871d38977c6e36077b57b4a6a1ceed594b | [] | no_license | sukai33/zxcar_all | bbacbf85c5e7c93d2e98b03958342ec01e3dafd9 | af389f095591a70cae01c1d116aa74d68223f317 | refs/heads/master | 2023-01-03T13:32:00.864543 | 2020-10-29T05:22:43 | 2020-10-29T05:22:43 | 300,556,457 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,207 | py | # This Python file uses the following encoding: utf-8
"""autogenerated by genpy from astra_camera/GetDeviceTypeRequest.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
class GetDeviceTypeRequest(genpy.Message):
_md5sum = "d41d8cd98f00b204e9800998ecf8427e"
_type = "astra_camera/GetDeviceTypeRequest"
_has_header = False # flag to mark the presence of a Header object
_full_text = """"""
__slots__ = []
_slot_types = []
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommend
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(GetDeviceTypeRequest, self).__init__(*args, **kwds)
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
pass
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
end = 0
return self
except struct.error as e:
raise genpy.DeserializationError(e) # most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
pass
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
end = 0
return self
except struct.error as e:
raise genpy.DeserializationError(e) # most likely buffer underfill
_struct_I = genpy.struct_I
def _get_struct_I():
global _struct_I
return _struct_I
# This Python file uses the following encoding: utf-8
"""autogenerated by genpy from astra_camera/GetDeviceTypeResponse.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
class GetDeviceTypeResponse(genpy.Message):
_md5sum = "4c8e9dd50b39344412b92ce9e1e9615c"
_type = "astra_camera/GetDeviceTypeResponse"
_has_header = False # flag to mark the presence of a Header object
_full_text = """string device_type
"""
__slots__ = ['device_type']
_slot_types = ['string']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommend
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
device_type
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(GetDeviceTypeResponse, self).__init__(*args, **kwds)
# message fields cannot be None, assign default values for those that are
if self.device_type is None:
self.device_type = ''
else:
self.device_type = ''
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self.device_type
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
end = 0
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.device_type = str[start:end].decode('utf-8')
else:
self.device_type = str[start:end]
return self
except struct.error as e:
raise genpy.DeserializationError(e) # most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self.device_type
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
end = 0
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.device_type = str[start:end].decode('utf-8')
else:
self.device_type = str[start:end]
return self
except struct.error as e:
raise genpy.DeserializationError(e) # most likely buffer underfill
_struct_I = genpy.struct_I
def _get_struct_I():
global _struct_I
return _struct_I
class GetDeviceType(object):
_type = 'astra_camera/GetDeviceType'
_md5sum = '4c8e9dd50b39344412b92ce9e1e9615c'
_request_class = GetDeviceTypeRequest
_response_class = GetDeviceTypeResponse
| [
"[email protected]"
] | |
de4a7ed167183486e87b3d21fa8d14dc7a5e85a7 | 0e887d0cd010434e101eece419229aa4813ad893 | /image_captioning/data/datasets/coco.py | ddf7c7b787cd99e21faba7310fda4ee6fb6bb0f6 | [] | no_license | congve1/image_captioning | 2c11d3ee80f0836853c7decf1255ac879f7a90b6 | 64cadfb9e072313f45f536f539b3cb8deb0432cd | refs/heads/master | 2020-04-10T07:49:14.748923 | 2019-01-28T10:47:33 | 2019-01-28T10:47:33 | 160,889,848 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,524 | py | import pickle
import json
import os
import torch
import lmdb
import numpy as np
import logging
import time
class COCODataset(torch.utils.data.dataset.Dataset):
def __init__(
self,
root,
att_features_paths_file,
fc_features_paths_file,
encoded_captions_file,
encoded_captions_lens_file,
cocoids_file,
seq_per_img,
**kwargs
):
self.root = root
self.seq_per_img = seq_per_img
with open(att_features_paths_file, 'r') as f:
self.att_features_paths = json.load(f)
with open(fc_features_paths_file, 'r') as f:
self.fc_features_paths = json.load(f)
with open(cocoids_file, 'r') as f:
self.cocoids = json.load(f)
self.encoded_captions = torch.load(encoded_captions_file,
map_location='cpu')
self.encoded_captions_lens = torch.load(encoded_captions_lens_file,
map_location='cpu')
def __getitem__(self, index):
att_feature = torch.load(
self.att_features_paths[index//self.seq_per_img],
map_location='cpu'
)
fc_feature = torch.load(
self.fc_features_paths[index//self.seq_per_img],
map_location='cpu'
)
cap_len = self.encoded_captions_lens[index]
caption = self.encoded_captions[index]
all_captions = self.encoded_captions[
(index//self.seq_per_img)*self.seq_per_img:
((index//self.seq_per_img)+1)*self.seq_per_img
]
cocoid = self.cocoids[index//self.seq_per_img]
data = dict()
data['att_feature'] = att_feature.unsqueeze(0)
data['fc_feature'] = fc_feature.unsqueeze(0)
data['cap_len'] = cap_len
data['caption'] = caption
data['all_captions'] = all_captions
data['cocoid'] = cocoid
return att_feature.unsqueeze(0), fc_feature.unsqueeze(0), caption, cap_len, all_captions, cocoid
def __len__(self):
return len(self.encoded_captions_lens)
class COCODatasetLMDB(torch.utils.data.dataset.Dataset):
def __init__(
self,
root,
att_features_lmdb,
fc_features_lmdb,
encoded_captions_file,
encoded_captions_lens_file,
cocoids_file,
seq_per_img,
att_feature_shape,
fc_feature_shape,
):
self.root = root
self.seq_per_img = seq_per_img
self.att_feature_shape = att_feature_shape
self.fc_feature_shape = fc_feature_shape
with open(cocoids_file, 'r') as f:
self.cocoids = json.load(f)
self.encoded_captions = torch.load(
encoded_captions_file,
map_location='cpu'
)
self.encoded_captions_lens = torch.load(
encoded_captions_lens_file,
map_location='cpu'
)
self.att_features_lmdb = lmdb.open(
att_features_lmdb, readonly=True, max_readers=1,
lock=False, readahead=False, meminit=False
)
self.fc_features_lmdb = lmdb.open(
fc_features_lmdb, readonly=True, max_readers=1,
lock=False, readahead=False, meminit=False
)
def __getitem__(self, index):
att_features_lmdb = self.att_features_lmdb
fc_features_lmdb = self.fc_features_lmdb
cocoid = self.cocoids[index//self.seq_per_img]
cocoid_enc = "{:8d}".format(cocoid).encode()
with att_features_lmdb.begin(write=False) as txn:
att_feature = txn.get(cocoid_enc)
att_feature = np.frombuffer(att_feature, dtype=np.float32)
att_feature = att_feature.reshape(self.att_feature_shape)
att_feature = torch.from_numpy(att_feature)
with fc_features_lmdb.begin(write=False) as txn:
fc_feature = txn.get(cocoid_enc)
fc_feature = np.frombuffer(fc_feature, dtype=np.float32)
fc_feature = fc_feature.reshape(self.fc_feature_shape)
fc_feature = torch.from_numpy(fc_feature)
caption = self.encoded_captions[index]
caption_len = self.encoded_captions_lens[index]
all_captions = self.encoded_captions[
(index//self.seq_per_img)*self.seq_per_img:
((index//self.seq_per_img)+1)*self.seq_per_img
]
return att_feature, fc_feature, caption, caption_len, all_captions, cocoid
def __len__(self):
return len(self.encoded_captions_lens)
| [
"[email protected]"
] | |
8464caf19dff35b15183b1d7669a91eeb8c8a1aa | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02712/s290713931.py | 2f012061dba1cd50f56bd01af90ace84c3b45931 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 150 | py | def resolve():
n = int(input())
ans = 0
for i in range(1,n+1):
if i%3!=0 and i%5!=0:
ans += i
print(ans)
resolve() | [
"[email protected]"
] | |
d1e7e62a9d44e4e9e7610fc1d65b6f674190a87f | 649bd422025e421d86025743eac324c9b882a2e8 | /exam/1_three-dimensional_atomic_system/dump/phasetrans/temp39_5000.py | 29c3c85bcf5c2204ff27a92509062b40b32f8297 | [] | no_license | scheuclu/atom_class | 36ddee1f6a5995872e858add151c5942c109847c | 0c9a8c63d9b38898c1869fe8983126cef17662cd | refs/heads/master | 2021-01-21T10:52:28.448221 | 2017-03-07T23:04:41 | 2017-03-07T23:04:41 | 83,489,471 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 68,887 | py | ITEM: TIMESTEP
5000
ITEM: NUMBER OF ATOMS
2048
ITEM: BOX BOUNDS pp pp pp
7.5068537206807662e-01 4.6449314627930377e+01
7.5068537206807662e-01 4.6449314627930377e+01
7.5068537206807662e-01 4.6449314627930377e+01
ITEM: ATOMS id type xs ys zs
8 1 0.121758 0.058572 0.0589443
35 1 0.0657156 0.12252 0.0621003
130 1 0.0596188 0.0579964 0.119268
165 1 0.120955 0.124623 0.123852
4 1 0.998358 0.060293 0.0630413
529 1 0.4999 0.0028381 0.50232
157 1 0.877879 0.00292259 0.127808
1545 1 0.249464 0.498355 0.498707
1437 1 0.872011 0.500278 0.371233
12 1 0.245815 0.054499 0.0646524
39 1 0.183149 0.120109 0.0581227
43 1 0.310374 0.123157 0.0573648
134 1 0.18413 0.0614764 0.12203
138 1 0.310009 0.0594074 0.1251
169 1 0.250985 0.117757 0.11759
133 1 0.123192 0.00101281 0.124813
517 1 0.128825 -0.000420227 0.500043
1435 1 0.808862 0.497389 0.437086
287 1 0.94097 0.00746005 0.31242
1439 1 0.937119 0.49925 0.433094
126 1 0.935772 0.43593 0.00114865
16 1 0.373926 0.0622225 0.063841
47 1 0.431975 0.128575 0.060588
142 1 0.442034 0.0637686 0.123524
173 1 0.372627 0.123082 0.12359
20 1 0.505102 0.0606663 0.0618842
177 1 0.499706 0.124292 0.114763
512 1 0.876805 0.435466 0.43424
1413 1 0.126736 0.498404 0.377649
149 1 0.627315 -0.00373566 0.124541
24 1 0.624646 0.0672208 0.0679062
51 1 0.567129 0.123396 0.067843
146 1 0.55643 0.0623638 0.129298
181 1 0.625827 0.129378 0.129421
511 1 0.938829 0.374069 0.439332
94 1 0.933518 0.31385 0.00328413
153 1 0.751841 0.00210927 0.122782
28 1 0.753254 0.0619327 0.0557759
55 1 0.685718 0.128101 0.0594816
59 1 0.814752 0.126312 0.0624364
150 1 0.684319 0.0671745 0.123824
154 1 0.813859 0.064354 0.119806
185 1 0.749826 0.123459 0.126332
510 1 0.936745 0.436082 0.371372
387 1 0.0652053 0.00426827 0.437261
509 1 0.874709 0.373651 0.380012
151 1 0.688717 0.00236977 0.185034
161 1 0.0036233 0.124917 0.121295
32 1 0.876098 0.0648643 0.063624
63 1 0.938357 0.120754 0.0586687
158 1 0.941356 0.0657517 0.129667
189 1 0.874457 0.120056 0.130156
569 1 0.753055 0.125166 0.502056
155 1 0.812983 0.00690763 0.183537
40 1 0.125086 0.182955 0.0598963
67 1 0.0620775 0.245924 0.0580646
72 1 0.125766 0.310767 0.0627179
162 1 0.0675743 0.189851 0.126325
194 1 0.0620894 0.312353 0.121928
197 1 0.13153 0.248765 0.122406
193 1 -0.00158864 0.25072 0.12488
283 1 0.813054 0.000492586 0.316246
279 1 0.683538 0.00325612 0.309613
277 1 0.628304 0.002408 0.247509
1293 1 0.378026 0.502305 0.24918
65 1 0.997671 0.248942 -0.00195186
44 1 0.252664 0.187673 0.060867
71 1 0.18938 0.247468 0.0572194
75 1 0.316654 0.249191 0.0621181
76 1 0.255657 0.311777 0.0606825
166 1 0.189157 0.185589 0.119575
170 1 0.312539 0.188992 0.12852
198 1 0.191035 0.31549 0.122251
201 1 0.24366 0.250947 0.124443
202 1 0.319594 0.311354 0.125291
630 1 0.68519 0.439551 0.493959
626 1 0.556744 0.439507 0.49568
48 1 0.373101 0.186222 0.0608044
79 1 0.435622 0.252903 0.0638071
80 1 0.375973 0.3119 0.064777
174 1 0.431026 0.18651 0.124345
205 1 0.372361 0.248709 0.128824
206 1 0.436694 0.311619 0.124062
84 1 0.497552 0.31307 0.0625616
209 1 0.498225 0.24896 0.123572
52 1 0.496669 0.190402 0.0596287
56 1 0.627596 0.189689 0.0616254
83 1 0.56717 0.248555 0.055723
88 1 0.625055 0.310131 0.0587769
178 1 0.559983 0.189012 0.125392
210 1 0.566993 0.30622 0.11962
213 1 0.630606 0.246345 0.122209
281 1 0.74729 0.000812234 0.250073
60 1 0.751268 0.183809 0.0569283
87 1 0.687137 0.248891 0.0616348
91 1 0.813028 0.249002 0.0606998
92 1 0.75124 0.310388 0.0593712
182 1 0.698188 0.187218 0.124255
186 1 0.805642 0.193389 0.122938
214 1 0.687007 0.31196 0.126033
217 1 0.750461 0.256795 0.124552
218 1 0.809928 0.314053 0.126518
634 1 0.808358 0.435949 0.497141
546 1 0.0650096 0.181395 0.495536
1561 1 0.748913 0.502005 0.495137
68 1 0.999191 0.305538 0.0611265
36 1 1.00022 0.191796 0.0648101
64 1 0.875636 0.187238 0.0685257
95 1 0.934503 0.246324 0.0658631
96 1 0.875993 0.311539 0.0654339
190 1 0.934385 0.185427 0.129537
221 1 0.871159 0.249006 0.128886
222 1 0.934495 0.310071 0.131591
99 1 0.0636752 0.375702 0.0637083
104 1 0.124966 0.432453 0.0610073
226 1 0.0617737 0.436781 0.123571
229 1 0.12916 0.374246 0.123462
225 1 0.995792 0.373027 0.124833
103 1 0.193444 0.373967 0.061916
107 1 0.315441 0.376126 0.065408
108 1 0.250716 0.437268 0.0591993
230 1 0.187187 0.441444 0.123869
233 1 0.256128 0.373229 0.126387
234 1 0.316938 0.437597 0.120317
263 1 0.189654 -0.00117489 0.308592
53 1 0.626087 0.124695 -0.00112051
19 1 0.563051 0.000464923 0.0662376
111 1 0.440613 0.379951 0.0645694
112 1 0.375435 0.439242 0.0610576
237 1 0.378057 0.379546 0.125126
238 1 0.441019 0.438119 0.124049
116 1 0.50079 0.438875 0.0638467
1411 1 0.0585582 0.494165 0.437284
508 1 0.747951 0.434528 0.432202
21 1 0.631327 -0.000680646 0.00385419
593 1 0.497438 0.250702 0.496495
241 1 0.50091 0.372685 0.124378
115 1 0.559125 0.376714 0.0609987
120 1 0.627586 0.440749 0.0584936
242 1 0.566833 0.439836 0.125204
245 1 0.625692 0.375349 0.122784
118 1 0.688305 0.435379 0.00127316
507 1 0.814042 0.373336 0.434754
506 1 0.813942 0.43782 0.376696
119 1 0.691172 0.370317 0.0585065
123 1 0.808916 0.373964 0.0621546
124 1 0.751649 0.435983 0.0617957
246 1 0.693842 0.44017 0.118864
249 1 0.74973 0.37447 0.125263
250 1 0.813593 0.435008 0.124195
505 1 0.751208 0.373042 0.376369
405 1 0.621404 0.00225951 0.367083
261 1 0.120834 0.00288589 0.252862
100 1 0.9969 0.439546 0.0649233
127 1 0.938058 0.371648 0.0621113
128 1 0.870658 0.440649 0.0593219
253 1 0.872457 0.377462 0.124828
254 1 0.937705 0.435528 0.129625
29 1 0.873961 0.00011144 0.00289383
503 1 0.686836 0.373315 0.433273
136 1 0.121025 0.0614066 0.18618
163 1 0.06323 0.120623 0.186336
258 1 0.0611854 0.0647285 0.248766
264 1 0.122193 0.0607799 0.309324
291 1 0.0661837 0.127426 0.308504
293 1 0.127238 0.126999 0.241331
132 1 0.00617616 0.0572664 0.18476
260 1 0.000757671 0.0653824 0.311059
289 1 0.00118341 0.123875 0.245589
1295 1 0.439093 0.499642 0.31502
597 1 0.625932 0.251855 0.497587
1179 1 0.815015 0.495985 0.186797
49 1 0.498865 0.121995 0.00771856
140 1 0.245071 0.0622996 0.181947
167 1 0.190211 0.125974 0.181876
171 1 0.309709 0.126984 0.189424
262 1 0.182668 0.0636008 0.245119
266 1 0.313478 0.0585878 0.243803
268 1 0.249265 0.0637293 0.303433
295 1 0.187577 0.128377 0.307111
297 1 0.249786 0.122944 0.248843
299 1 0.315891 0.122233 0.304919
403 1 0.559851 -0.000495284 0.438007
393 1 0.249714 0.00275317 0.372882
1541 1 0.121161 0.496888 0.499172
144 1 0.376427 0.0616385 0.188816
175 1 0.436055 0.12153 0.187297
270 1 0.435277 0.0665724 0.252044
272 1 0.3737 0.062903 0.310578
301 1 0.373749 0.127256 0.246671
303 1 0.433748 0.124311 0.315413
276 1 0.500886 0.0597201 0.316422
305 1 0.497364 0.125751 0.253326
502 1 0.684712 0.437359 0.374925
148 1 0.496513 0.0565015 0.190569
152 1 0.622572 0.0592656 0.181894
179 1 0.555401 0.126118 0.187101
274 1 0.565191 0.0650265 0.246389
280 1 0.620876 0.0691536 0.311418
307 1 0.559319 0.125999 0.312231
309 1 0.628875 0.12599 0.248062
514 1 0.0640562 0.0626954 0.49734
156 1 0.751345 0.0643039 0.189544
183 1 0.68793 0.127598 0.190361
187 1 0.812362 0.120584 0.189297
278 1 0.686025 0.064702 0.25297
282 1 0.813166 0.0608465 0.249989
284 1 0.750295 0.0581787 0.311199
311 1 0.692462 0.124184 0.31822
313 1 0.7505 0.125176 0.253121
315 1 0.816728 0.12192 0.310265
160 1 0.876842 0.0657998 0.192061
191 1 0.939783 0.125214 0.185331
286 1 0.940792 0.0679575 0.248748
288 1 0.874652 0.0609606 0.314595
317 1 0.872687 0.127143 0.251116
319 1 0.939618 0.127404 0.3139
558 1 0.439104 0.191641 0.495904
159 1 0.939554 0.00388893 0.187451
168 1 0.130981 0.186064 0.183337
195 1 0.0662406 0.248504 0.186608
200 1 0.12462 0.309844 0.183955
290 1 0.0649318 0.187681 0.246449
296 1 0.129034 0.192133 0.314016
322 1 0.0620802 0.308843 0.252227
323 1 0.0613101 0.247859 0.313334
325 1 0.128432 0.254414 0.254625
328 1 0.123509 0.311927 0.31467
164 1 0.0049422 0.187174 0.184884
292 1 1.00186 0.181017 0.313505
321 1 0.00162419 0.243766 0.246965
172 1 0.245328 0.190286 0.185914
199 1 0.189675 0.252217 0.187067
203 1 0.312181 0.253009 0.189691
204 1 0.250082 0.311463 0.182997
294 1 0.18267 0.188915 0.249941
298 1 0.315182 0.192713 0.246007
300 1 0.250008 0.186311 0.311657
326 1 0.189792 0.312582 0.250856
327 1 0.186067 0.25531 0.317331
329 1 0.247412 0.248668 0.254271
330 1 0.310715 0.316876 0.250867
331 1 0.31594 0.249602 0.30715
332 1 0.252521 0.316058 0.3104
176 1 0.375086 0.187273 0.189353
207 1 0.438346 0.251413 0.184849
208 1 0.37761 0.311936 0.190053
302 1 0.437205 0.18358 0.249734
304 1 0.381255 0.185957 0.310839
333 1 0.379612 0.251136 0.251415
334 1 0.438691 0.313175 0.251342
335 1 0.43926 0.251261 0.313341
336 1 0.375568 0.315865 0.314962
308 1 0.500299 0.185899 0.312454
212 1 0.502669 0.310199 0.186458
180 1 0.497223 0.186079 0.187428
340 1 0.495686 0.315478 0.314796
337 1 0.504312 0.249428 0.254915
184 1 0.62028 0.188628 0.190794
211 1 0.563571 0.250133 0.186314
216 1 0.622064 0.308594 0.187609
306 1 0.5649 0.187583 0.252212
312 1 0.623082 0.185981 0.31229
338 1 0.56547 0.311684 0.24645
339 1 0.561926 0.249528 0.315014
341 1 0.624547 0.248136 0.251119
344 1 0.626478 0.309389 0.314662
188 1 0.7524 0.189035 0.192408
215 1 0.686782 0.25233 0.189017
219 1 0.811231 0.255884 0.189892
220 1 0.748175 0.314171 0.192363
310 1 0.689192 0.189126 0.251463
314 1 0.812614 0.18689 0.256784
316 1 0.749651 0.184053 0.316675
342 1 0.686997 0.311207 0.254139
343 1 0.686184 0.248 0.315239
345 1 0.750841 0.251459 0.250534
346 1 0.812882 0.314554 0.253469
347 1 0.811721 0.25092 0.314639
348 1 0.746014 0.308483 0.314289
324 1 1.00097 0.309314 0.314811
196 1 0.00635607 0.306134 0.184186
192 1 0.870776 0.185378 0.18257
223 1 0.938148 0.245643 0.18817
224 1 0.870532 0.314837 0.187698
318 1 0.933926 0.186584 0.246966
320 1 0.878269 0.188893 0.309845
349 1 0.87574 0.250895 0.249268
350 1 0.938863 0.312436 0.244878
351 1 0.940205 0.247113 0.308881
352 1 0.875276 0.311819 0.313365
227 1 0.0609381 0.374037 0.185224
232 1 0.126789 0.436061 0.185524
354 1 0.0687151 0.43646 0.245666
355 1 0.0648274 0.374729 0.314661
357 1 0.124953 0.371541 0.252105
360 1 0.126141 0.435232 0.310228
353 1 0.00737459 0.378881 0.248953
228 1 0.00381557 0.436524 0.188822
267 1 0.312093 -0.00049466 0.307208
497 1 0.49499 0.377431 0.372602
231 1 0.187222 0.372828 0.19109
235 1 0.313505 0.374949 0.189294
236 1 0.248646 0.433923 0.182088
358 1 0.192508 0.439827 0.244313
359 1 0.187587 0.379727 0.312496
361 1 0.250858 0.381277 0.250927
362 1 0.314139 0.437583 0.249859
363 1 0.313008 0.3777 0.309896
364 1 0.253606 0.438689 0.31572
239 1 0.441768 0.372497 0.190964
240 1 0.378127 0.435464 0.189936
365 1 0.377515 0.37445 0.252692
366 1 0.440855 0.441743 0.253763
367 1 0.434434 0.375493 0.312563
368 1 0.379997 0.439741 0.308989
244 1 0.501217 0.440576 0.187389
369 1 0.503724 0.376941 0.249609
1309 1 0.875994 0.496591 0.252356
1181 1 0.874792 0.49866 0.127367
1565 1 0.875756 0.498908 0.499188
372 1 0.508022 0.436674 0.312475
243 1 0.56229 0.371895 0.181302
248 1 0.632584 0.437727 0.18616
370 1 0.567735 0.433438 0.246972
371 1 0.567269 0.372743 0.312967
373 1 0.63176 0.378234 0.255137
376 1 0.622851 0.436406 0.313314
1287 1 0.189665 0.495646 0.309865
501 1 0.621857 0.372332 0.372864
247 1 0.687484 0.375408 0.190952
251 1 0.810412 0.374425 0.190794
252 1 0.753058 0.434795 0.185291
374 1 0.688513 0.439401 0.255694
375 1 0.690382 0.374161 0.316567
377 1 0.747358 0.373712 0.254308
378 1 0.814447 0.435486 0.251336
379 1 0.811543 0.377306 0.31407
380 1 0.748318 0.439322 0.314531
17 1 0.497943 0.00276012 0.00291542
498 1 0.561003 0.44036 0.376134
385 1 0.999865 0.000934292 0.376688
504 1 0.621817 0.43511 0.435609
1285 1 0.128438 0.499969 0.245161
356 1 0.00184031 0.439498 0.312041
255 1 0.941522 0.374264 0.187374
256 1 0.871888 0.436971 0.190209
381 1 0.874326 0.378328 0.251864
382 1 0.943787 0.43293 0.249767
383 1 0.938133 0.373349 0.310048
384 1 0.874534 0.438204 0.311295
499 1 0.558973 0.368158 0.433046
386 1 0.064877 0.0636408 0.374009
392 1 0.129404 0.0575586 0.432717
419 1 0.0670459 0.1273 0.434088
421 1 0.130044 0.12216 0.370122
417 1 0.00544874 0.12703 0.375008
388 1 -0.000244197 0.0637013 0.442593
1283 1 0.0682755 0.49879 0.314322
500 1 0.495769 0.43184 0.438929
1429 1 0.618668 0.504868 0.374214
494 1 0.439654 0.439615 0.379303
390 1 0.18843 0.0613245 0.370132
394 1 0.312685 0.0589823 0.378104
396 1 0.250094 0.0643323 0.438472
423 1 0.189701 0.123093 0.434757
425 1 0.252303 0.118784 0.371318
427 1 0.310632 0.1241 0.435746
1171 1 0.568008 0.499429 0.182841
398 1 0.439975 0.0591869 0.372688
400 1 0.379241 0.0638334 0.442318
429 1 0.375426 0.124688 0.378197
431 1 0.4365 0.125335 0.436723
433 1 0.499048 0.124148 0.372995
404 1 0.496377 0.0651992 0.437046
496 1 0.376009 0.440814 0.437518
415 1 0.935124 5.72347e-05 0.445273
409 1 0.745756 -0.000689034 0.371486
402 1 0.560739 0.059479 0.368698
408 1 0.622335 0.0592218 0.439296
435 1 0.561079 0.124158 0.439661
437 1 0.623559 0.125218 0.378577
1291 1 0.314864 0.496423 0.311275
406 1 0.684947 0.0599263 0.374174
410 1 0.815994 0.0639174 0.379697
412 1 0.749387 0.061466 0.437791
439 1 0.683923 0.124642 0.43735
441 1 0.749736 0.121159 0.375072
443 1 0.811818 0.122212 0.437563
1031 1 0.188034 0.499609 0.062535
493 1 0.370022 0.378832 0.377106
414 1 0.93692 0.0615694 0.379828
416 1 0.873608 0.0616969 0.440451
445 1 0.878736 0.121915 0.378693
447 1 0.93903 0.121833 0.43909
418 1 0.0624907 0.187799 0.373822
424 1 0.12709 0.187881 0.437602
450 1 0.0616973 0.30977 0.3745
451 1 0.0612117 0.246298 0.439194
453 1 0.126112 0.245237 0.376643
456 1 0.119273 0.313442 0.438764
452 1 0.997035 0.312531 0.437166
573 1 0.871482 0.12432 0.497534
422 1 0.193485 0.186957 0.378415
426 1 0.313403 0.185759 0.369218
428 1 0.256748 0.196372 0.436127
454 1 0.183375 0.315188 0.376502
455 1 0.186643 0.249465 0.437483
457 1 0.252139 0.253187 0.376602
458 1 0.309407 0.31475 0.372479
459 1 0.314808 0.252292 0.438579
460 1 0.247343 0.309088 0.439929
1409 1 0.00174099 0.498602 0.37189
257 1 0.998864 0.00260438 0.252913
521 1 0.256174 -0.000763557 0.498856
430 1 0.435266 0.186431 0.375228
432 1 0.377291 0.193068 0.436476
461 1 0.376363 0.248815 0.369227
462 1 0.4337 0.314681 0.374554
463 1 0.434929 0.254086 0.434956
464 1 0.372315 0.316552 0.43115
468 1 0.496623 0.312206 0.434692
436 1 0.499354 0.184368 0.439305
1421 1 0.373684 0.496454 0.377061
1055 1 0.937527 0.494366 0.0645669
465 1 0.497851 0.245441 0.375794
434 1 0.561656 0.184707 0.374857
440 1 0.625771 0.188291 0.43779
466 1 0.562517 0.3068 0.375596
467 1 0.562669 0.24671 0.436285
469 1 0.627358 0.250604 0.372349
472 1 0.624344 0.312861 0.438128
1029 1 0.127575 0.498452 0.00406185
395 1 0.315662 0.00480552 0.440841
489 1 0.248989 0.372069 0.376717
474 1 0.808983 0.310106 0.37763
473 1 0.747976 0.2455 0.378493
471 1 0.687224 0.25308 0.439811
470 1 0.68893 0.312048 0.37819
476 1 0.750264 0.313718 0.440371
475 1 0.81331 0.244447 0.438752
438 1 0.688248 0.185105 0.376755
442 1 0.815682 0.185215 0.372668
444 1 0.751112 0.187272 0.439451
486 1 0.187087 0.43757 0.373392
1419 1 0.312927 0.498066 0.437536
420 1 0.00274823 0.18393 0.438084
449 1 0.998735 0.245569 0.374978
448 1 0.87644 0.186307 0.43644
446 1 0.940905 0.183089 0.379532
480 1 0.873181 0.308411 0.437074
478 1 0.934562 0.313423 0.376982
479 1 0.93882 0.247799 0.43924
477 1 0.877912 0.249941 0.374555
1047 1 0.691523 0.497523 0.059984
490 1 0.30521 0.437248 0.380208
481 1 -0.00139533 0.372541 0.369746
483 1 0.0595194 0.374613 0.432325
485 1 0.12642 0.374417 0.373764
482 1 0.0617108 0.434056 0.371718
484 1 0.998784 0.439876 0.43707
488 1 0.121673 0.434201 0.441063
495 1 0.432605 0.374863 0.439407
1051 1 0.809304 0.49941 0.0567784
491 1 0.313951 0.37703 0.440025
487 1 0.182825 0.377333 0.43545
618 1 0.319445 0.440074 0.502059
492 1 0.246527 0.434032 0.437157
125 1 0.876371 0.378322 0.00254029
557 1 0.37545 0.130688 0.499845
553 1 0.252846 0.12515 0.498654
582 1 0.185908 0.309419 0.496193
578 1 0.0617518 0.310891 0.497621
538 1 0.810159 0.0639959 0.498127
594 1 0.56288 0.3154 0.49927
121 1 0.750357 0.372788 0.000433584
601 1 0.747641 0.250041 0.501852
90 1 0.814572 0.311062 0.00171089
613 1 0.123851 0.37395 0.499244
621 1 0.374282 0.376939 0.501238
518 1 0.190035 0.063357 0.497535
622 1 0.432884 0.440806 0.5037
533 1 0.623938 -0.000145783 0.496925
629 1 0.622314 0.37576 0.497223
550 1 0.187098 0.188303 0.500558
1553 1 0.500665 0.502998 0.497335
109 1 0.373667 0.374696 0.000291177
625 1 0.492717 0.372066 0.496932
61 1 0.875302 0.122114 -0.00198008
10 1 0.311793 0.0535469 0.000124659
41 1 0.253993 0.120961 1.58014e-05
78 1 0.436094 0.316141 0.00302446
549 1 0.127649 0.122337 0.49693
117 1 0.627605 0.378247 0.00473189
114 1 0.55773 0.435181 -0.000762633
105 1 0.255909 0.374148 0.00321326
70 1 0.186348 0.307939 0.00265403
54 1 0.689772 0.186742 -0.000555825
14 1 0.431188 0.0612439 0.00674459
93 1 0.875572 0.248555 0.00145584
2 1 0.0574664 0.0581058 -0.000244689
22 1 0.687998 0.0605449 0.00340977
98 1 0.0641755 0.432895 0.00456919
74 1 0.316258 0.309007 0.0020419
97 1 1.00101 0.373305 0.00180931
18 1 0.564557 0.0607683 0.00303077
81 1 0.497673 0.249871 -0.000734111
73 1 0.254072 0.251463 0.00011784
50 1 0.558294 0.183935 0.00613584
62 1 0.935075 0.184814 0.00076914
1033 1 0.25193 0.500914 0.0034667
101 1 0.127435 0.374932 0.000771703
520 1 0.126347 0.0621627 0.561358
547 1 0.0601259 0.124776 0.557245
642 1 0.066521 0.0674857 0.621459
677 1 0.12641 0.128299 0.62336
673 1 0.00202445 0.13024 0.621019
516 1 1.00037 0.0588065 0.560662
1025 1 0.996916 0.499407 0.998923
524 1 0.252123 0.0604567 0.562308
551 1 0.19087 0.121406 0.560148
555 1 0.312639 0.124845 0.564346
646 1 0.192098 0.0642654 0.627797
650 1 0.31149 0.0654973 0.626587
681 1 0.251661 0.131008 0.624964
996 1 0.00107826 0.436774 0.937315
534 1 0.686281 0.0642217 0.498538
519 1 0.190127 -0.000218844 0.559619
528 1 0.374129 0.0654484 0.56473
559 1 0.435473 0.13026 0.559615
654 1 0.440981 0.0655679 0.618902
685 1 0.370277 0.125181 0.628748
532 1 0.501435 0.0694454 0.562196
515 1 0.0628206 0.00647217 0.558313
1921 1 0.00115534 0.497472 0.874509
1024 1 0.876592 0.436727 0.933702
689 1 0.502004 0.126014 0.627058
536 1 0.623459 0.0686708 0.561101
563 1 0.560992 0.130997 0.567443
658 1 0.562816 0.0679724 0.623046
693 1 0.628689 0.129969 0.627219
1925 1 0.127782 0.49882 0.878824
540 1 0.747524 0.0632775 0.566718
567 1 0.684687 0.125822 0.560796
571 1 0.807582 0.123752 0.561071
662 1 0.686335 0.0612372 0.626329
666 1 0.808974 0.0641091 0.628617
697 1 0.747783 0.126788 0.628297
577 1 0.0024803 0.243415 0.504426
793 1 0.74884 0.00145008 0.751619
544 1 0.870912 0.062261 0.560401
575 1 0.935708 0.127166 0.562369
670 1 0.938012 0.0652989 0.626683
701 1 0.876719 0.12823 0.6241
602 1 0.80961 0.313318 0.503876
527 1 0.441533 0.000190652 0.562335
797 1 0.873885 0.000192011 0.752661
1923 1 0.0648927 0.49384 0.938456
552 1 0.128444 0.188909 0.56031
579 1 0.0623633 0.25144 0.561487
584 1 0.130436 0.317502 0.562526
674 1 0.0678005 0.189598 0.623621
706 1 0.0692976 0.310244 0.629504
709 1 0.130847 0.25305 0.627392
580 1 0.00361365 0.311059 0.56572
705 1 0.00455333 0.250184 0.628653
581 1 0.118906 0.253434 0.502983
556 1 0.251762 0.190185 0.564679
583 1 0.185307 0.252261 0.563623
587 1 0.313258 0.251586 0.567029
588 1 0.250699 0.30791 0.56411
678 1 0.187236 0.187732 0.625021
682 1 0.310346 0.190875 0.629857
710 1 0.188049 0.314638 0.624194
713 1 0.244691 0.25128 0.624284
714 1 0.309363 0.310771 0.627737
667 1 0.810818 0.00284162 0.690633
663 1 0.687762 0.00187379 0.692127
1053 1 0.876981 0.497275 0.996213
560 1 0.369902 0.188325 0.568292
591 1 0.434084 0.244099 0.565408
592 1 0.373883 0.308857 0.56644
686 1 0.439691 0.185726 0.627921
717 1 0.377547 0.244943 0.629304
718 1 0.436701 0.311927 0.622748
596 1 0.496777 0.30805 0.55794
781 1 0.37394 0.00168455 0.748374
899 1 0.0605541 0.00225163 0.930636
785 1 0.501451 0.00288286 0.750465
564 1 0.500265 0.191332 0.564006
721 1 0.497337 0.249582 0.628271
568 1 0.624153 0.190292 0.562079
595 1 0.565305 0.249681 0.561779
600 1 0.617267 0.315835 0.56234
690 1 0.563307 0.189245 0.624029
722 1 0.561434 0.312495 0.625077
725 1 0.627097 0.248534 0.627351
522 1 0.314563 0.0676797 0.500465
572 1 0.749762 0.184839 0.562293
599 1 0.683557 0.257447 0.559728
603 1 0.806566 0.248331 0.562979
604 1 0.746783 0.314032 0.56197
694 1 0.69193 0.188261 0.626218
698 1 0.808999 0.185673 0.625035
726 1 0.685065 0.314654 0.626426
729 1 0.741306 0.249149 0.623144
730 1 0.804958 0.312588 0.620183
548 1 0.00341449 0.189785 0.562142
576 1 0.87049 0.186972 0.560913
607 1 0.941137 0.251862 0.563788
608 1 0.875283 0.311779 0.568197
702 1 0.941061 0.190543 0.626345
733 1 0.875607 0.250526 0.622121
734 1 0.936675 0.312675 0.623909
530 1 0.565077 0.0639073 0.500453
791 1 0.687933 0.000629886 0.815558
606 1 0.93879 0.311472 0.505514
611 1 0.0608009 0.377099 0.56777
616 1 0.124974 0.438803 0.561301
738 1 0.0638334 0.443524 0.628356
741 1 0.122935 0.379267 0.625218
565 1 0.620971 0.130746 0.501995
615 1 0.187519 0.375378 0.558169
619 1 0.319554 0.371103 0.566676
620 1 0.250536 0.435017 0.564973
742 1 0.183175 0.441162 0.624257
745 1 0.249529 0.378725 0.622201
746 1 0.313876 0.437082 0.625788
1815 1 0.686058 0.499688 0.807529
1543 1 0.18694 0.501266 0.567065
897 1 0.998749 0.0028245 0.871997
1673 1 0.252741 0.499618 0.6212
623 1 0.434563 0.374283 0.564211
624 1 0.376514 0.43895 0.564143
749 1 0.378338 0.373248 0.630675
750 1 0.43917 0.434081 0.621447
628 1 0.500731 0.440017 0.560138
773 1 0.124118 0.00519189 0.748547
574 1 0.934912 0.189016 0.503339
643 1 0.0608905 0.00631407 0.683468
610 1 0.0610486 0.432385 0.503139
1677 1 0.374632 0.498823 0.62102
753 1 0.497118 0.372556 0.62028
627 1 0.560498 0.379374 0.561243
632 1 0.623485 0.435459 0.557836
754 1 0.555659 0.441571 0.621252
757 1 0.619625 0.376848 0.627592
1023 1 0.938772 0.375945 0.940173
769 1 0.996347 0.000678135 0.750923
617 1 0.252133 0.372706 0.504653
1022 1 0.938985 0.43819 0.875554
631 1 0.681344 0.37586 0.562646
635 1 0.812524 0.376208 0.563026
636 1 0.747519 0.437272 0.555423
758 1 0.684803 0.435183 0.620203
761 1 0.745557 0.378214 0.621513
762 1 0.807839 0.436123 0.624517
1021 1 0.875072 0.372394 0.883398
771 1 0.0618268 0.00500239 0.808597
612 1 0.999598 0.437622 0.570134
737 1 0.000824907 0.37275 0.626886
639 1 0.93695 0.375761 0.562903
640 1 0.869398 0.436701 0.563827
765 1 0.87617 0.372017 0.627045
766 1 0.936758 0.437935 0.624495
1951 1 0.937366 0.498583 0.935587
42 1 0.314434 0.187761 1.00119
648 1 0.123312 0.0670946 0.683689
675 1 0.0665792 0.127853 0.682936
770 1 0.0618022 0.0683261 0.751108
776 1 0.12438 0.0630865 0.811577
803 1 0.0621952 0.126394 0.809845
805 1 0.121451 0.128145 0.747279
644 1 0.001552 0.0668854 0.683088
1945 1 0.748211 0.499928 0.877335
1683 1 0.562316 0.503278 0.684983
652 1 0.24775 0.0648802 0.687819
679 1 0.18836 0.123478 0.686951
683 1 0.310815 0.122081 0.692633
774 1 0.184573 0.0637662 0.751167
778 1 0.309861 0.0597477 0.747788
780 1 0.248527 0.0613747 0.812806
807 1 0.187075 0.122308 0.815035
809 1 0.245521 0.125936 0.752047
811 1 0.308772 0.122839 0.807285
1685 1 0.621736 0.495308 0.621509
783 1 0.43481 0.000620293 0.812497
656 1 0.378338 0.0597646 0.683506
687 1 0.434784 0.123761 0.685947
782 1 0.43724 0.0606781 0.753273
784 1 0.371186 0.0614025 0.811205
813 1 0.376699 0.122274 0.754824
815 1 0.439096 0.126024 0.815079
1020 1 0.746876 0.435822 0.934793
1019 1 0.81245 0.374206 0.942003
788 1 0.496651 0.0634725 0.815363
660 1 0.497156 0.0666959 0.685204
817 1 0.496773 0.126424 0.749206
664 1 0.631763 0.0651053 0.690325
691 1 0.562712 0.124755 0.692614
786 1 0.56492 0.0693013 0.756573
792 1 0.624028 0.0632781 0.816277
819 1 0.563964 0.12919 0.820524
821 1 0.629565 0.128479 0.758828
1018 1 0.816685 0.438211 0.877858
668 1 0.749235 0.0636167 0.688623
695 1 0.689683 0.128789 0.692346
699 1 0.813637 0.125285 0.685876
790 1 0.688305 0.0609989 0.753427
794 1 0.813534 0.0647293 0.75236
796 1 0.75147 0.0648768 0.817274
823 1 0.68936 0.125548 0.819435
825 1 0.751266 0.124986 0.751807
827 1 0.814743 0.128337 0.80921
1821 1 0.869483 0.497716 0.749254
1017 1 0.751274 0.371841 0.874893
772 1 1.00134 0.0636102 0.810444
801 1 1.00114 0.130414 0.751347
672 1 0.875452 0.063867 0.684633
703 1 0.937936 0.127616 0.685502
798 1 0.940371 0.0698378 0.74924
800 1 0.875612 0.0628713 0.811362
829 1 0.876339 0.126959 0.748747
831 1 0.938427 0.127979 0.806915
680 1 0.125427 0.190676 0.688808
707 1 0.0665544 0.251306 0.687671
712 1 0.12902 0.319033 0.688077
802 1 0.0645018 0.189444 0.75579
808 1 0.124175 0.187503 0.814951
834 1 0.055776 0.318029 0.749383
835 1 0.0604769 0.251483 0.80748
837 1 0.126139 0.250736 0.752781
840 1 0.128881 0.308875 0.814949
804 1 1.0016 0.189763 0.813436
708 1 0.00117191 0.311007 0.68719
684 1 0.24595 0.188052 0.688019
711 1 0.188821 0.249572 0.691854
715 1 0.30924 0.247335 0.689234
716 1 0.247625 0.307526 0.685924
806 1 0.187468 0.188387 0.752231
810 1 0.307229 0.187266 0.746687
812 1 0.25236 0.184866 0.814522
838 1 0.186049 0.313417 0.754577
839 1 0.191481 0.245494 0.815729
841 1 0.249976 0.249182 0.751222
842 1 0.310913 0.311209 0.750741
843 1 0.311907 0.246027 0.814581
844 1 0.25255 0.311868 0.813113
688 1 0.37525 0.187307 0.693377
719 1 0.440223 0.249058 0.689747
720 1 0.37128 0.311096 0.686922
814 1 0.435684 0.185744 0.753532
816 1 0.374164 0.18816 0.813645
845 1 0.372635 0.248091 0.748422
846 1 0.436091 0.312462 0.755121
847 1 0.4303 0.249066 0.815209
848 1 0.367972 0.31073 0.815357
849 1 0.496408 0.24856 0.752863
724 1 0.500845 0.316069 0.687949
692 1 0.499222 0.182466 0.686809
852 1 0.501179 0.316192 0.812801
820 1 0.49918 0.190216 0.815343
696 1 0.623865 0.191398 0.697133
723 1 0.560032 0.253962 0.688293
728 1 0.623346 0.313172 0.685211
818 1 0.559448 0.189202 0.751733
824 1 0.626459 0.189493 0.818535
850 1 0.563524 0.31792 0.751718
851 1 0.563661 0.250127 0.814962
853 1 0.623049 0.253801 0.75712
856 1 0.62577 0.31451 0.815248
700 1 0.750544 0.184788 0.689998
727 1 0.685448 0.250993 0.694084
731 1 0.807927 0.249388 0.684982
732 1 0.748546 0.314427 0.690306
822 1 0.690456 0.188642 0.756322
826 1 0.812911 0.189694 0.748737
828 1 0.752009 0.186361 0.815935
854 1 0.689714 0.31333 0.749631
855 1 0.691612 0.24878 0.816126
857 1 0.750259 0.24991 0.749616
858 1 0.810397 0.310238 0.748201
859 1 0.815373 0.2507 0.813521
860 1 0.7484 0.311878 0.813233
676 1 0.00391129 0.194227 0.689276
836 1 0.994684 0.311723 0.809612
833 1 0.999155 0.25082 0.750977
704 1 0.87526 0.189042 0.685945
735 1 0.935744 0.250903 0.688561
736 1 0.876245 0.307972 0.685358
830 1 0.941066 0.189004 0.745056
832 1 0.874886 0.188287 0.808429
861 1 0.87461 0.252028 0.75047
862 1 0.939524 0.313524 0.747904
863 1 0.940096 0.249043 0.812767
864 1 0.876725 0.314199 0.813054
739 1 0.0646925 0.375063 0.682799
744 1 0.129716 0.434607 0.692162
866 1 0.0657112 0.433651 0.746848
867 1 0.06211 0.374102 0.811055
869 1 0.127403 0.378778 0.753057
872 1 0.124514 0.439156 0.811476
868 1 0.00129738 0.439308 0.813766
740 1 1.00186 0.438995 0.68496
865 1 0.995426 0.381542 0.752081
1015 1 0.690871 0.374874 0.938724
1014 1 0.686703 0.4371 0.87353
777 1 0.243314 -0.00236871 0.749398
651 1 0.309999 -0.00175909 0.685734
743 1 0.187312 0.375265 0.685216
747 1 0.311336 0.371111 0.68595
748 1 0.249134 0.434417 0.680361
870 1 0.189568 0.435602 0.750248
871 1 0.187698 0.375258 0.816381
873 1 0.250276 0.377825 0.752079
874 1 0.310964 0.435699 0.745785
875 1 0.312006 0.379924 0.809235
876 1 0.249128 0.437763 0.811231
586 1 0.318015 0.317025 0.50111
751 1 0.442905 0.377379 0.688431
752 1 0.372756 0.438137 0.687401
877 1 0.372192 0.375817 0.741384
878 1 0.435312 0.438523 0.745934
879 1 0.435145 0.375941 0.81091
880 1 0.372484 0.44053 0.810905
881 1 0.498003 0.37756 0.751346
661 1 0.623368 0.00613988 0.623461
884 1 0.499817 0.435115 0.813438
756 1 0.498158 0.441874 0.684633
755 1 0.5587 0.379326 0.683169
760 1 0.623787 0.439932 0.684579
882 1 0.565077 0.433888 0.751252
883 1 0.560312 0.374768 0.817505
885 1 0.626993 0.373119 0.75257
888 1 0.626454 0.434394 0.814017
1013 1 0.62636 0.372697 0.870817
1011 1 0.559902 0.373683 0.93155
759 1 0.684327 0.373132 0.689474
763 1 0.805582 0.376801 0.688946
764 1 0.745321 0.44181 0.687193
886 1 0.682741 0.440665 0.745401
887 1 0.691054 0.375739 0.810848
889 1 0.750639 0.378059 0.752701
890 1 0.815239 0.433561 0.754928
891 1 0.812501 0.370156 0.812942
892 1 0.7518 0.437118 0.81646
767 1 0.93473 0.377305 0.689875
768 1 0.869606 0.433609 0.683018
893 1 0.875511 0.368356 0.746367
894 1 0.935224 0.438138 0.750758
895 1 0.941298 0.376207 0.81575
896 1 0.875859 0.432416 0.814532
543 1 0.936602 0.00516388 0.563019
898 1 0.0640299 0.0621665 0.873452
904 1 0.12302 0.0604664 0.938518
931 1 0.0550696 0.124007 0.939053
933 1 0.126203 0.124722 0.879944
900 1 1.00077 0.0600465 0.93888
1016 1 0.622489 0.431861 0.936818
1943 1 0.68154 0.498891 0.935323
1809 1 0.500773 0.494338 0.749713
902 1 0.184369 0.0587125 0.87489
906 1 0.311673 0.0588984 0.874255
908 1 0.250948 0.0575788 0.935326
935 1 0.18708 0.121024 0.934634
937 1 0.247958 0.122108 0.875593
939 1 0.312844 0.122111 0.938531
535 1 0.685664 0.00598196 0.563762
1671 1 0.193209 0.49644 0.690638
523 1 0.314908 0.00219211 0.565088
1010 1 0.561271 0.437467 0.876238
1667 1 0.0686599 0.499685 0.696245
910 1 0.438158 0.0581569 0.879641
912 1 0.369886 0.0611745 0.94035
941 1 0.373737 0.115849 0.874698
943 1 0.434257 0.122023 0.937225
945 1 0.496318 0.129174 0.877576
1801 1 0.250536 0.499612 0.751874
122 1 0.810233 0.434407 0.99724
659 1 0.557639 0.0107309 0.691152
645 1 0.128784 0.00157827 0.624922
913 1 0.50235 0.00189444 0.873804
641 1 -0.000756542 0.00106895 0.622664
916 1 0.502609 0.060089 0.936461
914 1 0.560998 0.0647564 0.877202
920 1 0.621743 0.0640805 0.938186
947 1 0.559321 0.127514 0.939661
949 1 0.627741 0.123137 0.88159
671 1 0.933959 0.00347584 0.688856
787 1 0.562487 0.00586293 0.818398
918 1 0.689549 0.0612183 0.877842
922 1 0.816079 0.0645706 0.874412
924 1 0.752925 0.0590491 0.941435
951 1 0.687906 0.123771 0.937229
953 1 0.753563 0.125931 0.879885
955 1 0.812957 0.122471 0.939929
901 1 0.12611 0.00301738 0.870431
649 1 0.247751 0.00261608 0.624346
789 1 0.621214 0.000167579 0.753748
1691 1 0.808507 0.499999 0.68856
929 1 1.00024 0.123151 0.872979
926 1 0.935197 0.066132 0.876304
928 1 0.872329 0.0591538 0.934159
957 1 0.873841 0.123818 0.871585
959 1 0.937064 0.126174 0.937385
26 1 0.816972 0.058596 0.995949
1009 1 0.495404 0.372963 0.874275
1555 1 0.564912 0.502359 0.559064
930 1 0.0626097 0.186416 0.873993
936 1 0.127431 0.187745 0.937875
962 1 0.0634968 0.307161 0.873532
963 1 0.0651595 0.244297 0.939516
965 1 0.122997 0.241722 0.873603
968 1 0.1271 0.307948 0.935178
964 1 0.995754 0.310382 0.931822
110 1 0.432568 0.440981 0.998731
934 1 0.189076 0.185891 0.876061
938 1 0.307457 0.183376 0.874036
940 1 0.253027 0.183523 0.936463
966 1 0.187824 0.312407 0.876674
967 1 0.192982 0.24765 0.940071
969 1 0.248142 0.24798 0.87765
970 1 0.313278 0.314461 0.880242
971 1 0.312083 0.24903 0.939887
972 1 0.251274 0.311318 0.939989
942 1 0.436084 0.186753 0.874924
944 1 0.372966 0.186816 0.933052
973 1 0.374303 0.247706 0.874374
974 1 0.438665 0.308098 0.873307
975 1 0.435177 0.251389 0.936999
976 1 0.379448 0.309816 0.937749
980 1 0.497012 0.309514 0.941775
977 1 0.497988 0.246386 0.876171
948 1 0.493136 0.190613 0.939258
657 1 0.504531 0.0047631 0.620169
1819 1 0.812343 0.496875 0.814488
33 1 0.996929 0.124486 1.00188
1929 1 0.259335 0.495642 0.86907
946 1 0.560156 0.188446 0.879471
952 1 0.623099 0.186315 0.939396
978 1 0.557369 0.307554 0.875047
979 1 0.559212 0.248356 0.942579
981 1 0.623173 0.250192 0.878149
984 1 0.625267 0.313535 0.939608
1012 1 0.502102 0.43888 0.934178
85 1 0.626347 0.25121 0.998034
1001 1 0.24638 0.374311 0.877399
988 1 0.750374 0.311557 0.937225
987 1 0.816827 0.249375 0.933857
986 1 0.809811 0.31041 0.877438
985 1 0.750491 0.247047 0.87704
983 1 0.686316 0.247599 0.93911
982 1 0.689072 0.31125 0.872421
950 1 0.687982 0.189976 0.877225
954 1 0.819426 0.187536 0.873459
956 1 0.751143 0.184463 0.935504
1005 1 0.375409 0.372836 0.872213
961 1 1.0022 0.247292 0.88078
960 1 0.875278 0.181898 0.932087
932 1 0.998039 0.185514 0.932971
990 1 0.934697 0.315372 0.873785
991 1 0.936344 0.241397 0.937213
992 1 0.87515 0.312354 0.942937
989 1 0.879335 0.250676 0.87025
958 1 0.936444 0.184389 0.870943
1004 1 0.253254 0.433254 0.935229
1003 1 0.313093 0.371151 0.939597
999 1 0.188545 0.373335 0.936837
102 1 0.194564 0.433573 0.995777
998 1 0.183243 0.436754 0.877322
993 1 0.00250488 0.3744 0.87416
994 1 0.0637793 0.433975 0.871452
995 1 0.0628693 0.370792 0.937168
1000 1 0.127772 0.43563 0.940383
997 1 0.12368 0.374368 0.872891
1002 1 0.314033 0.434919 0.876625
637 1 0.876127 0.375045 0.503148
1006 1 0.436686 0.445062 0.873065
1008 1 0.374663 0.446123 0.933781
1007 1 0.434534 0.374356 0.935109
86 1 0.688836 0.311072 0.999864
46 1 0.432714 0.187977 0.99595
1559 1 0.687137 0.499089 0.557045
1689 1 0.748333 0.495859 0.624712
1931 1 0.306956 0.500659 0.939743
113 1 0.493144 0.378954 0.998643
106 1 0.314148 0.441271 0.997907
45 1 0.371779 0.123494 0.998256
37 1 0.121267 0.124337 0.996347
669 1 0.872601 0.00214532 0.625259
34 1 0.0601609 0.185325 0.998064
1927 1 0.190904 0.493934 0.940214
38 1 0.189277 0.182461 1.00015
66 1 0.0629067 0.312999 0.998636
57 1 0.747862 0.121907 0.999691
77 1 0.375909 0.248376 0.999377
1537 1 0.995838 0.494698 0.501265
638 1 0.938066 0.435761 0.502522
6 1 0.18724 0.0581216 0.997394
526 1 0.437394 0.0625276 0.501914
58 1 0.813465 0.188978 0.99853
69 1 0.125419 0.245944 0.999796
554 1 0.311469 0.191418 0.503468
525 1 0.380591 0.000323428 0.501104
82 1 0.562228 0.313944 0.999389
562 1 0.561437 0.190746 0.500286
614 1 0.185971 0.438298 0.501263
561 1 0.500173 0.12712 0.502592
566 1 0.685311 0.190601 0.504643
89 1 0.755655 0.249771 0.993717
30 1 0.936975 0.0629172 1.00021
585 1 0.247182 0.253182 0.503637
633 1 0.748476 0.373501 0.500833
598 1 0.686483 0.317594 0.497146
537 1 0.747962 0.00383647 0.506396
605 1 0.875298 0.254551 0.502455
570 1 0.810797 0.185387 0.498203
545 1 0.997564 0.126104 0.502605
590 1 0.432143 0.30994 0.503197
542 1 0.936351 0.0641536 0.503581
589 1 0.376362 0.250174 0.500518
609 1 0.999949 0.368207 0.501767
1032 1 0.125767 0.561459 0.060743
1059 1 0.0610375 0.625794 0.0649325
1154 1 0.0620083 0.560073 0.127965
1189 1 0.129923 0.622606 0.125175
1305 1 0.754139 0.500793 0.249621
1533 1 0.87403 0.872844 0.382954
1026 1 0.0619276 0.564884 -0.000984422
413 1 0.874533 0.999333 0.376884
1036 1 0.252233 0.571285 0.0599187
1063 1 0.185738 0.62574 0.0640159
1067 1 0.31512 0.627708 0.063978
1158 1 0.193774 0.562262 0.123349
1162 1 0.308448 0.57469 0.125884
1193 1 0.250077 0.628466 0.125041
1534 1 0.937846 0.932348 0.374923
1535 1 0.937167 0.875493 0.441112
1175 1 0.689064 0.502129 0.191774
1173 1 0.62881 0.499171 0.122056
1040 1 0.372481 0.562554 0.0666167
1071 1 0.428644 0.625038 0.0582269
1166 1 0.439651 0.567962 0.119346
1197 1 0.376047 0.627268 0.127556
1044 1 0.50441 0.56401 0.0615763
1415 1 0.18603 0.502124 0.439119
1536 1 0.870004 0.936995 0.441295
1201 1 0.498039 0.625611 0.122418
1048 1 0.631882 0.564537 0.0585892
1075 1 0.561575 0.625263 0.0607029
1170 1 0.558473 0.56639 0.125869
1205 1 0.622354 0.622382 0.121545
1090 1 0.0621559 0.815716 0.00199568
1052 1 0.752094 0.563358 0.059422
1079 1 0.691881 0.627684 0.055667
1083 1 0.812148 0.624911 0.0655166
1174 1 0.692339 0.563505 0.125884
1178 1 0.812005 0.554707 0.120199
1209 1 0.749149 0.625813 0.129242
1094 1 0.18054 0.81626 0.00337423
1423 1 0.435239 0.503907 0.43956
1157 1 0.123466 0.499456 0.123534
1526 1 0.688221 0.937269 0.374646
1185 1 -0.000111463 0.62162 0.123182
1028 1 -0.000564023 0.558686 0.0632115
1056 1 0.877843 0.562604 0.0608859
1087 1 0.939784 0.617775 0.0624325
1182 1 0.935854 0.560489 0.12616
1213 1 0.872501 0.620793 0.123106
1527 1 0.685575 0.874819 0.441296
1529 1 0.749231 0.876951 0.371456
23 1 0.688396 0.995777 0.0658222
1130 1 0.309853 0.93489 0.00430294
1064 1 0.125621 0.687768 0.059842
1091 1 0.0607655 0.749378 0.0651772
1096 1 0.122041 0.809054 0.0610999
1186 1 0.061773 0.68387 0.127474
1218 1 0.0613684 0.810363 0.123429
1221 1 0.124252 0.752852 0.126611
1092 1 0.999646 0.810075 0.0606563
513 1 0.000989628 0.998966 0.499276
1027 1 0.0668873 0.501329 0.0627873
13 1 0.373206 0.998466 0.000143206
1068 1 0.252676 0.687951 0.0655994
1095 1 0.187127 0.750859 0.0605552
1099 1 0.310229 0.755729 0.0614194
1100 1 0.250126 0.815195 0.061941
1190 1 0.187956 0.688262 0.128362
1194 1 0.317369 0.691012 0.122913
1222 1 0.189839 0.810637 0.121789
1225 1 0.251298 0.751576 0.124977
1226 1 0.313186 0.812825 0.12299
1137 1 0.496642 0.876976 -0.00204598
1417 1 0.247981 0.501129 0.377092
1072 1 0.377539 0.689515 0.0585422
1103 1 0.435944 0.751449 0.0626017
1104 1 0.375716 0.814266 0.0657097
1198 1 0.436168 0.69001 0.125994
1229 1 0.377488 0.747701 0.122732
1230 1 0.437958 0.80912 0.126589
1076 1 0.496368 0.686251 0.0612752
401 1 0.499568 0.994891 0.377189
1161 1 0.257349 0.50639 0.126144
1108 1 0.503555 0.815573 0.0677181
1233 1 0.498641 0.749489 0.119829
1080 1 0.625412 0.684553 0.0591183
1107 1 0.56262 0.751433 0.0605165
1112 1 0.625766 0.809386 0.0594739
1202 1 0.564871 0.688413 0.121424
1234 1 0.569871 0.812816 0.1271
1237 1 0.627944 0.752203 0.123508
1098 1 0.314474 0.814309 0.000686939
1030 1 0.191739 0.562272 0.00591593
1084 1 0.754991 0.686643 0.0622717
1111 1 0.691987 0.74908 0.0574515
1115 1 0.812762 0.745049 0.0640924
1116 1 0.756588 0.809514 0.0643404
1206 1 0.686981 0.684053 0.124887
1210 1 0.813339 0.685534 0.128338
1238 1 0.688376 0.811085 0.119257
1241 1 0.751852 0.747409 0.125552
1242 1 0.813458 0.81369 0.126092
1303 1 0.681121 0.501429 0.319658
1129 1 0.243909 0.874313 0.00235551
1060 1 0.995298 0.688653 0.0608616
1217 1 0.998609 0.75555 0.125739
1088 1 0.877636 0.684329 0.0606875
1119 1 0.938688 0.751474 0.0611687
1120 1 0.872325 0.80966 0.0594682
1214 1 0.938874 0.687552 0.127003
1245 1 0.876354 0.746976 0.125826
1246 1 0.93046 0.809746 0.126346
1089 1 0.00180767 0.745301 0.000110669
1150 1 0.932636 0.935086 0.00100559
1123 1 0.0620409 0.877376 0.0635255
1128 1 0.122434 0.938267 0.0667015
1250 1 0.0614328 0.940281 0.123881
1253 1 0.122594 0.874896 0.124358
1249 1 0.997938 0.87661 0.123679
1124 1 0.992703 0.93556 0.0612118
1530 1 0.8132 0.93783 0.377297
1163 1 0.315479 0.503607 0.186368
1425 1 0.49981 0.503346 0.382398
1127 1 0.181987 0.876477 0.0643293
1131 1 0.316116 0.877906 0.061479
1132 1 0.25105 0.932033 0.0625577
1254 1 0.191563 0.937555 0.124653
1257 1 0.250832 0.874612 0.125178
1258 1 0.309704 0.934777 0.123256
1093 1 0.125708 0.750574 -0.000607007
1135 1 0.437849 0.87389 0.0648777
1136 1 0.376698 0.938946 0.0644228
1261 1 0.371837 0.872908 0.128303
1262 1 0.437432 0.936701 0.128666
1265 1 0.497387 0.875687 0.125534
1140 1 0.495924 0.937563 0.0605125
1531 1 0.808402 0.876261 0.43919
1139 1 0.558893 0.876937 0.0639072
1144 1 0.62842 0.938086 0.0669376
1266 1 0.557651 0.940906 0.124353
1269 1 0.631385 0.872512 0.119492
1141 1 0.621071 0.873103 0.000866847
1143 1 0.689983 0.874308 0.0635764
1147 1 0.813802 0.878362 0.0641201
1148 1 0.746898 0.934205 0.0589784
1270 1 0.687765 0.937971 0.128543
1273 1 0.750978 0.876384 0.123959
1274 1 0.813135 0.938342 0.125369
1532 1 0.749445 0.936481 0.436571
1289 1 0.2496 0.499066 0.252663
1153 1 0.995997 0.502204 0.125142
1151 1 0.929814 0.873157 0.0646986
1152 1 0.870782 0.936747 0.0619466
1277 1 0.872982 0.872429 0.124995
1278 1 0.936605 0.938991 0.126167
1307 1 0.807284 0.500724 0.314447
1125 1 0.121068 0.878429 -0.000144257
259 1 0.0589729 0.997234 0.314199
411 1 0.808181 1.00069 0.439184
1297 1 0.509215 0.505208 0.247146
1160 1 0.126116 0.562377 0.185875
1187 1 0.0633011 0.620057 0.186755
1282 1 0.0677735 0.558672 0.25247
1288 1 0.12879 0.560206 0.31425
1315 1 0.0657427 0.618948 0.312556
1317 1 0.124648 0.623926 0.247687
1284 1 0.0011013 0.559144 0.311258
1156 1 0.00312114 0.56438 0.185004
3 1 0.0571732 0.997785 0.0622496
1525 1 0.62691 0.87536 0.374369
1164 1 0.249765 0.566271 0.189738
1191 1 0.187968 0.623639 0.186634
1195 1 0.310094 0.629448 0.188746
1286 1 0.186416 0.561992 0.253106
1290 1 0.31521 0.561877 0.246071
1292 1 0.250052 0.561899 0.309813
1319 1 0.183886 0.62654 0.309444
1321 1 0.249885 0.626161 0.251095
1323 1 0.31117 0.63038 0.309547
1523 1 0.559259 0.880335 0.438211
1168 1 0.376014 0.565534 0.18027
1199 1 0.436409 0.622812 0.188356
1294 1 0.436328 0.562562 0.253716
1296 1 0.374541 0.564777 0.314199
1325 1 0.376305 0.623845 0.245877
1327 1 0.438026 0.63155 0.310698
1329 1 0.501027 0.629981 0.246648
1300 1 0.501848 0.561438 0.309393
1528 1 0.622216 0.939593 0.436498
1172 1 0.498173 0.563793 0.188206
1176 1 0.627224 0.560466 0.185863
1203 1 0.565759 0.627177 0.186512
1298 1 0.565455 0.569001 0.249135
1304 1 0.620814 0.564671 0.31001
1331 1 0.558284 0.630979 0.308864
1333 1 0.621887 0.629351 0.248065
1522 1 0.566304 0.938771 0.373236
1159 1 0.191498 0.503578 0.183482
1180 1 0.746818 0.562608 0.185048
1207 1 0.683763 0.624504 0.188446
1211 1 0.811865 0.621388 0.185017
1302 1 0.683269 0.567397 0.250472
1306 1 0.81726 0.56159 0.253848
1308 1 0.747719 0.562245 0.309338
1335 1 0.683879 0.627531 0.305048
1337 1 0.749987 0.623708 0.249066
1339 1 0.809632 0.626649 0.314797
1183 1 0.941704 0.49719 0.189336
271 1 0.439713 0.996864 0.31294
1313 1 0.00582392 0.621884 0.249599
1184 1 0.870473 0.558676 0.187875
1215 1 0.934434 0.620441 0.185457
1310 1 0.939538 0.560928 0.248278
1312 1 0.87605 0.563894 0.315839
1341 1 0.880481 0.622428 0.252489
1343 1 0.940378 0.623396 0.316786
1192 1 0.122504 0.689071 0.18724
1219 1 0.0552215 0.75376 0.19005
1224 1 0.125323 0.816155 0.183918
1314 1 0.0593458 0.688213 0.246948
1320 1 0.122544 0.689192 0.311894
1346 1 0.0621724 0.808881 0.253802
1347 1 0.0635312 0.75071 0.315814
1349 1 0.127307 0.749551 0.248402
1352 1 0.127283 0.812877 0.314475
1348 1 0.997548 0.811265 0.313786
1220 1 0.999485 0.813384 0.189815
1188 1 0.997338 0.684991 0.188065
1196 1 0.252597 0.692518 0.187213
1223 1 0.188131 0.750588 0.182484
1227 1 0.314642 0.753606 0.189402
1228 1 0.249363 0.815644 0.181124
1318 1 0.18639 0.681614 0.24603
1322 1 0.316182 0.692512 0.25175
1324 1 0.249224 0.684726 0.316567
1350 1 0.184249 0.812825 0.249019
1351 1 0.18923 0.748741 0.317203
1353 1 0.246986 0.749721 0.25826
1354 1 0.309984 0.815011 0.255377
1355 1 0.315198 0.753786 0.315198
1356 1 0.248502 0.812618 0.313989
1200 1 0.374737 0.692033 0.189113
1231 1 0.438551 0.754741 0.185662
1232 1 0.37525 0.812503 0.189964
1326 1 0.43468 0.683289 0.245238
1328 1 0.374139 0.685378 0.313168
1357 1 0.377467 0.752386 0.255702
1358 1 0.435605 0.812484 0.249408
1359 1 0.436917 0.746814 0.312257
1360 1 0.377278 0.810122 0.315411
1332 1 0.503607 0.690787 0.309599
1236 1 0.498429 0.817312 0.186224
1364 1 0.502174 0.811792 0.310934
1361 1 0.500291 0.751465 0.2468
1204 1 0.498827 0.693338 0.182521
1208 1 0.622414 0.68978 0.186458
1235 1 0.564082 0.749727 0.181445
1240 1 0.63001 0.817668 0.187193
1330 1 0.562414 0.692053 0.245942
1336 1 0.625806 0.696412 0.310025
1362 1 0.560914 0.812465 0.244975
1363 1 0.567357 0.756581 0.308253
1365 1 0.627912 0.749868 0.24697
1368 1 0.625639 0.817801 0.30717
1212 1 0.746674 0.686038 0.187383
1239 1 0.686482 0.754894 0.18632
1243 1 0.819061 0.745292 0.193112
1244 1 0.753866 0.813031 0.187292
1334 1 0.686528 0.69224 0.2443
1338 1 0.809737 0.681379 0.249642
1340 1 0.744918 0.68415 0.311532
1366 1 0.693315 0.814302 0.244587
1367 1 0.691488 0.752885 0.306274
1369 1 0.75205 0.746577 0.243184
1370 1 0.81344 0.811291 0.24947
1371 1 0.804389 0.745613 0.310056
1372 1 0.754062 0.815856 0.309537
1345 1 0.996429 0.747139 0.25514
1316 1 0.00328464 0.683442 0.31194
1216 1 0.877463 0.682865 0.194523
1247 1 0.935338 0.749457 0.190527
1248 1 0.871626 0.811544 0.187135
1342 1 0.938549 0.68609 0.251979
1344 1 0.870932 0.684641 0.313464
1373 1 0.872901 0.747298 0.258613
1374 1 0.934813 0.809566 0.248412
1375 1 0.932897 0.741891 0.317082
1376 1 0.875057 0.810362 0.314933
1301 1 0.624026 0.506319 0.249221
1251 1 0.0551141 0.871023 0.186027
1256 1 0.124406 0.933649 0.184688
1378 1 0.0595745 0.940943 0.249267
1379 1 0.0641607 0.872474 0.310326
1381 1 0.125519 0.876742 0.251533
1384 1 0.124037 0.937264 0.312607
1520 1 0.371104 0.93751 0.439598
1255 1 0.186055 0.876121 0.184447
1259 1 0.312232 0.875796 0.187218
1260 1 0.254873 0.938406 0.183924
1382 1 0.184079 0.93865 0.248707
1383 1 0.185051 0.875897 0.313462
1385 1 0.247899 0.875905 0.250106
1386 1 0.317957 0.939072 0.250307
1387 1 0.311854 0.876023 0.315793
1388 1 0.248995 0.939366 0.309877
1519 1 0.436876 0.873476 0.442935
1263 1 0.436801 0.875388 0.189028
1264 1 0.373274 0.934155 0.187108
1389 1 0.374446 0.874266 0.251936
1390 1 0.438794 0.935327 0.253532
1391 1 0.442067 0.871134 0.315783
1392 1 0.37606 0.935649 0.319149
1268 1 0.501496 0.93453 0.190083
1396 1 0.499343 0.933434 0.319196
1393 1 0.502084 0.881314 0.254572
1267 1 0.564628 0.878705 0.183352
1272 1 0.624412 0.9349 0.185053
1394 1 0.568596 0.934758 0.250342
1395 1 0.561966 0.87488 0.313322
1397 1 0.628953 0.872866 0.246752
1400 1 0.628404 0.931784 0.312628
269 1 0.376897 0.995586 0.252179
1521 1 0.497724 0.869546 0.378941
1518 1 0.437799 0.933198 0.37834
1105 1 0.49685 0.752997 -0.00211334
1271 1 0.693415 0.876886 0.185396
1275 1 0.810361 0.874754 0.184454
1276 1 0.75467 0.941063 0.187649
1398 1 0.688416 0.937434 0.249768
1399 1 0.693303 0.87384 0.308505
1401 1 0.754778 0.877855 0.248127
1402 1 0.814939 0.943735 0.24984
1403 1 0.813484 0.879271 0.314531
1404 1 0.751747 0.938683 0.308766
1517 1 0.374753 0.868437 0.375205
1524 1 0.498939 0.938592 0.440294
1311 1 0.93644 0.501305 0.311185
407 1 0.685229 0.99669 0.437604
1252 1 -0.000789679 0.939011 0.185117
1377 1 -0.00144689 0.870959 0.249485
1380 1 -0.000191761 0.93258 0.315061
1279 1 0.936862 0.872372 0.18642
1280 1 0.877291 0.937914 0.1932
1405 1 0.874067 0.874993 0.247248
1406 1 0.938815 0.938077 0.247646
1407 1 0.93561 0.873437 0.313561
1408 1 0.8757 0.935062 0.314227
1431 1 0.685555 0.50476 0.431496
1516 1 0.250884 0.938512 0.433848
1281 1 0.00200662 0.50144 0.248308
1410 1 0.0638753 0.561664 0.376378
1416 1 0.124443 0.558057 0.436874
1443 1 0.0586509 0.624445 0.440588
1445 1 0.11959 0.622124 0.374208
1412 1 -0.00421378 0.556843 0.43662
1510 1 0.188718 0.933831 0.371715
147 1 0.560406 0.999167 0.18719
1414 1 0.187915 0.564682 0.376708
1418 1 0.313137 0.565876 0.374801
1420 1 0.254582 0.56203 0.437721
1447 1 0.185589 0.623398 0.437367
1449 1 0.253497 0.619598 0.376087
1451 1 0.313446 0.630976 0.435526
397 1 0.379188 0.997742 0.373086
1511 1 0.1899 0.878099 0.43767
1562 1 0.817839 0.560925 0.505562
1422 1 0.437376 0.561346 0.370533
1424 1 0.374783 0.564424 0.443185
1453 1 0.377197 0.625884 0.370455
1455 1 0.43776 0.626731 0.433169
1428 1 0.499177 0.567103 0.437666
391 1 0.188454 0.99732 0.434305
1515 1 0.311976 0.874362 0.438031
1457 1 0.501346 0.626539 0.368354
1426 1 0.562426 0.567906 0.373239
1432 1 0.622447 0.566534 0.435019
1459 1 0.560957 0.628332 0.43661
1461 1 0.620528 0.634054 0.364994
1177 1 0.749338 0.501451 0.124366
1513 1 0.248021 0.876875 0.376378
1514 1 0.310309 0.941408 0.373658
1430 1 0.679733 0.568394 0.369639
1434 1 0.807592 0.559861 0.371421
1436 1 0.745723 0.5649 0.434411
1463 1 0.682455 0.625064 0.440463
1465 1 0.748585 0.622645 0.370658
1467 1 0.807039 0.622897 0.440788
1546 1 0.312469 0.562151 0.501129
1155 1 0.0661242 0.499676 0.187409
1122 1 0.057863 0.938701 0.00404616
1441 1 0.00290502 0.625726 0.376123
1438 1 0.937853 0.560228 0.370759
1440 1 0.872967 0.558038 0.431828
1469 1 0.874368 0.62098 0.376165
1471 1 0.937671 0.616555 0.43632
143 1 0.435197 0.997978 0.189143
1442 1 0.0640996 0.68828 0.374289
1448 1 0.12426 0.685764 0.434732
1474 1 0.0619203 0.813942 0.373688
1475 1 0.0648174 0.747766 0.439456
1477 1 0.125543 0.752458 0.374275
1480 1 0.120766 0.808641 0.438686
1473 1 -0.000977802 0.748122 0.377722
1444 1 0.995967 0.684538 0.439167
1476 1 0.00212947 0.813611 0.443951
389 1 0.128014 0.999482 0.371994
1638 1 0.193083 0.937217 0.499614
1446 1 0.187386 0.690342 0.375733
1450 1 0.317626 0.687284 0.373266
1452 1 0.250487 0.689599 0.434125
1478 1 0.186165 0.814744 0.376459
1479 1 0.186119 0.749877 0.438791
1481 1 0.254982 0.74709 0.372928
1482 1 0.310032 0.812019 0.377239
1483 1 0.311877 0.746068 0.43939
1484 1 0.24609 0.813061 0.436465
1299 1 0.562826 0.501933 0.307823
131 1 0.0626603 0.99638 0.188903
1454 1 0.440618 0.688396 0.375128
1456 1 0.377435 0.687883 0.438031
1485 1 0.375387 0.752972 0.381964
1486 1 0.43743 0.808946 0.376871
1487 1 0.439229 0.750774 0.438006
1488 1 0.369941 0.813245 0.441387
1492 1 0.498921 0.809614 0.43863
1460 1 0.500707 0.692788 0.429947
1489 1 0.497812 0.753698 0.370586
1458 1 0.561918 0.692765 0.37275
1464 1 0.625253 0.691909 0.435971
1496 1 0.623782 0.813696 0.438927
1490 1 0.566046 0.815482 0.372545
1493 1 0.632674 0.753854 0.375583
1491 1 0.56194 0.751501 0.437359
1035 1 0.314714 0.501521 0.0631581
1050 1 0.810681 0.563381 0.00151904
1500 1 0.747626 0.816108 0.438587
1499 1 0.808507 0.747637 0.432981
1498 1 0.811826 0.814257 0.370099
1497 1 0.751329 0.753182 0.372855
1495 1 0.685646 0.754344 0.439595
1494 1 0.687624 0.81575 0.373184
1468 1 0.749064 0.687404 0.444078
1466 1 0.807641 0.683644 0.376803
1462 1 0.6921 0.690184 0.375579
145 1 0.49796 0.99871 0.128648
139 1 0.312443 0.999159 0.183991
275 1 0.561018 0.995364 0.308531
265 1 0.25007 1.00179 0.244402
1509 1 0.128583 0.873395 0.372047
1506 1 0.0625761 0.933113 0.372413
1472 1 0.874004 0.688283 0.437515
1470 1 0.936435 0.680093 0.37547
1502 1 0.93999 0.810071 0.376372
1503 1 0.937232 0.747198 0.439586
1501 1 0.867141 0.752463 0.374757
1504 1 0.872105 0.811023 0.442357
27 1 0.813451 0.997738 0.0667339
1507 1 0.0634205 0.872938 0.436591
1505 1 0.00136566 0.873235 0.375909
1512 1 0.127783 0.936052 0.436177
1508 1 0.998483 0.938208 0.435188
1558 1 0.687541 0.566133 0.499147
1590 1 0.688082 0.684938 0.497027
1043 1 0.566632 0.50459 0.069253
1039 1 0.439051 0.505295 0.0581556
141 1 0.376195 0.998992 0.123189
273 1 0.49743 0.996966 0.249045
15 1 0.438417 1.00141 0.0697862
1167 1 0.438831 0.505958 0.188841
7 1 0.180436 0.996543 0.0643882
1650 1 0.563048 0.940454 0.498124
129 1 0.997327 0.998527 0.126512
137 1 0.250725 0.995958 0.127051
11 1 0.31216 0.998054 0.0629995
1586 1 0.557959 0.687542 0.493554
399 1 0.436865 0.9994 0.438127
1630 1 0.936913 0.810932 0.500029
135 1 0.183825 0.99878 0.186954
285 1 0.876563 1.00279 0.254722
31 1 0.932436 0.99641 0.0634226
1427 1 0.562851 0.500999 0.435433
1601 1 0.00464089 0.748473 0.498407
1569 1 -0.00129373 0.622596 0.501452
1550 1 0.440481 0.569513 0.498881
1433 1 0.745265 0.504654 0.372854
1165 1 0.382261 0.500266 0.123954
1169 1 0.499515 0.50172 0.122447
9 1 0.245246 0.996248 0.00518754
1613 1 0.37577 0.749294 0.499095
1582 1 0.439364 0.686809 0.495527
1542 1 0.190648 0.559186 0.498818
1577 1 0.250354 0.624805 0.49902
1570 1 0.0612152 0.683949 0.499545
1634 1 0.0636138 0.939482 0.498752
1574 1 0.188716 0.685079 0.496675
1645 1 0.378158 0.873332 0.497835
1145 1 0.754162 0.87451 0.00417923
1589 1 0.623157 0.62904 0.502657
1657 1 0.750279 0.876097 0.502688
1113 1 0.753019 0.750139 0.0024171
1554 1 0.56313 0.563927 0.496679
1101 1 0.372436 0.754585 0.00244362
1110 1 0.686976 0.815633 0.00211632
1557 1 0.624851 0.505656 0.49581
1134 1 0.433758 0.937051 0.00691625
1114 1 0.811161 0.811752 0.000845608
1057 1 0.000815897 0.624655 0.00145528
1045 1 0.619896 0.50303 -7.29835e-05
1 1 0.995333 0.996435 0.000580338
1544 1 0.131184 0.557266 0.55776
1571 1 0.0641142 0.617936 0.565619
1666 1 0.0643051 0.559696 0.627354
1701 1 0.129879 0.61896 0.625031
1539 1 0.0651591 0.500029 0.562722
911 1 0.437705 0.999826 0.943241
1697 1 0.00101279 0.625862 0.627016
1609 1 0.247777 0.752937 0.504813
1637 1 0.126883 0.877078 0.502998
1041 1 0.499996 0.503643 1.00011
1548 1 0.249073 0.563001 0.561019
1575 1 0.189833 0.625819 0.565685
1579 1 0.310413 0.625517 0.56257
1670 1 0.189666 0.556515 0.628524
1674 1 0.317936 0.563166 0.621013
1705 1 0.249112 0.623027 0.623427
1567 1 0.932498 0.498904 0.565411
1552 1 0.381432 0.569159 0.559159
1583 1 0.437222 0.633017 0.56208
1678 1 0.436796 0.565077 0.622285
1709 1 0.371853 0.622711 0.625953
1556 1 0.5016 0.562692 0.562752
2045 1 0.874101 0.874354 0.870892
1669 1 0.12415 0.502212 0.62602
799 1 0.936459 1.00074 0.812705
1693 1 0.874676 0.497695 0.629948
1713 1 0.503619 0.624313 0.62175
1560 1 0.627071 0.560143 0.559029
1587 1 0.56116 0.630314 0.561509
1682 1 0.565585 0.560923 0.625386
1717 1 0.624861 0.629076 0.632172
2046 1 0.937103 0.939756 0.874894
2047 1 0.937558 0.876495 0.93444
2048 1 0.875887 0.934103 0.939016
923 1 0.812721 0.993159 0.935054
1564 1 0.752331 0.560362 0.565245
1591 1 0.680296 0.63003 0.55979
1595 1 0.813028 0.620581 0.564127
1686 1 0.687611 0.564862 0.624931
1690 1 0.811271 0.560614 0.6291
1721 1 0.749656 0.625352 0.626397
1617 1 0.498927 0.748459 0.503315
1540 1 0.999603 0.558977 0.56696
1566 1 0.932453 0.56286 0.506144
903 1 0.186244 1.00031 0.938827
1568 1 0.875324 0.562333 0.566066
1599 1 0.937418 0.625827 0.562719
1694 1 0.938991 0.563156 0.627633
1725 1 0.87381 0.624763 0.624377
2020 1 0.0045729 0.938796 0.930603
1665 1 -0.000498244 0.501255 0.63093
1576 1 0.126077 0.687156 0.559569
1603 1 0.0603163 0.751194 0.561356
1608 1 0.12542 0.813363 0.563389
1698 1 0.065712 0.68666 0.620344
1730 1 0.0640082 0.806539 0.624758
1733 1 0.126438 0.747977 0.623615
1604 1 0.00329107 0.819491 0.566424
1572 1 1.00196 0.681966 0.56452
1605 1 0.122436 0.746746 0.501257
1941 1 0.621685 0.501075 0.874012
1580 1 0.252434 0.687226 0.562953
1607 1 0.189591 0.751612 0.561352
1611 1 0.315549 0.752012 0.561113
1612 1 0.254879 0.812715 0.56832
1702 1 0.18552 0.689212 0.624016
1706 1 0.315486 0.692601 0.624561
1734 1 0.187181 0.816714 0.621888
1737 1 0.250678 0.74525 0.624756
1738 1 0.314583 0.8139 0.624783
1610 1 0.312187 0.814385 0.502852
1037 1 0.376876 0.503326 1.00019
1578 1 0.312715 0.688429 0.503058
1584 1 0.373433 0.685985 0.563584
1615 1 0.438654 0.746874 0.562506
1616 1 0.376604 0.812633 0.565497
1710 1 0.436448 0.683844 0.625459
1741 1 0.374831 0.751042 0.620491
1742 1 0.443087 0.813497 0.622981
1588 1 0.501094 0.687354 0.565835
1793 1 0.000825161 0.506262 0.748754
1805 1 0.370619 0.505487 0.747318
779 1 0.309652 0.990327 0.810384
1745 1 0.495733 0.747842 0.630883
1620 1 0.502009 0.815 0.566855
1675 1 0.312314 0.505498 0.686627
1592 1 0.61937 0.68973 0.556831
1619 1 0.557622 0.751862 0.566298
1624 1 0.617842 0.813821 0.568816
1714 1 0.563044 0.68486 0.625236
1746 1 0.563687 0.810248 0.631263
1749 1 0.626152 0.742728 0.622318
1596 1 0.751752 0.682302 0.566912
1623 1 0.690175 0.74244 0.563594
1627 1 0.814548 0.744678 0.56752
1628 1 0.747493 0.812501 0.563665
1718 1 0.686366 0.684904 0.630249
1722 1 0.813379 0.682591 0.627588
1750 1 0.681891 0.80797 0.627555
1753 1 0.751538 0.750789 0.62754
1754 1 0.814225 0.811707 0.628724
1949 1 0.874558 0.496392 0.871489
909 1 0.376205 0.998383 0.876283
1729 1 0.999937 0.751073 0.621504
1600 1 0.877752 0.687316 0.565755
1631 1 0.943167 0.748484 0.558304
1632 1 0.875851 0.81027 0.566002
1726 1 0.936555 0.685551 0.623306
1757 1 0.877108 0.749216 0.620648
1758 1 0.94319 0.814547 0.625375
919 1 0.689783 0.995221 0.942081
1635 1 0.0658207 0.878825 0.567176
1640 1 0.122119 0.943561 0.566647
1762 1 0.0599541 0.946515 0.62114
1765 1 0.130616 0.879214 0.625871
1761 1 1.00122 0.875556 0.628331
1636 1 -0.00213147 0.9437 0.561248
1646 1 0.438923 0.937565 0.498799
927 1 0.937044 0.99697 0.939747
1641 1 0.253026 0.874457 0.501303
1649 1 0.501015 0.873257 0.502604
925 1 0.877091 0.998725 0.871395
1597 1 0.87518 0.625661 0.503127
1639 1 0.187444 0.87712 0.561043
1643 1 0.317264 0.87598 0.56238
1644 1 0.252665 0.94134 0.560765
1766 1 0.18864 0.93982 0.625424
1769 1 0.251819 0.877149 0.625987
1770 1 0.313191 0.935604 0.623419
1081 1 0.755416 0.62801 0.995144
1817 1 0.752754 0.496274 0.752783
1642 1 0.315171 0.937263 0.500819
1661 1 0.87258 0.874278 0.504323
917 1 0.623176 0.998773 0.878898
1618 1 0.562024 0.815883 0.50449
1647 1 0.439602 0.875372 0.562668
1648 1 0.37813 0.937812 0.564253
1773 1 0.381977 0.877218 0.624235
1774 1 0.444091 0.940436 0.622318
1652 1 0.50245 0.935935 0.56124
1777 1 0.503775 0.872325 0.631088
1551 1 0.438997 0.503645 0.562931
1614 1 0.436519 0.811955 0.502311
1947 1 0.811637 0.501696 0.936154
1651 1 0.565895 0.877755 0.56464
1656 1 0.621666 0.938394 0.560999
1778 1 0.558631 0.93872 0.62853
1781 1 0.628061 0.884352 0.628738
907 1 0.314005 0.995655 0.937404
647 1 0.186282 0.99778 0.688097
1146 1 0.810313 0.933907 1.00034
1655 1 0.684668 0.875592 0.571468
1659 1 0.811717 0.872952 0.564319
1660 1 0.751237 0.9331 0.565566
1782 1 0.688524 0.943115 0.624176
1785 1 0.750982 0.87321 0.627986
1786 1 0.810699 0.935055 0.625991
539 1 0.812804 1.00328 0.564124
1606 1 0.190544 0.817084 0.498598
1593 1 0.753511 0.619184 0.501284
1573 1 0.127328 0.622143 0.500465
1658 1 0.80994 0.933108 0.499767
1823 1 0.936994 0.500695 0.806719
655 1 0.441305 0.995514 0.685223
1663 1 0.940742 0.870188 0.56078
1664 1 0.872464 0.937263 0.563985
1789 1 0.876587 0.872006 0.622692
1790 1 0.933593 0.936235 0.622102
905 1 0.248559 0.999029 0.871012
921 1 0.748599 0.993216 0.875288
1563 1 0.810914 0.499736 0.56704
1672 1 0.131005 0.557552 0.686584
1699 1 0.0652758 0.626374 0.678387
1794 1 0.0646806 0.564355 0.752471
1800 1 0.13044 0.564554 0.816129
1827 1 0.0641454 0.62546 0.812223
1829 1 0.128429 0.619533 0.747541
1046 1 0.688563 0.56228 0.997924
1142 1 0.681953 0.931361 0.998166
1668 1 0.00245066 0.568162 0.686234
1681 1 0.501184 0.502 0.619976
1653 1 0.626928 0.874023 0.501839
1676 1 0.249584 0.563704 0.686138
1703 1 0.187202 0.628099 0.686377
1707 1 0.311539 0.623462 0.685981
1798 1 0.191458 0.563385 0.749531
1802 1 0.315535 0.570437 0.749376
1804 1 0.249562 0.562841 0.815578
1831 1 0.191203 0.625725 0.814747
1833 1 0.247249 0.622757 0.7464
1835 1 0.30478 0.629071 0.81188
2038 1 0.685599 0.930656 0.87092
2039 1 0.689201 0.875245 0.936382
1680 1 0.375977 0.563442 0.680777
1711 1 0.440156 0.621694 0.686375
1806 1 0.436922 0.555907 0.74781
1808 1 0.373245 0.565834 0.816382
1837 1 0.377496 0.629616 0.74451
1839 1 0.43745 0.622941 0.80991
1841 1 0.501055 0.626954 0.754191
1795 1 0.066314 0.500506 0.811778
1654 1 0.691429 0.937386 0.506809
1065 1 0.250859 0.634259 0.995184
2041 1 0.754086 0.872986 0.869404
1684 1 0.499884 0.563256 0.682376
1812 1 0.501075 0.563236 0.817497
1688 1 0.62495 0.563396 0.687906
1715 1 0.560406 0.620173 0.686254
1810 1 0.551516 0.562806 0.748378
1816 1 0.620409 0.563071 0.8082
1843 1 0.559379 0.624772 0.81356
1845 1 0.62008 0.622916 0.749451
1939 1 0.56191 0.501636 0.939134
1692 1 0.749508 0.557106 0.688432
1719 1 0.688175 0.623923 0.68866
1723 1 0.813738 0.621518 0.688034
1814 1 0.687188 0.563948 0.747061
1818 1 0.813252 0.565178 0.751442
1820 1 0.747219 0.561174 0.816975
1847 1 0.68246 0.62977 0.810064
1849 1 0.748484 0.623349 0.748134
1851 1 0.809563 0.625699 0.809571
2042 1 0.813149 0.934366 0.872156
1109 1 0.622343 0.749425 0.994421
2043 1 0.815924 0.870273 0.936815
1086 1 0.935676 0.684937 0.997967
1825 1 0.999323 0.621803 0.748562
1796 1 0.00410722 0.560466 0.810116
1696 1 0.87667 0.563148 0.688395
1727 1 0.937643 0.628692 0.685654
1822 1 0.936263 0.564036 0.747728
1824 1 0.874066 0.559264 0.813051
1853 1 0.875665 0.627089 0.74967
1855 1 0.939412 0.624638 0.811732
2044 1 0.750102 0.933471 0.935761
1704 1 0.118757 0.690609 0.687066
1731 1 0.063724 0.74834 0.684531
1736 1 0.121714 0.810294 0.688051
1826 1 0.0617892 0.686664 0.750576
1832 1 0.127211 0.687216 0.811964
1858 1 0.0613646 0.81199 0.745116
1859 1 0.0606001 0.750622 0.811957
1861 1 0.124838 0.753696 0.753632
1864 1 0.127923 0.815217 0.811272
1828 1 0.00459976 0.688113 0.811647
1708 1 0.246065 0.688291 0.690164
1735 1 0.186228 0.75285 0.686688
1739 1 0.318714 0.749055 0.689086
1740 1 0.246921 0.811823 0.684017
1830 1 0.185041 0.687458 0.749693
1834 1 0.315222 0.686622 0.749006
1836 1 0.248445 0.691003 0.814001
1862 1 0.192158 0.812067 0.75013
1863 1 0.189843 0.751786 0.813176
1865 1 0.246516 0.746693 0.753684
1866 1 0.306536 0.806189 0.748283
1867 1 0.314218 0.749434 0.813334
1868 1 0.247964 0.812125 0.811652
1712 1 0.378464 0.689001 0.684778
1743 1 0.43558 0.74997 0.688889
1744 1 0.379269 0.811619 0.683718
1838 1 0.436377 0.686791 0.754963
1840 1 0.374893 0.687531 0.81473
1869 1 0.374832 0.749594 0.75205
1870 1 0.437006 0.815435 0.752483
1871 1 0.434201 0.75055 0.815167
1872 1 0.375166 0.815299 0.813473
1873 1 0.497551 0.752233 0.749253
1844 1 0.500744 0.688702 0.812767
1876 1 0.498923 0.811357 0.812176
1748 1 0.500867 0.815074 0.691281
1716 1 0.49675 0.68161 0.684717
1720 1 0.621684 0.689152 0.689579
1747 1 0.558407 0.74885 0.688318
1752 1 0.623012 0.814921 0.687295
1842 1 0.559631 0.685256 0.748466
1848 1 0.625011 0.687058 0.8139
1874 1 0.560239 0.810375 0.748426
1875 1 0.565443 0.756098 0.814525
1877 1 0.626524 0.747242 0.755448
1880 1 0.627025 0.817504 0.814831
1724 1 0.748734 0.688187 0.690008
1751 1 0.686706 0.750626 0.690907
1755 1 0.813982 0.750064 0.688267
1756 1 0.747432 0.812198 0.683722
1846 1 0.685096 0.686028 0.748686
1850 1 0.812805 0.686611 0.746596
1852 1 0.744657 0.688405 0.807671
1878 1 0.686558 0.812314 0.750858
1879 1 0.687655 0.752586 0.815229
1881 1 0.744994 0.752555 0.74563
1882 1 0.813769 0.812807 0.745704
1883 1 0.810022 0.746827 0.809331
1884 1 0.7516 0.813124 0.810037
1732 1 0.00434117 0.807848 0.685502
1857 1 0.00202087 0.750461 0.746208
1860 1 0.000742219 0.813584 0.816069
1700 1 0.00183898 0.686313 0.688347
1728 1 0.876728 0.688393 0.685689
1759 1 0.938294 0.750311 0.683295
1760 1 0.883357 0.812096 0.685406
1854 1 0.939033 0.689419 0.752558
1856 1 0.873767 0.690626 0.813515
1885 1 0.874133 0.753273 0.751638
1886 1 0.943884 0.814541 0.748105
1887 1 0.94006 0.751077 0.810444
1888 1 0.87445 0.812 0.813774
1097 1 0.25158 0.752042 1.00098
1763 1 0.0615453 0.87352 0.684141
1768 1 0.119185 0.941129 0.688039
1890 1 0.0615561 0.941243 0.748796
1891 1 0.0640223 0.874763 0.812717
1893 1 0.123498 0.875668 0.747888
1896 1 0.123432 0.940655 0.810815
1892 1 1.00179 0.941164 0.812664
1085 1 0.871801 0.623493 0.999126
2014 1 0.938381 0.814918 0.876578
2036 1 0.498414 0.940274 0.933337
1767 1 0.18585 0.873425 0.686126
1771 1 0.316739 0.872746 0.684743
1772 1 0.251854 0.940783 0.6876
1894 1 0.182906 0.935652 0.744122
1895 1 0.18602 0.876384 0.81458
1897 1 0.249717 0.876824 0.747558
1898 1 0.309196 0.936261 0.746945
1899 1 0.312138 0.874955 0.812297
1900 1 0.244433 0.937448 0.807856
2033 1 0.501881 0.875935 0.874994
2040 1 0.624028 0.931462 0.932471
2037 1 0.620672 0.875979 0.872103
1775 1 0.438062 0.873948 0.686728
1776 1 0.373323 0.939743 0.687092
1901 1 0.368278 0.874433 0.747191
1902 1 0.434913 0.941403 0.748369
1903 1 0.434886 0.875543 0.811476
1904 1 0.375132 0.936943 0.812977
1905 1 0.497308 0.876451 0.752366
1780 1 0.503047 0.937166 0.697386
2034 1 0.557944 0.93655 0.874297
1908 1 0.50105 0.939947 0.811853
1779 1 0.56335 0.877784 0.691266
1784 1 0.623767 0.940373 0.690185
1906 1 0.563188 0.937975 0.753556
1907 1 0.563614 0.874888 0.810311
1909 1 0.627645 0.87739 0.74952
1912 1 0.623426 0.936573 0.813491
2035 1 0.557926 0.87739 0.935068
1069 1 0.375997 0.626584 0.995787
2015 1 0.938026 0.750217 0.938311
1783 1 0.687855 0.876647 0.690463
1787 1 0.810003 0.87326 0.688399
1788 1 0.749181 0.939281 0.689048
1910 1 0.687723 0.940426 0.754826
1911 1 0.688362 0.873559 0.807744
1913 1 0.751411 0.875225 0.749898
1914 1 0.810965 0.937238 0.748481
1915 1 0.816307 0.873077 0.808197
1916 1 0.747764 0.935152 0.813141
1695 1 0.939909 0.501464 0.684511
2016 1 0.87596 0.80901 0.936873
1889 1 0.00146052 0.879986 0.751409
1764 1 0.00273847 0.939316 0.684651
1791 1 0.936579 0.879817 0.687136
1792 1 0.874823 0.937435 0.688806
1917 1 0.877207 0.869975 0.751506
1918 1 0.937065 0.940315 0.749963
1919 1 0.939668 0.878015 0.814506
1920 1 0.874149 0.939541 0.811956
1082 1 0.811698 0.690228 0.995983
1985 1 0.99812 0.751231 0.874967
1922 1 0.066375 0.558391 0.875186
1928 1 0.129095 0.559396 0.94493
1955 1 0.0608541 0.624321 0.936301
1957 1 0.122593 0.621237 0.878058
2011 1 0.813076 0.749287 0.934323
1117 1 0.875417 0.744967 0.998849
1982 1 0.943135 0.686319 0.874159
1926 1 0.194126 0.559775 0.879972
1930 1 0.311892 0.562573 0.874017
1932 1 0.252148 0.56307 0.946039
1959 1 0.18729 0.625258 0.935787
1961 1 0.252737 0.630461 0.877056
1963 1 0.314282 0.62413 0.93856
1984 1 0.876927 0.684275 0.934788
2031 1 0.440998 0.876092 0.933002
2032 1 0.374362 0.937322 0.941536
1934 1 0.438871 0.56231 0.875217
1936 1 0.3722 0.562465 0.93693
1965 1 0.375898 0.624073 0.875353
1967 1 0.438675 0.630866 0.935475
1969 1 0.4999 0.63193 0.87321
2029 1 0.373497 0.873898 0.874877
1940 1 0.499392 0.5677 0.934958
1938 1 0.56315 0.564238 0.876156
1944 1 0.620276 0.565027 0.93736
1971 1 0.558568 0.627042 0.934926
1973 1 0.628646 0.62935 0.875984
1803 1 0.31322 0.502069 0.8078
1066 1 0.317386 0.68899 0.996258
2010 1 0.812137 0.809527 0.873565
1070 1 0.437617 0.688415 1.00117
2030 1 0.439669 0.937741 0.87601
1942 1 0.684027 0.562481 0.874143
1946 1 0.8149 0.56616 0.873516
1948 1 0.752095 0.562458 0.931546
1975 1 0.685376 0.626099 0.938354
1977 1 0.742431 0.624688 0.870686
1979 1 0.815123 0.622517 0.934871
2026 1 0.318559 0.934862 0.875606
2027 1 0.307271 0.875551 0.939738
2025 1 0.246241 0.8698 0.873615
1924 1 1.00186 0.558262 0.937402
1953 1 0.00523075 0.621593 0.872209
1950 1 0.937631 0.562908 0.878937
1952 1 0.880397 0.562562 0.939844
1981 1 0.87675 0.621673 0.872268
1983 1 0.942701 0.624317 0.933104
2028 1 0.248469 0.93567 0.937676
2022 1 0.186129 0.932301 0.875001
2013 1 0.87387 0.747693 0.873809
1954 1 0.0616096 0.686691 0.87602
1960 1 0.126658 0.684353 0.9375
1986 1 0.0660698 0.809096 0.873682
1987 1 0.0668256 0.746446 0.936967
1989 1 0.123707 0.748414 0.870347
1992 1 0.119994 0.810629 0.937251
1988 1 0.00438262 0.805856 0.94129
1956 1 -0.00111202 0.689491 0.938787
1102 1 0.435455 0.810649 0.998078
2018 1 0.0623638 0.940204 0.871578
1958 1 0.188315 0.688127 0.876508
1962 1 0.31559 0.687799 0.873842
1964 1 0.250473 0.689546 0.934035
1990 1 0.185582 0.812969 0.881603
1991 1 0.182839 0.750197 0.935975
1993 1 0.25054 0.747898 0.878017
1994 1 0.311032 0.814256 0.876054
1995 1 0.311436 0.75445 0.939565
1996 1 0.250657 0.811817 0.940711
2009 1 0.75087 0.748422 0.874082
2024 1 0.119946 0.935379 0.934118
2000 1 0.376115 0.814007 0.935153
1999 1 0.434497 0.749057 0.93992
1998 1 0.444171 0.812873 0.874502
1997 1 0.375931 0.750203 0.874785
1968 1 0.376465 0.684797 0.933924
1966 1 0.432626 0.691459 0.87784
2004 1 0.497408 0.813914 0.938349
1972 1 0.497636 0.692235 0.938482
2017 1 0.999363 0.877187 0.874366
2019 1 0.0594102 0.874333 0.935479
2001 1 0.500138 0.749024 0.874577
1970 1 0.563676 0.688344 0.873398
1976 1 0.621188 0.687262 0.936287
2002 1 0.560512 0.814681 0.87468
2003 1 0.563253 0.753424 0.938844
2005 1 0.625188 0.754025 0.87794
2008 1 0.623921 0.814124 0.936628
2021 1 0.12048 0.873308 0.874524
2023 1 0.182449 0.875026 0.937458
2012 1 0.750221 0.813283 0.942514
1974 1 0.690197 0.692354 0.876663
1978 1 0.812218 0.683644 0.87362
1980 1 0.747816 0.685201 0.937401
2006 1 0.689818 0.809601 0.878023
2007 1 0.686516 0.751052 0.941677
1118 1 0.937157 0.810661 0.998288
915 1 0.563117 1.00011 0.940767
531 1 0.562514 0.998866 0.563157
1549 1 0.373295 0.503719 0.501409
1807 1 0.437975 0.499716 0.811439
1933 1 0.374304 0.503076 0.868762
653 1 0.37799 0.997712 0.622029
5 1 0.125875 0.999414 0.997202
1935 1 0.442289 0.501857 0.938147
1937 1 0.503536 0.503193 0.877134
1679 1 0.439449 0.503879 0.677673
795 1 0.809249 1.00008 0.814186
1813 1 0.623334 0.504384 0.746454
1547 1 0.309186 0.500684 0.559368
1811 1 0.561849 0.501308 0.812329
1687 1 0.68339 0.503208 0.687437
775 1 0.184945 0.997588 0.809936
1799 1 0.190797 0.500078 0.813134
1797 1 0.129001 0.500752 0.750045
665 1 0.752176 0.997251 0.628981
1042 1 0.560808 0.568335 0.995748
1074 1 0.561711 0.690457 0.998685
1054 1 0.940094 0.563372 0.999061
1073 1 0.499877 0.625229 0.999898
1077 1 0.619968 0.626165 0.996878
1049 1 0.746645 0.505037 0.993304
1078 1 0.68835 0.687153 0.996857
1062 1 0.188543 0.687854 1.00087
25 1 0.753621 0.994113 0.999069
1133 1 0.378705 0.872142 0.998021
1038 1 0.439437 0.565698 0.993351
1121 1 0.998665 0.876078 0.999044
1126 1 0.184965 0.935588 1.0009
1034 1 0.315241 0.564153 0.998404
1058 1 0.0671651 0.683574 1.00004
1662 1 0.936424 0.940552 0.501964
1106 1 0.560225 0.814806 1.00219
1602 1 0.0665675 0.811397 0.501273
1621 1 0.628634 0.755834 0.502689
1622 1 0.688836 0.815943 0.502029
1625 1 0.751714 0.74799 0.50018
1061 1 0.128046 0.623713 0.997985
1138 1 0.562257 0.934687 0.999678
1598 1 0.942395 0.681058 0.5001
1594 1 0.815271 0.685803 0.505199
1629 1 0.873658 0.749564 0.505553
1149 1 0.870647 0.871454 0.999944
1538 1 0.0608075 0.556074 0.500175
1626 1 0.806283 0.808395 0.501996
1581 1 0.371147 0.630411 0.502187
541 1 0.870951 1.00055 0.502622
1585 1 0.498042 0.625469 0.502329
1633 1 0.00195968 0.878352 0.501528
| [
"[email protected]"
] | |
3a0cca8abc3ef3909ea5fe190d9189a0f0d90ae8 | d5b48163d236ca770be8e687f92192e2971397e8 | /globalvariableFunction1.py | 3477ac1307caf7853c39640dc6a0019cd95789a2 | [] | no_license | Kunal352000/python_program | 191f5d9c82980eb706e11457c2b5af54b0d2ae95 | 7a1c645f9eab87cc45a593955dcb61b35e2ce434 | refs/heads/main | 2023-07-12T19:06:19.121741 | 2021-08-21T11:58:41 | 2021-08-21T11:58:41 | 376,606,535 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 139 | py | x=10#global variable
def f1():
    """Demonstrate UnboundLocalError: the x+=5 below makes x local to f1."""
    print(x)# raises UnboundLocalError: x is local throughout f1 because it is assigned below
    x+=5# assignment without `global x` makes x local for the WHOLE function body
    print(x)# never reached
f1()# raises UnboundLocalError, so the statements below never execute
print(x)# would print 10 if reached (plain read of the global)
def f2():
    """Reading a global without assigning it is fine."""
    print(x)# reads the module-level x; no assignment in f2, so no error here
f2()
| [
"[email protected]"
] | |
61e08cd88ef23fb7e87890a5b36fc050d8df3f6d | 4a2a0cfc984a9faa45903732d776cd61ea361779 | /pwncat/modules/agnostic/implant.py | 545178a75fb449321c95997dc09fb7844b0c1a07 | [] | no_license | PremHcz/pwncat | 2a746cfc546158fa288994b376fd71768672a33a | cb203349d7ca815c9350eb53f4bd2e0b0ee659fa | refs/heads/master | 2023-05-30T19:07:16.327320 | 2021-06-14T13:01:19 | 2021-06-14T13:01:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,437 | py | #!/usr/bin/env python3
from typing import List
from rich.prompt import Prompt
from pwncat.util import console
from pwncat.facts import Implant, KeepImplantFact
from pwncat.modules import Bool, Status, Argument, BaseModule, ModuleFailed
class Module(BaseModule):
    """Interact with installed implants in an open session. This module
    provides the ability to remove implants as well as manually escalate
    with a given implant. Implants implementing local escalation will
    automatically be picked up by the `escalate` command, however this
    module provides an alternative way to trigger escalation manually."""

    PLATFORM = None
    """ No platform restraints """

    ARGUMENTS = {
        "remove": Argument(Bool, default=False, help="remove installed implants"),
        "escalate": Argument(
            Bool, default=False, help="escalate using an installed local implant"
        ),
    }

    def run(self, session, remove, escalate):
        """Perform the requested action.

        :param session: the pwncat session to operate on
        :param remove: when True, remove the selected implants
        :param escalate: when True, attempt escalation with the selected implants
        :raises ModuleFailed: if neither or both actions were requested, or if
            escalation was requested and no implant worked
        """

        # Exactly one of remove/escalate must be requested.
        if (not remove and not escalate) or (remove and escalate):
            raise ModuleFailed("expected one of escalate or remove")

        # Look for matching implants. For escalation, only implants which can
        # replace the current user or spawn a new session are usable.
        implants = list(
            implant
            for implant in session.run("enumerate", types=["implant.*"])
            if not escalate
            or "implant.replace" in implant.types
            or "implant.spawn" in implant.types
        )

        try:
            # Pause the progress display while interacting with the user.
            session._progress.stop()

            console.print("Found the following implants:")
            for i, implant in enumerate(implants):
                console.print(f"{i+1}. {implant.title(session)}")

            if remove:
                prompt = "Which should we remove (e.g. '1 2 4', default: all)? "
            elif escalate:
                prompt = "Which should we attempt escalation with (e.g. '1 2 4', default: all)? "

            while True:
                selections = Prompt.ask(prompt, console=console)

                # Empty input selects all implants (the default).
                if selections == "":
                    break

                try:
                    # FIX: split on whitespace before converting. Iterating the
                    # raw string walked over individual characters, so any
                    # multi-implant selection like "1 2 4" (or a multi-digit
                    # index like "10") hit int(" ") and was always rejected.
                    implant_ids = [int(idx) for idx in selections.split()]
                    # Filter the implants (selections are 1-based)
                    implants: List[Implant] = [implants[i - 1] for i in implant_ids]
                    break
                except (IndexError, ValueError):
                    console.print("[red]error[/red]: invalid selection!")

        finally:
            # Always restore the progress display, even on Ctrl-C.
            session._progress.start()

        nremoved = 0

        for implant in implants:
            if remove:
                try:
                    yield Status(f"removing: {implant.title(session)}")
                    implant.remove(session)
                    session.target.facts.remove(implant)
                    nremoved += 1
                except KeepImplantFact:
                    # Remove implant types but leave the fact itself in the DB
                    implant.types.remove("implant.remote")
                    implant.types.remove("implant.replace")
                    implant.types.remove("implant.spawn")
                    nremoved += 1
                except ModuleFailed:
                    session.log(
                        f"[red]error[/red]: removal failed: {implant.title(session)}"
                    )
            elif escalate:
                try:
                    yield Status(
                        f"attempting escalation with: {implant.title(session)}"
                    )
                    result = implant.escalate(session)

                    if "implant.spawn" in implant.types:
                        # Move to the newly established session
                        session.manager.target = result
                    else:
                        # Track the new shell layer in the current session
                        session.layers.append(result)
                        session.platform.refresh_uid()

                    session.log(
                        f"escalation [green]succeeded[/green] with: {implant.title(session)}"
                    )
                    break
                except ModuleFailed:
                    # Try the next implant; fail only if none works.
                    continue
        else:
            if escalate:
                raise ModuleFailed("no working local escalation implants found")

        if nremoved:
            session.log(f"removed {nremoved} implants from target")

        # Save database modifications
        session.db.transaction_manager.commit()
| [
"[email protected]"
] | |
b83dddb0ba5cc289c6faf02198f34fa7f1efb501 | bd1362c60313784c90013dfc9f0169e64389bf27 | /scripts/dbutil/set_wfo.py | 7790328ad4c796e3e0c601f831caf22e45761999 | [] | no_license | ForceCry/iem | 391aa9daf796591909cb9d4e60e27375adfb0eab | 4b0390d89e6570b99ca83a5fa9b042226e17c1ad | refs/heads/master | 2020-12-24T19:04:55.517409 | 2013-04-09T14:25:36 | 2013-04-09T14:25:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 917 | py | """
Assign a WFO to sites in the metadata tables that have no WFO set
$Id: $:
"""
import re
import iemdb
MESOSITE = iemdb.connect('mesosite')
mcursor = MESOSITE.cursor()
mcursor2 = MESOSITE.cursor()
# Find sites we need to check on
mcursor.execute("""select s.id, c.wfo, s.iemid, s.network
from stations s, cwa c WHERE
s.geom && c.the_geom and contains(c.the_geom, s.geom)
and (s.wfo IS NULL or s.wfo = '') and s.country = 'US' """)
for row in mcursor:
id = row[0]
wfo = row[1]
iemid = row[2]
network = row[3]
if wfo is not None:
print 'Assinging WFO: %s to IEMID: %s ID: %s NETWORK: %s' % (wfo,
iemid, id, network)
mcursor2.execute("UPDATE stations SET wfo = '%s' WHERE iemid = %s" % (
wfo, iemid) )
else:
print 'ERROR assigning WFO to IEMID: %s ID: %s NETWORK: %s' % (
iemid, id, network)
mcursor.close()
mcursor2.close()
MESOSITE.commit()
MESOSITE.close()
| [
"akrherz@95f8c243-6001-0410-b151-932e6a9ed213"
] | akrherz@95f8c243-6001-0410-b151-932e6a9ed213 |
613e4837a9f63c8b247a8e460f07e655fe9e2904 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2210/60716/276310.py | 155107bcaf3512fb1bad228a6138043e1592acac | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 713 | py | def checkin(substr:list,parstr:str):
check = True
for t in range(len(parstr)):
if parstr[t] in substr:
substr.remove(parstr[t])
else:
break
return check
# Driver: for each test case, find the shortest substring of str1 that
# contains every character of str2 (with multiplicity) -- a brute-force
# minimum-window-substring attempt.
ucnum = int(input())  # number of test cases
ans = list()
for i in range(ucnum):
    str1 = input()  # haystack string
    str2 = input()  # characters the window must contain
    anstr = str()  # fallback answer (empty string) when no window is found
    # lens = len(str1)
    for j in range(len(str2),len(str1)+1):#length of substring
        # NOTE(review): range(len(str1)-j) skips the last valid start index;
        # presumably this should be range(len(str1)-j+1) -- confirm against
        # the intended problem statement.
        for k in range(len(str1)-j):#start of substring
            tempstr = str1[k:k+j]
            templist = list(tempstr)
            if checkin(templist,str2):
                ans.append(tempstr)
                break
        if len(ans)==i+1:
            break
    # NOTE(review): this append is unconditional, so when a window was found
    # ans receives BOTH the window and an extra empty string, which also
    # breaks the len(ans)==i+1 check for later cases -- looks like a bug;
    # verify against the expected output format.
    ans.append(anstr)
for i in ans:
    print(i)
| [
"[email protected]"
] | |
0351042e28eadd00d7b26fdc3576ef296ac9ee15 | 99d7a6448a15e7770e3b6f3859da043300097136 | /src/database/migrate/isotopedb/versions/010_Add_aliquot_to_LabTable.py | a8e30e8ecebd6498a1eda6c619ef3184b2accad3 | [] | no_license | softtrainee/arlab | 125c5943f83b37bc7431ae985ac7b936e08a8fe4 | b691b6be8214dcb56921c55daed4d009b0b62027 | refs/heads/master | 2020-12-31T07:54:48.447800 | 2013-05-06T02:49:12 | 2013-05-06T02:49:12 | 53,566,313 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 528 | py | from sqlalchemy import *
from migrate import *
def upgrade(migrate_engine):
    """Add an integer ``aliquot`` column to the ``LabTable`` table."""
    # Bind the engine supplied by the migration framework; never create
    # a new engine inside a migration.
    metadata = MetaData(bind=migrate_engine)
    lab_table = Table('LabTable', metadata, autoload=True)
    aliquot_column = Column('aliquot', Integer)
    aliquot_column.create(lab_table)
def downgrade(migrate_engine):
    """Reverse :func:`upgrade`: drop the ``aliquot`` column from ``LabTable``."""
    metadata = MetaData(bind=migrate_engine)
    lab_table = Table('LabTable', metadata, autoload=True)
    lab_table.c.aliquot.drop()
| [
"jirhiker@localhost"
] | jirhiker@localhost |
3b611d0a8558f770c80aa297e878934cb325eab4 | fbbe424559f64e9a94116a07eaaa555a01b0a7bb | /Keras_tensorflow_nightly/source2.7/tensorflow/contrib/distributions/python/ops/binomial.py | 6a1bb39ab28218a411bdf4329965186bcf32bf30 | [
"MIT"
] | permissive | ryfeus/lambda-packs | 6544adb4dec19b8e71d75c24d8ed789b785b0369 | cabf6e4f1970dc14302f87414f170de19944bac2 | refs/heads/master | 2022-12-07T16:18:52.475504 | 2022-11-29T13:35:35 | 2022-11-29T13:35:35 | 71,386,735 | 1,283 | 263 | MIT | 2022-11-26T05:02:14 | 2016-10-19T18:22:39 | Python | UTF-8 | Python | false | false | 10,299 | py | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""The Binomial distribution class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops.distributions import distribution
from tensorflow.python.ops.distributions import util as distribution_util
# Shared docstring fragment for the Binomial counting methods. Presumably
# appended to the prob/log_prob docstrings further down in this file (the
# consumer is outside this chunk) -- confirm against the full module.
_binomial_sample_note = """
For each batch member of counts `value`, `P[value]` is the probability that
after sampling `self.total_count` draws from this Binomial distribution, the
number of successes is `value`. Since different sequences of draws can result in
the same counts, the probability includes a combinatorial coefficient.
Note: `value` must be a non-negative tensor with dtype `dtype` and whose shape
can be broadcast with `self.probs` and `self.total_count`. `value` is only legal
if it is less than or equal to `self.total_count` and its components are equal
to integer values.
"""
def _bdtr(k, n, p):
  """The binomial cumulative distribution function.

  Args:
    k: floating point `Tensor`.
    n: floating point `Tensor`.
    p: floating point `Tensor`.

  Returns:
    `sum_{j=0}^k p^j (1 - p)^(n - j)`.
  """
  # betainc(a=0, ...) evaluates to NaN, which would poison the gradients
  # flowing into n and k. Guard with the usual trick:
  #   where(unsafe, safe_output, betainc(where(unsafe, safe_input, input)))
  # i.e. substitute a safe argument where k == n, then select the known
  # answer (1.0) for exactly those entries afterwards.
  all_ones = array_ops.ones_like(n - k)
  is_k_eq_n = math_ops.equal(k, n)
  safe_a = array_ops.where(is_k_eq_n, all_ones, n - k)
  cdf = math_ops.betainc(a=safe_a, b=k + 1, x=1 - p)
  return array_ops.where(is_k_eq_n, all_ones, cdf)
class Binomial(distribution.Distribution):
  """Binomial distribution.

  This distribution is parameterized by `probs`, a (batch of) probabilities for
  drawing a `1` and `total_count`, the number of trials per draw from the
  Binomial.

  #### Mathematical Details

  The Binomial is a distribution over the number of `1`'s in `total_count`
  independent trials, with each trial having the same probability of `1`, i.e.,
  `probs`.

  The probability mass function (pmf) is,

  ```none
  pmf(k; n, p) = p**k (1 - p)**(n - k) / Z
  Z = k! (n - k)! / n!
  ```

  where:

  * `total_count = n`,
  * `probs = p`,
  * `Z` is the normalizing constant, and,
  * `n!` is the factorial of `n`.

  #### Examples

  Create a single distribution, corresponding to 5 coin flips.

  ```python
  dist = Binomial(total_count=5., probs=.5)
  ```

  Create a single distribution (using logits), corresponding to 5 coin flips.

  ```python
  dist = Binomial(total_count=5., logits=0.)
  ```

  Creates 3 distributions with the third distribution most likely to have
  successes.

  ```python
  p = [.2, .3, .8]
  # n will be broadcast to [4., 4., 4.], to match p.
  dist = Binomial(total_count=4., probs=p)
  ```

  The distribution functions can be evaluated on counts.

  ```python
  # counts same shape as p.
  counts = [1., 2, 3]
  dist.prob(counts)  # Shape [3]

  # p will be broadcast to [[.2, .3, .8], [.2, .3, .8]] to match counts.
  counts = [[1., 2, 1], [2, 2, 4]]
  dist.prob(counts)  # Shape [2, 3]

  # p will be broadcast to shape [5, 7, 3] to match counts.
  counts = [[...]]  # Shape [5, 7, 3]
  dist.prob(counts)  # Shape [5, 7, 3]
  ```
  """

  def __init__(self,
               total_count,
               logits=None,
               probs=None,
               validate_args=False,
               allow_nan_stats=True,
               name="Binomial"):
    """Initialize a batch of Binomial distributions.

    Args:
      total_count: Non-negative floating point tensor with shape broadcastable
        to `[N1,..., Nm]` with `m >= 0` and the same dtype as `probs` or
        `logits`. Defines this as a batch of `N1 x ... x Nm` different Binomial
        distributions. Its components should be equal to integer values.
      logits: Floating point tensor representing the log-odds of a
        positive event with shape broadcastable to `[N1,..., Nm]` `m >= 0`, and
        the same dtype as `total_count`. Each entry represents logits for the
        probability of success for independent Binomial distributions. Only one
        of `logits` or `probs` should be passed in.
      probs: Positive floating point tensor with shape broadcastable to
        `[N1,..., Nm]` `m >= 0`, `probs in [0, 1]`. Each entry represents the
        probability of success for independent Binomial distributions. Only one
        of `logits` or `probs` should be passed in.
      validate_args: Python `bool`, default `False`. When `True` distribution
        parameters are checked for validity despite possibly degrading runtime
        performance. When `False` invalid inputs may silently render incorrect
        outputs.
      allow_nan_stats: Python `bool`, default `True`. When `True`, statistics
        (e.g., mean, mode, variance) use the value "`NaN`" to indicate the
        result is undefined. When `False`, an exception is raised if one or
        more of the statistic's batch members are undefined.
      name: Python `str` name prefixed to Ops created by this class.
    """
    # Capture constructor arguments before any new locals are created; the
    # base class keeps them for re-parameterization/copying.
    parameters = locals()
    with ops.name_scope(name, values=[total_count, logits, probs]):
      self._total_count = self._maybe_assert_valid_total_count(
          ops.convert_to_tensor(total_count, name="total_count"),
          validate_args)
      # Exactly one of `logits`/`probs` is supplied; the helper derives the
      # other so both parameterizations are available downstream.
      self._logits, self._probs = distribution_util.get_logits_and_probs(
          logits=logits,
          probs=probs,
          validate_args=validate_args,
          name=name)
    super(Binomial, self).__init__(
        dtype=self._probs.dtype,
        # Discrete outcomes: samples cannot be reparameterized for backprop.
        reparameterization_type=distribution.NOT_REPARAMETERIZED,
        validate_args=validate_args,
        allow_nan_stats=allow_nan_stats,
        parameters=parameters,
        graph_parents=[self._total_count,
                       self._logits,
                       self._probs],
        name=name)

  @property
  def total_count(self):
    """Number of trials."""
    return self._total_count

  @property
  def logits(self):
    """Log-odds of drawing a `1`."""
    return self._logits

  @property
  def probs(self):
    """Probability of drawing a `1`."""
    return self._probs

  def _batch_shape_tensor(self):
    # Batch shape is the broadcast of the two parameter shapes.
    return array_ops.broadcast_dynamic_shape(
        array_ops.shape(self.total_count),
        array_ops.shape(self.probs))

  def _batch_shape(self):
    return array_ops.broadcast_static_shape(
        self.total_count.get_shape(),
        self.probs.get_shape())

  def _event_shape_tensor(self):
    # Each draw is a scalar count, so the event shape is empty.
    return constant_op.constant([], dtype=dtypes.int32)

  def _event_shape(self):
    return tensor_shape.scalar()

  @distribution_util.AppendDocstring(_binomial_sample_note)
  def _log_prob(self, counts):
    return self._log_unnormalized_prob(counts) - self._log_normalization(counts)

  @distribution_util.AppendDocstring(_binomial_sample_note)
  def _prob(self, counts):
    return math_ops.exp(self._log_prob(counts))

  def _cdf(self, counts):
    counts = self._maybe_assert_valid_sample(counts)
    probs = self.probs
    if not (counts.shape.is_fully_defined()
            and self.probs.shape.is_fully_defined()
            and counts.shape.is_compatible_with(self.probs.shape)):
      # If both shapes are well defined and equal, we skip broadcasting.
      probs += array_ops.zeros_like(counts)
      counts += array_ops.zeros_like(self.probs)

    # Binomial CDF via the regularized incomplete beta function (see _bdtr).
    return _bdtr(k=counts, n=self.total_count, p=probs)

  def _log_unnormalized_prob(self, counts):
    # k*log(p) + (n - k)*log(1 - p); log1p preserves precision for small p.
    counts = self._maybe_assert_valid_sample(counts)
    return (counts * math_ops.log(self.probs) +
            (self.total_count - counts) * math_ops.log1p(-self.probs))

  def _log_normalization(self, counts):
    # log(Z) = lgamma(n - k + 1) + lgamma(k + 1) - lgamma(n + 1) = -log C(n, k).
    counts = self._maybe_assert_valid_sample(counts)
    return (math_ops.lgamma(1. + self.total_count - counts)
            + math_ops.lgamma(1. + counts)
            - math_ops.lgamma(1. + self.total_count))

  def _mean(self):
    return self.total_count * self.probs

  def _variance(self):
    # Var = n * p * (1 - p).
    return self._mean() * (1. - self.probs)

  @distribution_util.AppendDocstring(
      """Note that when `(1 + total_count) * probs` is an integer, there are
      actually two modes. Namely, `(1 + total_count) * probs` and
      `(1 + total_count) * probs - 1` are both modes. Here we return only the
      larger of the two modes.""")
  def _mode(self):
    return math_ops.floor((1. + self.total_count) * self.probs)

  def _maybe_assert_valid_total_count(self, total_count, validate_args):
    # Runtime checks (non-negative, integer-valued) only when requested.
    if not validate_args:
      return total_count
    return control_flow_ops.with_dependencies([
        check_ops.assert_non_negative(
            total_count,
            message="total_count must be non-negative."),
        distribution_util.assert_integer_form(
            total_count,
            message="total_count cannot contain fractional components."),
    ], total_count)

  def _maybe_assert_valid_sample(self, counts):
    """Check counts for proper shape, values, then return tensor version."""
    if not self.validate_args:
      return counts
    counts = distribution_util.embed_check_nonnegative_integer_form(counts)
    return control_flow_ops.with_dependencies([
        check_ops.assert_less_equal(
            counts, self.total_count,
            message="counts are not less than or equal to n."),
    ], counts)
| [
"[email protected]"
] | |
6778fcb3d7e2e4b3c99b3c9d800ed919c050db8f | 21c09799d006ed6bede4123d57d6d54d977c0b63 | /python/bTag_signalStudies_scan_CSVv2_2017_LMT_down.py | 3cb5809abbff613cd9c8fa51a1f5bb22722d4a64 | [] | no_license | corvettettt/DijetRootTreeAnalyzer | 68cb12e6b280957e1eb22c9842b0b9b30ae2c779 | e65624ffc105798209436fc80fb82e2c252c6344 | refs/heads/master | 2021-05-06T09:57:12.816787 | 2019-04-18T15:32:38 | 2019-04-18T15:32:38 | 114,043,763 | 1 | 0 | null | 2017-12-12T22:02:46 | 2017-12-12T22:02:46 | null | UTF-8 | Python | false | false | 12,869 | py | import ROOT as rt
import math as math
import sys, os
from bTag_signalStudies import *
from optparse import OptionParser
from rootTools import tdrstyle as setTDRStyle
### plots for signals ###
# tagging rate vs mass for signals (b, udcs, g)
# scale factors vs mass for signals with uncertainty
# selections acceptance vs mass for signals
# shape comparison before and after b-tag selection (normalized to 1)

# Command-line usage string shown by optparse.
usage = """usage: python python/bTag_signalStudies.py -f bb -m qq"""

# Input ntuple location. EOS access is disabled here (empty prefix);
# files are read from a local scratch area instead.
#eosPrefix = "root://eoscms.cern.ch//eos/cms"
#eosPath = "/store/group/phys_exotica/dijet/Dijet13TeV/deguio/fall16_red_MC/RSGravitonToQuarkQuark_kMpl01_Spring16_20161201_145940/"
eosPrefix = ""
eosPath = "/tmp/TylerW/"
sampleNames_qg = {
500:
1000:
2000:
3000:
4000:
5000:
6000:
7000:
8000:
9000:
}
#CHANGE FILE NAME AS SOON AS THE NTUPLES ARE READY
sampleNames_qq = {
500:
1000:
2000:
3000:
4000:
5000:
6000:
7000:
8000:
9000:
}
# CSVv2 b-tag discriminator thresholds for the Loose/Medium/Tight
# working points scanned by this script (2017 values).
CSV_Value = {
    'L':0.5803,
    'M':0.8838,
    'T':0.9693
}

# Path of the TTree inside the reduced ntuples.
treeName = "rootTupleTree/tree"

# Dijet-mass histogram binning per resonance mass point:
# mass -> [nBins, xLow, xHigh].
massRange = {500: [75,0,1500],
             #750: [75,0,1500],
             1000: [50,0,2000],
             2000: [50,0,5000],
             3000: [50,0,5000],
             4000: [35,0,7000],
             5000: [35,0,8000],
             6000: [30,0,9000],
             7000: [20,0,10000],
             8000: [20,0,12000],
             9000: [20,0,12000]
             }
def bookAndFill(mass, sample, flavour):
    """Book and fill the b-tag study histograms for one signal mass point.

    NOTE(review): the original ``def`` line was lost when this file was
    flattened; the signature is reconstructed from the call site
    ``bookAndFill(mass, sample, flavour)`` in ``__main__`` -- confirm against
    the upstream source. The loop nesting below is likewise reconstructed
    from the flattened indentation.

    Args (reconstructed):
        mass: resonance mass point, a key of ``massRange``.
        sample: path of the reduced ntuple ROOT file.
        flavour: signal flavour tag (not used inside this function).

    Returns:
        dict: {working point 'L'/'M'/'T' -> {histogram name -> ROOT TH1F}}.
    """
    # --- book histograms: one sub-dictionary per CSVv2 working point ---
    hDict = {}
    for i, j in CSV_Value.items():
        hDict[i] = {}
    for i, j in CSV_Value.items():
        prefix = str(mass) + "_" + i
        hDict[i]["h_mass_all"] = rt.TH1F(prefix + "_mass_all", prefix + "_mass_all",
                                         massRange[mass][0], massRange[mass][1], massRange[mass][2])
        hDict[i]["h_mass_passed"] = rt.TH1F(prefix + "_mass_passed_deepCSV", prefix + "_mass_passed_deepCSV",
                                            massRange[mass][0], massRange[mass][1], massRange[mass][2])
        hDict[i]["h_mass_passed"].SetLineColor(rt.kOrange + 8)
        hDict[i]["h_mass_passed"].SetMarkerColor(rt.kOrange + 8)
        hDict[i]["h_mass_passed"].SetLineWidth(3)
        hDict[i]["h_mass_passed"].GetXaxis().SetTitle("Resonance Mass [GeV]")
        hDict[i]["h_mass_passed_0b"] = rt.TH1F(prefix + "_mass_passed_deepCSV_0b", prefix + "_mass_passed_deepCSV_0b",
                                               massRange[mass][0], massRange[mass][1], massRange[mass][2])
        hDict[i]["h_mass_passed_0b"].SetMarkerSize(0.5)
        hDict[i]["h_mass_passed_1b"] = rt.TH1F(prefix + "_mass_passed_deepCSV_1b", prefix + "_mass_passed_deepCSV_1b",
                                               massRange[mass][0], massRange[mass][1], massRange[mass][2])
        hDict[i]["h_mass_passed_1b"].SetLineColor(rt.kRed)
        hDict[i]["h_mass_passed_1b"].SetMarkerColor(rt.kRed)
        hDict[i]["h_mass_passed_1b"].SetMarkerSize(0.5)
        hDict[i]["h_mass_passed_2b"] = rt.TH1F(prefix + "_mass_passed_deepCSV_2b", prefix + "_mass_passed_deepCSV_2b",
                                               massRange[mass][0], massRange[mass][1], massRange[mass][2])
        hDict[i]["h_mass_passed_2b"].SetLineColor(rt.kBlue)
        hDict[i]["h_mass_passed_2b"].SetMarkerColor(rt.kBlue)
        hDict[i]["h_mass_passed_2b"].SetMarkerSize(0.5)
        hDict[i]["h_mass_passed_le1b"] = rt.TH1F(prefix + "_mass_passed_deepCSV_le1b", prefix + "_mass_passed_deepCSV_le1b",
                                                 massRange[mass][0], massRange[mass][1], massRange[mass][2])
        hDict[i]["h_mass_passed_le1b"].SetLineColor(rt.kGreen)
        hDict[i]["h_mass_passed_le1b"].SetMarkerColor(rt.kGreen)
        hDict[i]["h_mass_passed_le1b"].SetMarkerSize(0.5)
        # Per-event b-tag weight monitors (not filled in the loop below).
        hDict[i]["h_weight_0b"] = rt.TH1F(prefix + "_weight_0b", prefix + "_weight_0b", 2000, 0., 2.)
        hDict[i]["h_weight_1b"] = rt.TH1F(prefix + "_weight_1b", prefix + "_weight_1b", 2000, 0., 2.)
        hDict[i]["h_weight_2b"] = rt.TH1F(prefix + "_weight_2b", prefix + "_weight_2b", 2000, 0., 2.)

    # --- loop over the tree and fill the histos ---
    tchain = rt.TChain(treeName)
    tchain.Add(sample)
    nEntries = tchain.GetEntries()
    for k in progressbar(range(nEntries), "Mass " + str(mass) + ": ", 40):
        tchain.GetEntry(k)
        for i, j in CSV_Value.items():
            hDict[i]["h_mass_all"].Fill(tchain.mjj)

            # Event selection (dijet kinematics + certified-lumi JSON); the
            # cut is working-point independent, so `continue` simply skips
            # the remaining fills for this event.
            if not (abs(tchain.deltaETAjj) < 1.3 and
                    abs(tchain.etaWJ_j1) < 2.5 and
                    abs(tchain.etaWJ_j2) < 2.5 and
                    tchain.pTWJ_j1 > 60 and
                    # tchain.pTWJ_j1 < 6500 and
                    tchain.pTWJ_j2 > 30 and
                    # tchain.pTWJ_j2 < 6500 and
                    # tchain.mjj > 1246 and tchain.mjj < 14000 and
                    tchain.PassJSON):
                continue

            hDict[i]["h_mass_passed"].Fill(tchain.mjj)

            # Scale factors of the jets passing the working-point threshold j.
            # Fix: the original called the non-existent str.low(); use lower()
            # so 'L'/'M'/'T' map onto the CSVv2SF_l/m/t_j1(2) branches.
            SFs = []
            if tchain.jetCSVAK4_j1 > j:
                SFs.append(getattr(tchain, 'CSVv2SF_%s_j1' % i.lower()))
            if tchain.jetCSVAK4_j2 > j:
                SFs.append(getattr(tchain, 'CSVv2SF_%s_j2' % i.lower()))

            # Event weights for exactly 0/1/2 b-tags; "le1b" sums the >=1 cases.
            #hDict[i]["h_mass_passed_0b"].Fill(tchain.mjj,tchain.evtBweight_m)
            hDict[i]["h_mass_passed_0b"].Fill(tchain.mjj, bWeight(SFs, 0))
            hDict[i]["h_mass_passed_1b"].Fill(tchain.mjj, bWeight(SFs, 1))
            hDict[i]["h_mass_passed_2b"].Fill(tchain.mjj, bWeight(SFs, 2))
            hDict[i]["h_mass_passed_le1b"].Fill(tchain.mjj, bWeight(SFs, 1))
            hDict[i]["h_mass_passed_le1b"].Fill(tchain.mjj, bWeight(SFs, 2))

    return hDict
if __name__ == '__main__':
rt.gROOT.SetBatch()
setTDRStyle.setTDRStyle()
###################################################################
parser = OptionParser(usage=usage)
parser.add_option('-f','--flavour',dest="flavour",type="string",default="none",
help="Name of the signal flavour")
parser.add_option('-m','--model',dest="model",type="string",default="qq",
help="Name of the signal model")
(options,args) = parser.parse_args()
flavour = options.flavour
model = options.model
print "selected flavour:",flavour
print "signal model :",model
###################################################################
# CSV_Value = [0.05,0.1,0.1522,0.2,0.25,0.3,0.35,0.4,0.45,0.4941,0.5803,0.6,0.65,0.7,0.75,0.8,0.85,0.8838,0.9693]
# book histos and graphs
mDict = {}
sampleNames = {}
# loop over the MC samples
if (model == "qq"):
sampleNames = sampleNames_qq
elif (model == "qg"):
sampleNames = sampleNames_qg
elif (model == "gg"):
sampleNames = sampleNames_gg
else:
print "model unknown"
exit
for mass, sample in sorted(sampleNames.iteritems()):
mDict[mass] = bookAndFill(mass,sample,flavour)
#Create ROOT file and save plain histos
outName = "signalHistos_"+flavour
outFolder = "signalHistos_"+flavour+'_Dec_ForScan_CSVv2_down'
if not os.path.exists(outFolder):
os.makedirs(outFolder)
if (flavour == "none"):
outName = ("ResonanceShapes_%s_13TeV_Spring16.root"%model)
    # Per-working-point graphs, keyed by 'L'/'M'/'T'.
    g_an_acc={}          # analysis acceptance vs mass (only the commented-out SetPoint below would fill it)
    g_0btag_rate={}      # fraction of selected events with exactly 0 b-tags
    g_1btag_rate={}
    g_2btag_rate={}
    g_le1btag_rate={}
    g_0btag_weight={}    # mean per-event b-tag weight (source histos are never filled)
    g_1btag_weight={}
    g_2btag_weight={}

    #make analysis vs mass
    for i,j in CSV_Value.items():
        g_an_acc[i] = rt.TGraphAsymmErrors()
        g_0btag_rate[i] = rt.TGraphAsymmErrors()
        g_0btag_rate[i].SetTitle("g_0btag_rate;Resonance Mass [GeV];Tagging Rate")
        g_0btag_rate[i].SetLineWidth(2)
        g_1btag_rate[i] = rt.TGraphAsymmErrors()
        g_1btag_rate[i].SetMarkerColor(rt.kRed)
        g_1btag_rate[i].SetLineColor(rt.kRed)
        g_1btag_rate[i].SetLineWidth(2)
        g_2btag_rate[i] = rt.TGraphAsymmErrors()
        g_2btag_rate[i].SetMarkerColor(rt.kBlue)
        g_2btag_rate[i].SetLineColor(rt.kBlue)
        g_2btag_rate[i].SetLineWidth(2)
        g_le1btag_rate[i] = rt.TGraphAsymmErrors()
        g_le1btag_rate[i].SetMarkerColor(rt.kGreen)
        g_le1btag_rate[i].SetLineColor(rt.kGreen)
        g_le1btag_rate[i].SetLineWidth(2)
        g_0btag_weight[i] = rt.TGraphAsymmErrors()
        g_1btag_weight[i] = rt.TGraphAsymmErrors()
        g_2btag_weight[i] = rt.TGraphAsymmErrors()

    # One graph point per mass point; tagging rate = N(passed, n tags)/N(passed).
    bin=0
    for mass,hDict in sorted(mDict.iteritems()):
        for i,j in CSV_Value.items():
            num = hDict[i]["h_mass_passed"].GetSumOfWeights()
            den = hDict[i]["h_mass_all"].GetSumOfWeights()
            #g_an_acc.SetPoint(bin,mass,num/den) #wrong. the reduced ntuples have already the selection implemented
            num = hDict[i]["h_mass_passed_0b"].GetSumOfWeights()
            den = hDict[i]["h_mass_passed"].GetSumOfWeights()
            g_0btag_rate[i].SetPoint(bin,mass,num/den)
            g_0btag_weight[i].SetPoint(bin,mass,hDict[i]["h_weight_0b"].GetMean())
            num = hDict[i]["h_mass_passed_1b"].GetSumOfWeights()
            g_1btag_rate[i].SetPoint(bin,mass,num/den)
            g_1btag_weight[i].SetPoint(bin,mass,hDict[i]["h_weight_1b"].GetMean())
            num = hDict[i]["h_mass_passed_2b"].GetSumOfWeights()
            g_2btag_rate[i].SetPoint(bin,mass,num/den)
            g_2btag_weight[i].SetPoint(bin,mass,hDict[i]["h_weight_2b"].GetMean())
            num = hDict[i]["h_mass_passed_le1b"].GetSumOfWeights()
            g_le1btag_rate[i].SetPoint(bin,mass,num/den)
        # NOTE(review): `bin` advances once per mass point (shared across
        # working points) -- reconstructed from the flattened indentation.
        bin += 1
    # Write one ROOT file per working point with all histograms and the
    # (interpolated) tagging-rate graphs; also print per-mass shape
    # comparison canvases as PDFs.
    for i,j in CSV_Value.items():
        rootFile = rt.TFile(outFolder+"/"+outName+"_"+i+".root", 'recreate')
        for mass,hDict in sorted(mDict.iteritems()):
            # shape comparison 0 btag
            h1 = rt.TCanvas()
            h1.SetGridx()
            h1.SetGridy()
            h1.cd()
            hDict[i]["h_mass_passed"].DrawNormalized()
            hDict[i]["h_mass_passed_0b"].DrawNormalized("sames")
            hDict[i]["h_mass_passed_1b"].DrawNormalized("sames")
            hDict[i]["h_mass_passed_2b"].DrawNormalized("sames")
            leg = rt.TLegend(0.87, 0.80, 0.96, 0.89)
            leg.AddEntry(hDict[i]["h_mass_passed"],"untagged","L")
            leg.AddEntry(hDict[i]["h_mass_passed_0b"],"0-tag","P")
            leg.AddEntry(hDict[i]["h_mass_passed_1b"],"1-tag","P")
            leg.AddEntry(hDict[i]["h_mass_passed_2b"],"2-tag","P")
            leg.Draw("same")
            h1.Print(outFolder+"/shapes_"+str(mass)+"_"+flavour+"_"+i+".pdf")
            for n,h in hDict[i].items():
                h.Write()
        g_an_acc[i].Write("g_an_acc")
        # Interpolated versions of the rate graphs (Do_Inter presumably comes
        # from the bTag_signalStudies star import -- confirm).
        g_0btag_rate_Q=Do_Inter(g_0btag_rate[i])
        g_1btag_rate_Q=Do_Inter(g_1btag_rate[i])
        g_2btag_rate_Q=Do_Inter(g_2btag_rate[i])
        g_le1btag_rate_Q=Do_Inter(g_le1btag_rate[i])
        # g_0btag_rate_Q.Write("g_0btag_rate")
        g_0btag_rate_Q.SetLineWidth(2)
        # g_1btag_rate_Q.Write("g_1btag_rate")
        g_1btag_rate_Q.SetMarkerColor(rt.kRed)
        g_1btag_rate_Q.SetLineColor(rt.kRed)
        g_1btag_rate_Q.SetLineWidth(2)
        # g_2btag_rate_Q.Write("g_2btag_rate")
        g_2btag_rate_Q.SetMarkerColor(rt.kBlue)
        g_2btag_rate_Q.SetLineColor(rt.kBlue)
        g_2btag_rate_Q.SetLineWidth(2)
        g_le1btag_rate_Q.SetMarkerColor(rt.kGreen)
        g_le1btag_rate_Q.SetLineColor(rt.kGreen)
        g_le1btag_rate_Q.SetLineWidth(2)
        g_0btag_rate_Q.Write("g_0btag_rate")
        g_1btag_rate_Q.Write("g_1btag_rate")
        g_2btag_rate_Q.Write("g_2btag_rate")
        g_le1btag_rate_Q.Write("g_le1btag_rate")
        g_0btag_weight[i].Write("g_0btag_weight")
        g_1btag_weight[i].Write("g_1btag_weight")
        g_2btag_weight[i].Write("g_2btag_weight")
        rootFile.Close()
    # Summary canvas per working point: interpolated tagging rate vs mass
    # for the 0/1/2/>=1 b-tag categories, printed as a PDF.
    for i,j in CSV_Value.items():
        # Draw and print
        # tagging rate vs mass
        c1 = rt.TCanvas()
        c1.SetGridx()
        c1.SetGridy()
        c1.cd()
        g_0btag_rate_Q=Do_Inter(g_0btag_rate[i])
        g_1btag_rate_Q=Do_Inter(g_1btag_rate[i])
        g_2btag_rate_Q=Do_Inter(g_2btag_rate[i])
        g_le1btag_rate_Q=Do_Inter(g_le1btag_rate[i])
        g_0btag_rate_Q.SetLineWidth(2)
        #g_0btag_rate_Q.Write("g_0btag_rate")
        g_1btag_rate_Q.SetMarkerColor(rt.kRed)
        g_1btag_rate_Q.SetLineColor(rt.kRed)
        g_1btag_rate_Q.SetLineWidth(2)
        #g_1btag_rate_Q.Write("g_1btag_rate")
        g_2btag_rate_Q.SetMarkerColor(rt.kBlue)
        g_2btag_rate_Q.SetLineColor(rt.kBlue)
        g_2btag_rate_Q.SetLineWidth(2)
        #g_2btag_rate_Q.Write("g_2btag_rate")
        g_le1btag_rate_Q.SetMarkerColor(rt.kGreen)
        g_le1btag_rate_Q.SetLineColor(rt.kGreen)
        g_le1btag_rate_Q.SetLineWidth(2)
        #g_le1btag_rate_Q.Write("g_le1btag_rate")
        g_0btag_rate_Q.Draw("APL")
        g_0btag_rate_Q.GetYaxis().SetRangeUser(0,1)
        g_1btag_rate_Q.Draw("PL,sames")
        g_2btag_rate_Q.Draw("PL,sames")
        g_le1btag_rate_Q.Draw("PL,sames")
        leg = rt.TLegend(0.87, 0.80, 0.96, 0.89)
        leg.AddEntry(g_0btag_rate_Q,"0-tag","L")
        leg.AddEntry(g_1btag_rate_Q,"1-tag","L")
        leg.AddEntry(g_2btag_rate_Q,"2-tag","L")
        leg.AddEntry(g_le1btag_rate_Q,"le1-tag","L")
        leg.Draw("same")
        c1.Print(outFolder+"/tagRate_"+flavour+"_"+i+".pdf")

    # close file
    #raw_input("Press Enter to continue...")
| [
"[email protected]"
] | |
9df92ff5844f388a0d92870ce2ceab590cc8a89d | 64870a6b0a38c63dd69387a2b9d591378dcaedfa | /setup.py | d9015e62651456081c3657bf72391f27c76df5af | [
"Unlicense"
] | permissive | andrewp-as-is/setuppy-generator.py | 28ff31d8fd6a66fb4d0ca77244fcc8acb5a53912 | ae27ae4d534a373f34ebe24a16353a214abf8fc5 | refs/heads/master | 2021-07-11T05:40:52.834761 | 2020-12-03T21:25:00 | 2020-12-03T21:25:00 | 217,581,540 | 13 | 0 | null | null | null | null | UTF-8 | Python | false | false | 201 | py | import setuptools
# Build/installation script for the setuppy-generator package.
# Fix: read requirements.txt through a context manager so the file handle
# is closed deterministically (the original leaked an open() handle).
with open('requirements.txt') as req_file:
    requirements = req_file.read().splitlines()

setuptools.setup(
    name='setuppy-generator',
    version='2020.12.2',
    # Runtime dependencies, one requirement specifier per line.
    install_requires=requirements,
    packages=setuptools.find_packages(),
)
| [
"[email protected]"
] | |
8e8f8f023aaacf37507d97b5a08cd7038235cb03 | 76255205d52cb81da0f8e0014775b98195ae83a1 | /osticket/env/bin/django-admin | 25c1965788b29f4f38330119aa9805526df6877b | [] | no_license | vuvandang1995/OSticket | 6c4fafbadffd99f635f049ca19a3dd120152d159 | 80c364cf9a7313cb102b7d618c43411c394b09f4 | refs/heads/master | 2020-03-09T10:08:16.715876 | 2018-07-11T00:11:52 | 2018-07-11T00:11:52 | 128,729,604 | 5 | 3 | null | 2018-07-11T00:11:53 | 2018-04-09T07:08:35 | JavaScript | UTF-8 | Python | false | false | 281 | #!/home/osticket/env/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from django.core.management import execute_from_command_line
if __name__ == '__main__':
    # Normalize argv[0]: strip the "-script.py"/".exe" suffix that
    # setuptools console-script wrappers add on Windows, so Django's
    # help output shows the bare executable name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    # Dispatch to Django's management command machinery and propagate
    # its return value as the process exit code.
    sys.exit(execute_from_command_line())
| [
"[email protected]"
] | ||
e3ce904d92a58307e91532be437be396e12719b0 | e5483ab737acd9fb222f0b7d1c770cfdd45d2ba7 | /ecommerce/core/migrations/0015_auto_20200617_0543.py | ed6c2dc12074874bcb1dfb5cfa5901daff8e3542 | [] | no_license | mxmaslin/otus_web | 6c1e534047444d7a1fc4cd1bf8245c25d9fc4835 | b90ad69e1b5c1828fa2ace165710422d113d1d17 | refs/heads/master | 2022-12-09T19:52:58.626199 | 2020-07-07T19:15:52 | 2020-07-07T19:15:52 | 226,154,128 | 1 | 1 | null | 2022-12-08T03:23:10 | 2019-12-05T17:25:11 | JavaScript | UTF-8 | Python | false | false | 2,038 | py | # Generated by Django 2.2.12 on 2020-06-17 02:43
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated schema migration: adds Order.ref_code, updates the Item
    # category choices, introduces the Address model, and links orders to an
    # optional shipping address. Do not edit by hand beyond comments.

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('core', '0014_orderitem_ordered'),
    ]

    operations = [
        # Optional human-readable reference code for an order.
        migrations.AddField(
            model_name='order',
            name='ref_code',
            field=models.CharField(blank=True, max_length=20, null=True),
        ),
        # Refresh the category choice labels (verbose names are in Russian).
        migrations.AlterField(
            model_name='item',
            name='category',
            field=models.CharField(choices=[('S', 'Футболки'), ('SW', 'Спортивная одежда'), ('OW', 'Верхняя одежда')], max_length=2, verbose_name='Категория'),
        ),
        # New shipping-address model owned by a user; `default` marks the
        # user's preferred address.
        migrations.CreateModel(
            name='Address',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('street_address', models.CharField(max_length=100, verbose_name='Улица')),
                ('house_number', models.CharField(max_length=10, verbose_name='Дом, корпус')),
                ('apartment_number', models.CharField(max_length=10, verbose_name='Номер квартиры')),
                ('address_zip', models.CharField(max_length=6)),
                ('default', models.BooleanField(default=False)),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'Адрес',
                'verbose_name_plural': 'Адреса',
            },
        ),
        # Orders keep their shipping address even if it is deleted (SET_NULL).
        migrations.AddField(
            model_name='order',
            name='shipping_address',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='shipping_address', to='core.Address'),
        ),
    ]
| [
"[email protected]"
] | |
ed526ae369d2eed2d321a2449ab09942936d0194 | b0af2f57aec5f6620fe73361f5aee18c3d12d7c5 | /code/robotics/PyAdvancedControl-master/mpc_sample/main.py | be94f52068f09aded7a128b2e28f37c4a582accf | [
"GPL-2.0-or-later",
"eCos-exception-2.0",
"MIT"
] | permissive | vicb1/python-reference | c7d3a7fee1b181cd4a80883467dc743b935993a2 | 40b9768124f2b9ef80c222017de068004d811d92 | refs/heads/master | 2022-10-09T08:41:56.699722 | 2022-10-04T00:53:58 | 2022-10-04T00:54:06 | 171,308,233 | 1 | 0 | MIT | 2022-06-21T23:43:38 | 2019-02-18T15:27:28 | Jupyter Notebook | UTF-8 | Python | false | false | 2,028 | py | #! /usr/bin/python
# -*- coding: utf-8 -*-
"""
Simple Model Predictive Control Simulation
author Atsushi Sakai
"""
import time
from cvxpy import *
import numpy as np
import matplotlib.pyplot as plt
# --- finite-horizon MPC demo: random linear plant, box-bounded input ---
print("Simulation start")

# Fixed seed so the random plant and initial state are reproducible.
np.random.seed(1)

n = 4  # state size
m = 2  # input size
T = 50  # number of horizon

# simulation parameter
alpha = 0.2
beta = 5.0

# Model Parameter
# Discrete-time linear system x_{t+1} = A x_t + B u_t with a random A near I.
A = np.eye(n) + alpha * np.random.randn(n, n)
B = np.random.randn(n, m)
x_0 = beta * np.random.randn(n, 1)

# Decision variables over the horizon. NOTE(review): this is the legacy
# cvxpy matrix-variable API Variable(rows, cols); modern cvxpy expects
# Variable((rows, cols)).
x = Variable(n, T + 1)
u = Variable(m, T)

# One sub-problem per time step: quadratic state/input cost, the system
# dynamics as an equality constraint, and |u_t|_inf <= 1.
states = []
for t in range(T):
    cost = sum_squares(x[:, t + 1]) + sum_squares(u[:, t])
    constr = [x[:, t + 1] == A * x[:, t] + B * u[:, t],
              norm(u[:, t], 'inf') <= 1]
    states.append(Problem(Minimize(cost), constr))
# sums problem objectives and concatenates constraints.
prob = sum(states)
# Boundary conditions: reach the origin at time T, start from x_0.
prob.constraints += [x[:, T] == 0, x[:, 0] == x_0]

start = time.time()
result = prob.solve(verbose=True)
elapsed_time = time.time() - start
print ("calc time:{0}".format(elapsed_time) + "[sec]")

# An infinite optimal value means the problem was infeasible.
if result == float("inf"):
    print("Cannot optimize")
    import sys
    sys.exit()
    # return

f = plt.figure()

# Plot (u_t)_1.
ax = f.add_subplot(211)
# Extract plain Python lists from the (legacy) cvxpy matrix values.
u1 = np.array(u[0, :].value[0, :])[0].tolist()
u2 = np.array(u[1, :].value[0, :])[0].tolist()
plt.plot(u1, '-r', label="u1")
plt.plot(u2, '-b', label="u2")
plt.ylabel(r"$u_t$", fontsize=16)
plt.yticks(np.linspace(-1.0, 1.0, 3))
plt.legend()
plt.grid(True)

# Plot (u_t)_2.
plt.subplot(2, 1, 2)
x1 = np.array(x[0, :].value[0, :])[0].tolist()
x2 = np.array(x[1, :].value[0, :])[0].tolist()
x3 = np.array(x[2, :].value[0, :])[0].tolist()
x4 = np.array(x[3, :].value[0, :])[0].tolist()
plt.plot(range(T + 1), x1, '-r', label="x1")
plt.plot(range(T + 1), x2, '-b', label="x2")
plt.plot(range(T + 1), x3, '-g', label="x3")
plt.plot(range(T + 1), x4, '-k', label="x4")
plt.yticks([-25, 0, 25])
plt.ylim([-25, 25])
plt.ylabel(r"$x_t$", fontsize=16)
plt.xlabel(r"$t$", fontsize=16)
plt.grid(True)
plt.legend()
plt.tight_layout()
plt.show()
| [
"[email protected]"
] | |
f6f4a37a17afcc48c0753be5d4751a06f2db2689 | 28b2144816ce1bf62b7481cd857fdc831a501f6b | /tabook/__init__.py | bcaae7c44a21c61773c0a480aee5d8b109854021 | [] | no_license | t0ster/Turbo-Address-Book | 3191a837d7d28cf9b8c9d20331fe0518062b3892 | 8c5463b1d4423a0c41d7ed75ff9a512ae1bc515b | refs/heads/master | 2020-05-31T14:10:46.449799 | 2011-06-28T14:07:49 | 2011-06-28T14:07:49 | 1,953,556 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 61 | py | # -*- coding: utf-8 -*-
"""The Turbo-Address-Book package"""
| [
"[email protected]"
] | |
354af91557abc5587e4c6a68abaf64e39f6c2d67 | 6e64eb9a4353dc6bd89c649d27bb20aa61173d7d | /core/products/views/product/views.py | cd1644f54713f542333f36d4ef3078e2019fa4f8 | [] | no_license | RoodrigoRoot/deploy_django | 0d4b76ae41bab907d5d69b4e7c34d5151f9827bd | 593b4613f1c224e236ac7f798e771e447ada677d | refs/heads/master | 2022-11-28T05:39:37.350224 | 2020-03-19T19:42:04 | 2020-03-19T19:42:04 | 248,580,750 | 0 | 0 | null | 2022-11-22T03:38:31 | 2020-03-19T18:50:40 | JavaScript | UTF-8 | Python | false | false | 2,418 | py | from django.contrib.auth.decorators import login_required
from django.shortcuts import render
from django.urls import reverse_lazy
from django.utils.decorators import method_decorator
from django.views.generic import ListView, CreateView, UpdateView, DeleteView
from core.products.forms import ProductForm
from core.products.models import Product
class ProductListView(ListView):
    """Read-only listing of every ``Product`` (authentication required)."""

    model = Product
    template_name = 'product/list.html'

    @method_decorator(login_required)
    def dispatch(self, request, *args, **kwargs):
        # Gate the entire view behind a logged-in session.
        return super(ProductListView, self).dispatch(request, *args, **kwargs)

    def get_context_data(self, **kwargs):
        ctx = super().get_context_data(**kwargs)
        ctx.update({'title': 'Listado de Productos'})
        return ctx
class ProductCreate(CreateView):
    """Authenticated form view that registers a new ``Product``."""

    model = Product
    template_name = 'product/create.html'
    form_class = ProductForm
    success_url = reverse_lazy('product_list')

    @method_decorator(login_required)
    def dispatch(self, request, *args, **kwargs):
        # Require a logged-in user before any HTTP verb is handled.
        return super(ProductCreate, self).dispatch(request, *args, **kwargs)

    def get_context_data(self, **kwargs):
        ctx = super().get_context_data(**kwargs)
        ctx.update({
            'title': 'Nuevo registro de un Producto',
            'action': 'add',
        })
        return ctx
class ProductUpdate(UpdateView):
    """Authenticated form view that edits an existing ``Product``."""

    model = Product
    template_name = 'product/create.html'
    form_class = ProductForm
    success_url = reverse_lazy('product_list')

    @method_decorator(login_required)
    def dispatch(self, request, *args, **kwargs):
        # Require a logged-in user before any HTTP verb is handled.
        return super(ProductUpdate, self).dispatch(request, *args, **kwargs)

    def get_context_data(self, **kwargs):
        ctx = super().get_context_data(**kwargs)
        ctx.update({
            'title': 'Edición de un Producto',
            'action': 'edit',
        })
        return ctx
class ProductDelete(DeleteView):
    """Authenticated confirmation view that removes a ``Product``."""

    model = Product
    template_name = 'product/delete.html'
    success_url = reverse_lazy('product_list')

    @method_decorator(login_required)
    def dispatch(self, request, *args, **kwargs):
        # Require a logged-in user before any HTTP verb is handled.
        return super(ProductDelete, self).dispatch(request, *args, **kwargs)

    def get_context_data(self, **kwargs):
        ctx = super().get_context_data(**kwargs)
        ctx.update({
            'title': 'Notificación de eliminación',
            'url': reverse_lazy('product_list'),
        })
        return ctx
| [
"[email protected]"
] | |
996800500c74f15861191e0d0013276f1dbb2768 | 673f9b85708affe260b892a4eb3b1f6a0bd39d44 | /Botnets/App/App Web/PDG-env/lib/python3.6/site-packages/pandas/tests/base/test_ops.py | 2693eb12dda711c9a9025040a68ec054f6b7c9e1 | [
"MIT"
] | permissive | i2tResearch/Ciberseguridad_web | feee3fe299029bef96b158d173ce2d28ef1418e4 | e6cccba69335816442c515d65d9aedea9e7dc58b | refs/heads/master | 2023-07-06T00:43:51.126684 | 2023-06-26T00:53:53 | 2023-06-26T00:53:53 | 94,152,032 | 14 | 0 | MIT | 2023-09-04T02:53:29 | 2017-06-13T00:21:00 | Jupyter Notebook | UTF-8 | Python | false | false | 33,362 | py | from datetime import datetime, timedelta
from io import StringIO
import sys
import numpy as np
import pytest
from pandas._libs.tslib import iNaT
from pandas.compat import PYPY
from pandas.compat.numpy import np_array_datetime64_compat
from pandas.core.dtypes.common import (
is_datetime64_dtype,
is_datetime64tz_dtype,
is_object_dtype,
needs_i8_conversion,
)
import pandas as pd
from pandas import (
DataFrame,
DatetimeIndex,
Index,
Interval,
IntervalIndex,
PeriodIndex,
Series,
Timedelta,
TimedeltaIndex,
Timestamp,
)
import pandas._testing as tm
from pandas.core.indexes.datetimelike import DatetimeIndexOpsMixin
class Ops:
    """Fixture base class for index/series "ops" tests.

    ``setup_method`` builds one Index and one Series per dtype family (plus
    narrow-dtype series) and collects them all in ``self.objs`` for
    subclasses to iterate over.
    """

    def _allow_na_ops(self, obj):
        """Whether to skip test cases including NaN"""
        if (isinstance(obj, Index) and obj.is_boolean()) or not obj._can_hold_na:
            # don't test boolean / integer dtypes
            return False
        return True

    def setup_method(self, method):
        # One index per dtype family, all named "a" so name propagation can
        # be asserted uniformly.
        self.bool_index = tm.makeBoolIndex(10, name="a")
        self.int_index = tm.makeIntIndex(10, name="a")
        self.float_index = tm.makeFloatIndex(10, name="a")
        self.dt_index = tm.makeDateIndex(10, name="a")
        self.dt_tz_index = tm.makeDateIndex(10, name="a").tz_localize(tz="US/Eastern")
        self.period_index = tm.makePeriodIndex(10, name="a")
        self.string_index = tm.makeStringIndex(10, name="a")
        self.unicode_index = tm.makeUnicodeIndex(10, name="a")

        # Series sharing the same random values, one per index above.
        arr = np.random.randn(10)
        self.bool_series = Series(arr, index=self.bool_index, name="a")
        self.int_series = Series(arr, index=self.int_index, name="a")
        self.float_series = Series(arr, index=self.float_index, name="a")
        self.dt_series = Series(arr, index=self.dt_index, name="a")
        self.dt_tz_series = self.dt_tz_index.to_series()
        self.period_series = Series(arr, index=self.period_index, name="a")
        self.string_series = Series(arr, index=self.string_index, name="a")
        self.unicode_series = Series(arr, index=self.unicode_index, name="a")

        types = ["bool", "int", "float", "dt", "dt_tz", "period", "string", "unicode"]
        self.indexes = [getattr(self, f"{t}_index") for t in types]
        self.series = [getattr(self, f"{t}_series") for t in types]

        # To test narrow dtypes, we use narrower *data* elements, not *index* elements
        index = self.int_index
        self.float32_series = Series(arr.astype(np.float32), index=index, name="a")

        arr_int = np.random.choice(10, size=10, replace=False)
        self.int8_series = Series(arr_int.astype(np.int8), index=index, name="a")
        self.int16_series = Series(arr_int.astype(np.int16), index=index, name="a")
        self.int32_series = Series(arr_int.astype(np.int32), index=index, name="a")
        self.uint8_series = Series(arr_int.astype(np.uint8), index=index, name="a")
        self.uint16_series = Series(arr_int.astype(np.uint16), index=index, name="a")
        self.uint32_series = Series(arr_int.astype(np.uint32), index=index, name="a")

        nrw_types = ["float32", "int8", "int16", "int32", "uint8", "uint16", "uint32"]
        self.narrow_series = [getattr(self, f"{t}_series") for t in nrw_types]

        self.objs = self.indexes + self.series + self.narrow_series

    def check_ops_properties(self, props, filter=None, ignore_failures=False):
        """Assert each attribute in `props` agrees with the underlying index.

        filter: optional predicate on the index selecting which objects to
            test (note: intentionally shadows the builtin, kept for API
            compatibility).
        ignore_failures: when True, objects lacking the attribute are
            tolerated and the not_valid_objs check is skipped.
        """
        for op in props:
            for o in self.is_valid_objs:

                # if a filter, skip if it doesn't match
                if filter is not None:
                    filt = o.index if isinstance(o, Series) else o
                    if not filter(filt):
                        continue

                try:
                    # For a Series the expected value comes from its index.
                    if isinstance(o, Series):
                        expected = Series(getattr(o.index, op), index=o.index, name="a")
                    else:
                        expected = getattr(o, op)
                except (AttributeError):
                    if ignore_failures:
                        continue

                result = getattr(o, op)

                # these could be series, arrays or scalars
                if isinstance(result, Series) and isinstance(expected, Series):
                    tm.assert_series_equal(result, expected)
                elif isinstance(result, Index) and isinstance(expected, Index):
                    tm.assert_index_equal(result, expected)
                elif isinstance(result, np.ndarray) and isinstance(
                    expected, np.ndarray
                ):
                    tm.assert_numpy_array_equal(result, expected)
                else:
                    assert result == expected

            # freq raises AttributeError on an Int64Index because its not
            # defined we mostly care about Series here anyhow
            if not ignore_failures:
                for o in self.not_valid_objs:

                    # an object that is datetimelike will raise a TypeError,
                    # otherwise an AttributeError
                    err = AttributeError
                    if issubclass(type(o), DatetimeIndexOpsMixin):
                        err = TypeError

                    with pytest.raises(err):
                        getattr(o, op)

    @pytest.mark.parametrize("klass", [Series, DataFrame])
    def test_binary_ops_docs(self, klass):
        # Each arithmetic method's docstring must show "self <op> other",
        # and the reflected variant "other <op> self".
        op_map = {
            "add": "+",
            "sub": "-",
            "mul": "*",
            "mod": "%",
            "pow": "**",
            "truediv": "/",
            "floordiv": "//",
        }
        for op_name in op_map:
            operand1 = klass.__name__.lower()
            operand2 = "other"
            op = op_map[op_name]
            expected_str = " ".join([operand1, op, operand2])
            assert expected_str in getattr(klass, op_name).__doc__

            # reverse version of the binary ops
            expected_str = " ".join([operand2, op, operand1])
            assert expected_str in getattr(klass, "r" + op_name).__doc__
class TestTranspose(Ops):
    """Transposing a 1-D pandas object is the identity; any explicit
    `axes` argument must be rejected."""

    errmsg = "the 'axes' parameter is not supported"

    def test_transpose(self):
        # .transpose() on every fixture object returns an equal object.
        for fixture_obj in self.objs:
            tm.assert_equal(fixture_obj.transpose(), fixture_obj)

    def test_transpose_non_default_axes(self):
        # Both the positional and the keyword spelling of `axes` raise.
        for fixture_obj in self.objs:
            with pytest.raises(ValueError, match=self.errmsg):
                fixture_obj.transpose(1)
            with pytest.raises(ValueError, match=self.errmsg):
                fixture_obj.transpose(axes=1)

    def test_numpy_transpose(self):
        # np.transpose dispatches to the pandas implementation: identity
        # without axes, ValueError with them.
        for fixture_obj in self.objs:
            tm.assert_equal(np.transpose(fixture_obj), fixture_obj)

            with pytest.raises(ValueError, match=self.errmsg):
                np.transpose(fixture_obj, axes=1)
class TestIndexOps(Ops):
def setup_method(self, method):
super().setup_method(method)
self.is_valid_objs = self.objs
self.not_valid_objs = []
def test_none_comparison(self):
# bug brought up by #1079
# changed from TypeError in 0.17.0
for o in self.is_valid_objs:
if isinstance(o, Series):
o[0] = np.nan
# noinspection PyComparisonWithNone
result = o == None # noqa
assert not result.iat[0]
assert not result.iat[1]
# noinspection PyComparisonWithNone
result = o != None # noqa
assert result.iat[0]
assert result.iat[1]
result = None == o # noqa
assert not result.iat[0]
assert not result.iat[1]
result = None != o # noqa
assert result.iat[0]
assert result.iat[1]
if is_datetime64_dtype(o) or is_datetime64tz_dtype(o):
# Following DatetimeIndex (and Timestamp) convention,
# inequality comparisons with Series[datetime64] raise
with pytest.raises(TypeError):
None > o
with pytest.raises(TypeError):
o > None
else:
result = None > o
assert not result.iat[0]
assert not result.iat[1]
result = o < None
assert not result.iat[0]
assert not result.iat[1]
def test_ndarray_compat_properties(self):
for o in self.objs:
# Check that we work.
for p in ["shape", "dtype", "T", "nbytes"]:
assert getattr(o, p, None) is not None
# deprecated properties
for p in ["flags", "strides", "itemsize", "base", "data"]:
assert not hasattr(o, p)
with pytest.raises(ValueError):
o.item() # len > 1
assert o.ndim == 1
assert o.size == len(o)
assert Index([1]).item() == 1
assert Series([1]).item() == 1
def test_value_counts_unique_nunique(self):
for orig in self.objs:
o = orig.copy()
klass = type(o)
values = o._values
if isinstance(values, Index):
# reset name not to affect latter process
values.name = None
# create repeated values, 'n'th element is repeated by n+1 times
# skip boolean, because it only has 2 values at most
if isinstance(o, Index) and o.is_boolean():
continue
elif isinstance(o, Index):
expected_index = Index(o[::-1])
expected_index.name = None
o = o.repeat(range(1, len(o) + 1))
o.name = "a"
else:
expected_index = Index(values[::-1])
idx = o.index.repeat(range(1, len(o) + 1))
# take-based repeat
indices = np.repeat(np.arange(len(o)), range(1, len(o) + 1))
rep = values.take(indices)
o = klass(rep, index=idx, name="a")
# check values has the same dtype as the original
assert o.dtype == orig.dtype
expected_s = Series(
range(10, 0, -1), index=expected_index, dtype="int64", name="a"
)
result = o.value_counts()
tm.assert_series_equal(result, expected_s)
assert result.index.name is None
assert result.name == "a"
result = o.unique()
if isinstance(o, Index):
assert isinstance(result, type(o))
tm.assert_index_equal(result, orig)
assert result.dtype == orig.dtype
elif is_datetime64tz_dtype(o):
# datetimetz Series returns array of Timestamp
assert result[0] == orig[0]
for r in result:
assert isinstance(r, Timestamp)
tm.assert_numpy_array_equal(
result.astype(object), orig._values.astype(object)
)
else:
tm.assert_numpy_array_equal(result, orig.values)
assert result.dtype == orig.dtype
assert o.nunique() == len(np.unique(o.values))
@pytest.mark.parametrize("null_obj", [np.nan, None])
def test_value_counts_unique_nunique_null(self, null_obj):
for orig in self.objs:
o = orig.copy()
klass = type(o)
values = o._ndarray_values
if not self._allow_na_ops(o):
continue
# special assign to the numpy array
if is_datetime64tz_dtype(o):
if isinstance(o, DatetimeIndex):
v = o.asi8
v[0:2] = iNaT
values = o._shallow_copy(v)
else:
o = o.copy()
o[0:2] = pd.NaT
values = o._values
elif needs_i8_conversion(o):
values[0:2] = iNaT
values = o._shallow_copy(values)
else:
values[0:2] = null_obj
# check values has the same dtype as the original
assert values.dtype == o.dtype
# create repeated values, 'n'th element is repeated by n+1
# times
if isinstance(o, (DatetimeIndex, PeriodIndex)):
expected_index = o.copy()
expected_index.name = None
# attach name to klass
o = klass(values.repeat(range(1, len(o) + 1)))
o.name = "a"
else:
if isinstance(o, DatetimeIndex):
expected_index = orig._values._shallow_copy(values)
else:
expected_index = Index(values)
expected_index.name = None
o = o.repeat(range(1, len(o) + 1))
o.name = "a"
# check values has the same dtype as the original
assert o.dtype == orig.dtype
# check values correctly have NaN
nanloc = np.zeros(len(o), dtype=np.bool)
nanloc[:3] = True
if isinstance(o, Index):
tm.assert_numpy_array_equal(pd.isna(o), nanloc)
else:
exp = Series(nanloc, o.index, name="a")
tm.assert_series_equal(pd.isna(o), exp)
expected_s_na = Series(
list(range(10, 2, -1)) + [3],
index=expected_index[9:0:-1],
dtype="int64",
name="a",
)
expected_s = Series(
list(range(10, 2, -1)),
index=expected_index[9:1:-1],
dtype="int64",
name="a",
)
result_s_na = o.value_counts(dropna=False)
tm.assert_series_equal(result_s_na, expected_s_na)
assert result_s_na.index.name is None
assert result_s_na.name == "a"
result_s = o.value_counts()
tm.assert_series_equal(o.value_counts(), expected_s)
assert result_s.index.name is None
assert result_s.name == "a"
result = o.unique()
if isinstance(o, Index):
tm.assert_index_equal(result, Index(values[1:], name="a"))
elif is_datetime64tz_dtype(o):
# unable to compare NaT / nan
tm.assert_extension_array_equal(result[1:], values[2:])
assert result[0] is pd.NaT
else:
tm.assert_numpy_array_equal(result[1:], values[2:])
assert pd.isna(result[0])
assert result.dtype == orig.dtype
assert o.nunique() == 8
assert o.nunique(dropna=False) == 9
def test_value_counts_inferred(self, index_or_series):
klass = index_or_series
s_values = ["a", "b", "b", "b", "b", "c", "d", "d", "a", "a"]
s = klass(s_values)
expected = Series([4, 3, 2, 1], index=["b", "a", "d", "c"])
tm.assert_series_equal(s.value_counts(), expected)
if isinstance(s, Index):
exp = Index(np.unique(np.array(s_values, dtype=np.object_)))
tm.assert_index_equal(s.unique(), exp)
else:
exp = np.unique(np.array(s_values, dtype=np.object_))
tm.assert_numpy_array_equal(s.unique(), exp)
assert s.nunique() == 4
# don't sort, have to sort after the fact as not sorting is
# platform-dep
hist = s.value_counts(sort=False).sort_values()
expected = Series([3, 1, 4, 2], index=list("acbd")).sort_values()
tm.assert_series_equal(hist, expected)
# sort ascending
hist = s.value_counts(ascending=True)
expected = Series([1, 2, 3, 4], index=list("cdab"))
tm.assert_series_equal(hist, expected)
# relative histogram.
hist = s.value_counts(normalize=True)
expected = Series([0.4, 0.3, 0.2, 0.1], index=["b", "a", "d", "c"])
tm.assert_series_equal(hist, expected)
def test_value_counts_bins(self, index_or_series):
klass = index_or_series
s_values = ["a", "b", "b", "b", "b", "c", "d", "d", "a", "a"]
s = klass(s_values)
# bins
with pytest.raises(TypeError):
s.value_counts(bins=1)
s1 = Series([1, 1, 2, 3])
res1 = s1.value_counts(bins=1)
exp1 = Series({Interval(0.997, 3.0): 4})
tm.assert_series_equal(res1, exp1)
res1n = s1.value_counts(bins=1, normalize=True)
exp1n = Series({Interval(0.997, 3.0): 1.0})
tm.assert_series_equal(res1n, exp1n)
if isinstance(s1, Index):
tm.assert_index_equal(s1.unique(), Index([1, 2, 3]))
else:
exp = np.array([1, 2, 3], dtype=np.int64)
tm.assert_numpy_array_equal(s1.unique(), exp)
assert s1.nunique() == 3
# these return the same
res4 = s1.value_counts(bins=4, dropna=True)
intervals = IntervalIndex.from_breaks([0.997, 1.5, 2.0, 2.5, 3.0])
exp4 = Series([2, 1, 1, 0], index=intervals.take([0, 3, 1, 2]))
tm.assert_series_equal(res4, exp4)
res4 = s1.value_counts(bins=4, dropna=False)
intervals = IntervalIndex.from_breaks([0.997, 1.5, 2.0, 2.5, 3.0])
exp4 = Series([2, 1, 1, 0], index=intervals.take([0, 3, 1, 2]))
tm.assert_series_equal(res4, exp4)
res4n = s1.value_counts(bins=4, normalize=True)
exp4n = Series([0.5, 0.25, 0.25, 0], index=intervals.take([0, 3, 1, 2]))
tm.assert_series_equal(res4n, exp4n)
# handle NA's properly
s_values = ["a", "b", "b", "b", np.nan, np.nan, "d", "d", "a", "a", "b"]
s = klass(s_values)
expected = Series([4, 3, 2], index=["b", "a", "d"])
tm.assert_series_equal(s.value_counts(), expected)
if isinstance(s, Index):
exp = Index(["a", "b", np.nan, "d"])
tm.assert_index_equal(s.unique(), exp)
else:
exp = np.array(["a", "b", np.nan, "d"], dtype=object)
tm.assert_numpy_array_equal(s.unique(), exp)
assert s.nunique() == 3
s = klass({}) if klass is dict else klass({}, dtype=object)
expected = Series([], dtype=np.int64)
tm.assert_series_equal(s.value_counts(), expected, check_index_type=False)
# returned dtype differs depending on original
if isinstance(s, Index):
tm.assert_index_equal(s.unique(), Index([]), exact=False)
else:
tm.assert_numpy_array_equal(s.unique(), np.array([]), check_dtype=False)
assert s.nunique() == 0
def test_value_counts_datetime64(self, index_or_series):
klass = index_or_series
# GH 3002, datetime64[ns]
# don't test names though
txt = "\n".join(
[
"xxyyzz20100101PIE",
"xxyyzz20100101GUM",
"xxyyzz20100101EGG",
"xxyyww20090101EGG",
"foofoo20080909PIE",
"foofoo20080909GUM",
]
)
f = StringIO(txt)
df = pd.read_fwf(
f, widths=[6, 8, 3], names=["person_id", "dt", "food"], parse_dates=["dt"]
)
s = klass(df["dt"].copy())
s.name = None
idx = pd.to_datetime(
["2010-01-01 00:00:00", "2008-09-09 00:00:00", "2009-01-01 00:00:00"]
)
expected_s = Series([3, 2, 1], index=idx)
tm.assert_series_equal(s.value_counts(), expected_s)
expected = np_array_datetime64_compat(
["2010-01-01 00:00:00", "2009-01-01 00:00:00", "2008-09-09 00:00:00"],
dtype="datetime64[ns]",
)
if isinstance(s, Index):
tm.assert_index_equal(s.unique(), DatetimeIndex(expected))
else:
tm.assert_numpy_array_equal(s.unique(), expected)
assert s.nunique() == 3
# with NaT
s = df["dt"].copy()
s = klass(list(s.values) + [pd.NaT])
result = s.value_counts()
assert result.index.dtype == "datetime64[ns]"
tm.assert_series_equal(result, expected_s)
result = s.value_counts(dropna=False)
expected_s[pd.NaT] = 1
tm.assert_series_equal(result, expected_s)
unique = s.unique()
assert unique.dtype == "datetime64[ns]"
# numpy_array_equal cannot compare pd.NaT
if isinstance(s, Index):
exp_idx = DatetimeIndex(expected.tolist() + [pd.NaT])
tm.assert_index_equal(unique, exp_idx)
else:
tm.assert_numpy_array_equal(unique[:3], expected)
assert pd.isna(unique[3])
assert s.nunique() == 3
assert s.nunique(dropna=False) == 4
# timedelta64[ns]
td = df.dt - df.dt + timedelta(1)
td = klass(td, name="dt")
result = td.value_counts()
expected_s = Series([6], index=[Timedelta("1day")], name="dt")
tm.assert_series_equal(result, expected_s)
expected = TimedeltaIndex(["1 days"], name="dt")
if isinstance(td, Index):
tm.assert_index_equal(td.unique(), expected)
else:
tm.assert_numpy_array_equal(td.unique(), expected.values)
td2 = timedelta(1) + (df.dt - df.dt)
td2 = klass(td2, name="dt")
result2 = td2.value_counts()
tm.assert_series_equal(result2, expected_s)
def test_factorize(self):
for orig in self.objs:
o = orig.copy()
if isinstance(o, Index) and o.is_boolean():
exp_arr = np.array([0, 1] + [0] * 8, dtype=np.intp)
exp_uniques = o
exp_uniques = Index([False, True])
else:
exp_arr = np.array(range(len(o)), dtype=np.intp)
exp_uniques = o
codes, uniques = o.factorize()
tm.assert_numpy_array_equal(codes, exp_arr)
if isinstance(o, Series):
tm.assert_index_equal(uniques, Index(orig), check_names=False)
else:
# factorize explicitly resets name
tm.assert_index_equal(uniques, exp_uniques, check_names=False)
def test_factorize_repeated(self):
for orig in self.objs:
o = orig.copy()
# don't test boolean
if isinstance(o, Index) and o.is_boolean():
continue
# sort by value, and create duplicates
if isinstance(o, Series):
o = o.sort_values()
n = o.iloc[5:].append(o)
else:
indexer = o.argsort()
o = o.take(indexer)
n = o[5:].append(o)
exp_arr = np.array(
[5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9], dtype=np.intp
)
codes, uniques = n.factorize(sort=True)
tm.assert_numpy_array_equal(codes, exp_arr)
if isinstance(o, Series):
tm.assert_index_equal(
uniques, Index(orig).sort_values(), check_names=False
)
else:
tm.assert_index_equal(uniques, o, check_names=False)
exp_arr = np.array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4], np.intp)
codes, uniques = n.factorize(sort=False)
tm.assert_numpy_array_equal(codes, exp_arr)
if isinstance(o, Series):
expected = Index(o.iloc[5:10].append(o.iloc[:5]))
tm.assert_index_equal(uniques, expected, check_names=False)
else:
expected = o[5:10].append(o[:5])
tm.assert_index_equal(uniques, expected, check_names=False)
def test_duplicated_drop_duplicates_index(self):
# GH 4060
for original in self.objs:
if isinstance(original, Index):
# special case
if original.is_boolean():
result = original.drop_duplicates()
expected = Index([False, True], name="a")
tm.assert_index_equal(result, expected)
continue
# original doesn't have duplicates
expected = np.array([False] * len(original), dtype=bool)
duplicated = original.duplicated()
tm.assert_numpy_array_equal(duplicated, expected)
assert duplicated.dtype == bool
result = original.drop_duplicates()
tm.assert_index_equal(result, original)
assert result is not original
# has_duplicates
assert not original.has_duplicates
# create repeated values, 3rd and 5th values are duplicated
idx = original[list(range(len(original))) + [5, 3]]
expected = np.array([False] * len(original) + [True, True], dtype=bool)
duplicated = idx.duplicated()
tm.assert_numpy_array_equal(duplicated, expected)
assert duplicated.dtype == bool
tm.assert_index_equal(idx.drop_duplicates(), original)
base = [False] * len(idx)
base[3] = True
base[5] = True
expected = np.array(base)
duplicated = idx.duplicated(keep="last")
tm.assert_numpy_array_equal(duplicated, expected)
assert duplicated.dtype == bool
result = idx.drop_duplicates(keep="last")
tm.assert_index_equal(result, idx[~expected])
base = [False] * len(original) + [True, True]
base[3] = True
base[5] = True
expected = np.array(base)
duplicated = idx.duplicated(keep=False)
tm.assert_numpy_array_equal(duplicated, expected)
assert duplicated.dtype == bool
result = idx.drop_duplicates(keep=False)
tm.assert_index_equal(result, idx[~expected])
with pytest.raises(
TypeError,
match=r"drop_duplicates\(\) got an unexpected keyword argument",
):
idx.drop_duplicates(inplace=True)
else:
expected = Series(
[False] * len(original), index=original.index, name="a"
)
tm.assert_series_equal(original.duplicated(), expected)
result = original.drop_duplicates()
tm.assert_series_equal(result, original)
assert result is not original
idx = original.index[list(range(len(original))) + [5, 3]]
values = original._values[list(range(len(original))) + [5, 3]]
s = Series(values, index=idx, name="a")
expected = Series(
[False] * len(original) + [True, True], index=idx, name="a"
)
tm.assert_series_equal(s.duplicated(), expected)
tm.assert_series_equal(s.drop_duplicates(), original)
base = [False] * len(idx)
base[3] = True
base[5] = True
expected = Series(base, index=idx, name="a")
tm.assert_series_equal(s.duplicated(keep="last"), expected)
tm.assert_series_equal(
s.drop_duplicates(keep="last"), s[~np.array(base)]
)
base = [False] * len(original) + [True, True]
base[3] = True
base[5] = True
expected = Series(base, index=idx, name="a")
tm.assert_series_equal(s.duplicated(keep=False), expected)
tm.assert_series_equal(
s.drop_duplicates(keep=False), s[~np.array(base)]
)
s.drop_duplicates(inplace=True)
tm.assert_series_equal(s, original)
def test_drop_duplicates_series_vs_dataframe(self):
# GH 14192
df = pd.DataFrame(
{
"a": [1, 1, 1, "one", "one"],
"b": [2, 2, np.nan, np.nan, np.nan],
"c": [3, 3, np.nan, np.nan, "three"],
"d": [1, 2, 3, 4, 4],
"e": [
datetime(2015, 1, 1),
datetime(2015, 1, 1),
datetime(2015, 2, 1),
pd.NaT,
pd.NaT,
],
}
)
for column in df.columns:
for keep in ["first", "last", False]:
dropped_frame = df[[column]].drop_duplicates(keep=keep)
dropped_series = df[column].drop_duplicates(keep=keep)
tm.assert_frame_equal(dropped_frame, dropped_series.to_frame())
def test_fillna(self):
# # GH 11343
# though Index.fillna and Series.fillna has separate impl,
# test here to confirm these works as the same
for orig in self.objs:
o = orig.copy()
values = o.values
# values will not be changed
result = o.fillna(o.astype(object).values[0])
if isinstance(o, Index):
tm.assert_index_equal(o, result)
else:
tm.assert_series_equal(o, result)
# check shallow_copied
assert o is not result
for null_obj in [np.nan, None]:
for orig in self.objs:
o = orig.copy()
klass = type(o)
if not self._allow_na_ops(o):
continue
if needs_i8_conversion(o):
values = o.astype(object).values
fill_value = values[0]
values[0:2] = pd.NaT
else:
values = o.values.copy()
fill_value = o.values[0]
values[0:2] = null_obj
expected = [fill_value] * 2 + list(values[2:])
expected = klass(expected, dtype=orig.dtype)
o = klass(values)
# check values has the same dtype as the original
assert o.dtype == orig.dtype
result = o.fillna(fill_value)
if isinstance(o, Index):
tm.assert_index_equal(result, expected)
else:
tm.assert_series_equal(result, expected)
# check shallow_copied
assert o is not result
@pytest.mark.skipif(PYPY, reason="not relevant for PyPy")
def test_memory_usage(self):
for o in self.objs:
res = o.memory_usage()
res_deep = o.memory_usage(deep=True)
if is_object_dtype(o) or (
isinstance(o, Series) and is_object_dtype(o.index)
):
# if there are objects, only deep will pick them up
assert res_deep > res
else:
assert res == res_deep
if isinstance(o, Series):
assert (
o.memory_usage(index=False) + o.index.memory_usage()
) == o.memory_usage(index=True)
# sys.getsizeof will call the .memory_usage with
# deep=True, and add on some GC overhead
diff = res_deep - sys.getsizeof(o)
assert abs(diff) < 100
def test_searchsorted(self):
# See gh-12238
for o in self.objs:
index = np.searchsorted(o, max(o))
assert 0 <= index <= len(o)
index = np.searchsorted(o, max(o), sorter=range(len(o)))
assert 0 <= index <= len(o)
def test_validate_bool_args(self):
invalid_values = [1, "True", [1, 2, 3], 5.0]
for value in invalid_values:
with pytest.raises(ValueError):
self.int_series.drop_duplicates(inplace=value)
def test_getitem(self):
for i in self.indexes:
s = pd.Series(i)
assert i[0] == s.iloc[0]
assert i[5] == s.iloc[5]
assert i[-1] == s.iloc[-1]
assert i[-1] == i[9]
with pytest.raises(IndexError):
i[20]
with pytest.raises(IndexError):
s.iloc[20]
@pytest.mark.parametrize("indexer_klass", [list, pd.Index])
@pytest.mark.parametrize(
"indexer",
[
[True] * 10,
[False] * 10,
[True, False, True, True, False, False, True, True, False, True],
],
)
def test_bool_indexing(self, indexer_klass, indexer):
# GH 22533
for idx in self.indexes:
exp_idx = [i for i in range(len(indexer)) if indexer[i]]
tm.assert_index_equal(idx[indexer_klass(indexer)], idx[exp_idx])
s = pd.Series(idx)
tm.assert_series_equal(s[indexer_klass(indexer)], s.iloc[exp_idx])
def test_get_indexer_non_unique_dtype_mismatch(self):
# GH 25459
indexes, missing = pd.Index(["A", "B"]).get_indexer_non_unique(pd.Index([0]))
tm.assert_numpy_array_equal(np.array([-1], dtype=np.intp), indexes)
tm.assert_numpy_array_equal(np.array([0], dtype=np.int64), missing)
| [
"[email protected]"
] | |
e6874392c8edf6d718560d3a91ebeb226324e9b0 | bd7f75e3acebad3f08959223365f0dfc84990f1b | /voc_label_multiClass_with_poly_non_interest.py | b51db2f55b8ab681e3427a2cebd54e58a8be0e7f | [
"MIT"
] | permissive | nghigd10/PyScripts | b6fcfb83dc9fe61f8c4f9972775b4b8ac53ffe2e | df8a37696b92b452b108d74f69e027b6a004670a | refs/heads/master | 2023-01-03T05:41:37.280633 | 2020-10-30T10:15:21 | 2020-10-30T10:15:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,328 | py | import xml.etree.ElementTree as ET
import pickle
import os
import re
import sys
import shutil
import math
from os import listdir, getcwd
from os.path import join
import cv2
import numpy as np
car = ["saloon_car", "suv", "van", "pickup"]
other = ["shop_truck", "unknown"]
bicycle = ["bicycle", "motorcycle"]
targettypes = ["car",
"car_front",
"car_rear",
"bicycle",
"person",
"cyclist",
"tricycle",
"motorcycle",
"non_interest_zone",
"non_interest_zones"]
classes_c9 = ["car",
"truck",
"waggon",
"passenger_car",
"other",
"bicycle",
"person",
"cyclist",
"tricycle",
"non_interest_zone"]
classes_c6 = ['car',
"bicycle",
"person",
"cyclist",
"tricycle",
"car_fr",
"non_interest_zone",
"non_interest_zones"]
classes_c5 = ['car', # 0
"bicycle", # 1
"person", # 2
"cyclist", # 3
"tricycle", # 4
"non_interest_zone"]
# classes = classes_c6
classes = classes_c5
class_num = len(classes) - 1 # 减1减的是non_interest_zone
car_fr = ["car_front", "car_rear"]
nCount = 0
def convert(size, box):
dw = 1./size[0]
dh = 1./size[1]
xmin = box[0]
xmax = box[1]
ymin = box[2]
ymax = box[3]
if xmin < 0:
xmin = 0
if xmax < 0 or xmin >= size[0]:
return None
if xmax >= size[0]:
xmax = size[0] - 1
if ymin < 0:
ymin = 0
if ymax < 0 or ymin >= size[1]:
return None
if ymax >= size[1]:
ymax = size[1] - 1
x = (xmin + xmax)/2.0
y = (ymin + ymax)/2.0
w = abs(xmax - xmin)
h = abs(ymax - ymin)
x = x*dw
w = w*dw
y = y*dh
h = h*dh
if w == 0 or h == 0:
return None
return (x, y, w, h)
def convert_annotation(imgpath, xmlpath, labelpath, filename):
in_file = open(xmlpath+'/'+filename+'.xml')
out_file = open(labelpath+'/'+filename+'.txt', 'w')
xml_info = in_file.read()
if xml_info.find('dataroot') < 0:
print("Can not find dataroot")
out_file.close()
in_file.close()
return [], []
#xml_info = xml_info.decode('GB2312').encode('utf-8')
#xml_info = xml_info.replace('GB2312', 'utf-8')
try:
root = ET.fromstring(xml_info)
except(Exception, e):
print("Error: cannot parse file")
#n = raw_input()
out_file.close()
in_file.close()
return [], []
boxes_non = []
poly_non = []
# Count = 0
label_statis = [0 for i in range(class_num)]
if root.find('markNode') != None:
obj = root.find('markNode').find('object')
if obj != None:
w = int(root.find('width').text)
h = int(root.find('height').text)
print("w:%d, h%d" % (w, h))
# print 'w=' + str(w) + ' h=' + str(h)
for obj in root.iter('object'):
target_type = obj.find('targettype')
cls_name = target_type.text
print(cls_name)
if cls_name not in targettypes:
print("********************************* "+cls_name +
" is not in targetTypes list *************************")
continue
# # classes_c9
# if cls_name == "car":
# cartype = obj.find('cartype').text
# # print(cartype)
# if cartype == 'motorcycle':
# cls_name = "bicycle"
# elif cartype == 'truck':
# cls_name = "truck"
# elif cartype == 'waggon':
# cls_name = 'waggon'
# elif cartype == 'passenger_car':
# cls_name = 'passenger_car'
# elif cartype == 'unkonwn' or cartype == "shop_truck":
# cls_name = "other"
# classes_c5
if cls_name == 'car_front' or cls_name == 'car_rear':
cls_name = 'car_fr'
if cls_name == 'car':
cartype = obj.find('cartype').text
if cartype == 'motorcycle':
cls_name = 'bicycle'
if cls_name == "motorcycle":
cls_name = "bicycle"
if cls_name not in classes:
print("********************************* " + cls_name +
" is not in class list *************************")
continue
cls_id = classes.index(cls_name)
# print(cls,cls_id)
cls_no = cls_id
# elif 'non_interest_zone' == cls:
# imgfile = imgpath + '/'+filename+'.jpg'
# img = cv2.imread(imgfile)
# xmin = int(xmlbox.find('xmin').text)
# xmax = int(xmlbox.find('xmax').text)
# ymin = int(xmlbox.find('ymin').text)
# ymax = int(xmlbox.find('ymax').text)
# print(xmin,xmax,ymin,ymax,img.shape)
# tmp = np.zeros((ymax-ymin,xmax-xmin,3),img.dtype)
# img[ymin:ymax,xmin:xmax] = tmp
# cv2.imwrite(imgfile,img)
# print("has non_interest_zone*************************************************************")
# continue
# print(cls_no)
# if cls_no != 6:
# continue
if cls_name == "non_interest_zones": # 有个bug, non_interest_zones时为bndbox,胡老板已修复。
try:
xmlpoly = obj.find('polygonPoints').text
print('xml_poly:', xmlpoly)
poly_ = re.split('[,;]', xmlpoly)
poly_non.append(poly_)
continue
except:
continue
# Count += 1
xmlbox = obj.find('bndbox')
b = (float(xmlbox.find('xmin').text),
float(xmlbox.find('xmax').text),
float(xmlbox.find('ymin').text),
float(xmlbox.find('ymax').text))
if cls_name == "non_interest_zone":
boxes_non.append(b)
continue
#
label_statis[cls_no] += 1
bb = convert((w, h), b)
if bb is None:
print("++++++++++++++++++++++++++++++box is error++++++++++++++++++++")
# sleep(10)
continue
out_file.write(str(cls_no) + " " +
" ".join([str(a) for a in bb]) + '\n')
print(str(cls_no) + " " + " ".join([str(a) for a in bb]))
out_file.close()
in_file.close()
# if Count > 0:
# return 0
# else:
# # if os.path.exists(labelpath+'/'+filename+'.txt'):
# # os.remove(labelpath+'/'+filename+'.txt')
# return -1
return poly_non, boxes_non, label_statis
if __name__ == "__main__":
# rootdir = '/users/maqiao/mq/Data_checked/multiClass/multiClass0320'
# root_path = "/users/maqiao/mq/Data_checked/multiClass/pucheng20191101"
# rootdirs = [
# '/users/maqiao/mq/Data_checked/multiClass/multiClass0320',
# '/users/maqiao/mq/Data_checked/multiClass/multiClass0507',
# '/users/maqiao/mq/Data_checked/multiClass/multiClass0606',
# '/users/maqiao/mq/Data_checked/multiClass/multiClass0704',
# '/users/maqiao/mq/Data_checked/multiClass/multiClass190808',
# '/users/maqiao/mq/Data_checked/multiClass/multiClass190814',
# '/users/maqiao/mq/Data_checked/multiClass/multiClass190822-1',
# '/users/maqiao/mq/Data_checked/multiClass/multiClass190822-3',
# '/users/maqiao/mq/Data_checked/multiClass/multiClass190823',
# '/users/maqiao/mq/Data_checked/multiClass/multiClass190826',
# '/users/maqiao/mq/Data_checked/multiClass/multiClass190827',
# '/users/maqiao/mq/Data_checked/multiClass/multiClass190827_1',
# '/users/maqiao/mq/Data_checked/multiClass/multiClass190830',
# '/users/maqiao/mq/Data_checked/multiClass/multiClass190830_1',
# '/users/maqiao/mq/Data_checked/multiClass/multiClass190830_2',
# '/users/maqiao/mq/Data_checked/multiClass/multiClass190830_3'
# "/users/maqiao/mq/Data_checked/multiClass/mark/houhaicui",
# "/users/maqiao/mq/Data_checked/multiClass/mark/limingqing",
# "/users/maqiao/mq/Data_checked/multiClass/mark/mayanzhuo",
# "/users/maqiao/mq/Data_checked/multiClass/mark/quanqingfang",
# "/users/maqiao/mq/Data_checked/multiClass/mark/shenjinyan",
# "/users/maqiao/mq/Data_checked/multiClass/mark/wanglinan",
# "/users/maqiao/mq/Data_checked/multiClass/mark/yangyanping",
# "/users/maqiao/mq/Data_checked/multiClass/duomubiao/houhaicui",
# "/users/maqiao/mq/Data_checked/multiClass/duomubiao/limingqing",
# "/users/maqiao/mq/Data_checked/multiClass/duomubiao/mayanzhuo",
# "/users/maqiao/mq/Data_checked/multiClass/duomubiao/quanqingfang",
# "/users/maqiao/mq/Data_checked/multiClass/duomubiao/shenjinyan",
# "/users/maqiao/mq/Data_checked/multiClass/duomubiao/wanglinan",
# "/users/maqiao/mq/Data_checked/multiClass/duomubiao/yangyanping",
# "/users/maqiao/mq/Data_checked/multiClass/tricycle_bigCar20190912",
# "/users/maqiao/mq/Data_checked/multiClass/tricycle_bigCar20190920",
# "/users/maqiao/mq/Data_checked/multiClass/tricycle_bigCar20190925",
# "/users/maqiao/mq/Data_checked/multiClass/tricycle_bigCar20190930",
# "/users/maqiao/mq/Data_checked/multiClass/tricycle_bigCar20191011",
# "/users/maqiao/mq/Data_checked/multiClass/tricycle_bigCar20191018",
# "/users/maqiao/mq/Data_checked/multiClass/pucheng20191012",
# "/users/maqiao/mq/Data_checked/multiClass/pucheng20191017",
# "/users/maqiao/mq/Data_checked/multiClass/pucheng20191025",
# "/users/maqiao/mq/Data_checked/multiClass/pucheng20191101"]
# changsha_test_poly_nointer
# /mnt/diskb/maqiao/multiClass/beijing20200110
# /mnt/diskb/maqiao/multiClass/changsha20191224-2
root_path = '/mnt/diskb/maqiao/multiClass/c5_puer_20200611'
rootdirs = ["/mnt/diskb/maqiao/multiClass/c5_puer_20200611"]
# root_path = '/users/duanyou/backup_c5/changsha_c5/test_new_chuiting'
# rootdirs = ["/users/duanyou/backup_c5/changsha_c5/test_new_chuiting"]
# root_path = 'F:/mq1/test_data'
# rootdirs = [root_path+'/1']
all_list_file = os.path.join(root_path, 'multiClass_train.txt')
all_list = open(os.path.join(root_path, all_list_file), 'w')
dir_num = len(rootdirs)
for j, rootdir in enumerate(rootdirs):
imgpath = rootdir + '/' + "JPEGImages_ori"
imgpath_dst = rootdir + '/' + "JPEGImages"
xmlpath = rootdir + '/' + "Annotations"
labelpath = rootdir + '/' + "labels"
if not os.path.exists(labelpath):
os.makedirs(labelpath)
if not os.path.exists(imgpath_dst):
os.makedirs(imgpath_dst)
list_file = open(rootdir + '/' + "train.txt", 'w')
file_lists = os.listdir(imgpath)
file_num = len(file_lists)
label_count = [0 for i in range(class_num)]
for i, imgname in enumerate(file_lists):
print("**************************************************************************************" +
str(i) + '/' + str(file_num)+' ' + str(j) + '/' + str(dir_num))
print(imgpath + '/' + imgname)
print(xmlpath+'/' + imgname[:-4] + ".xml")
if imgname.endswith('.jpg') and os.path.exists(xmlpath+'/'+imgname[:-4]+".xml"):
if not os.path.exists(imgpath): # 没有对应的图片则跳过
continue
poly_non, boxes_non, label_statis = convert_annotation(
imgpath, xmlpath, labelpath, imgname[:-4])
print('boxes_on:', boxes_non)
if label_statis == []:
continue
label_count = [label_count[i] + label_statis[i]
for i in range(class_num)]
img_ori = imgpath + '/' + imgname
img = cv2.imread(img_ori)
if img is None:
continue
# 把不感兴趣区域替换成颜色随机的图像块
is_data_ok = True
if len(boxes_non) > 0:
for b in boxes_non:
xmin = int(min(b[0], b[1]))
xmax = int(max(b[0], b[1]))
ymin = int(min(b[2], b[3]))
ymax = int(max(b[2], b[3]))
if xmax > img.shape[1] or ymax > img.shape[0]:
is_data_ok = False
break
if xmin < 0:
xmin = 0
if ymin < 0:
ymin = 0
if xmax > img.shape[1] - 1:
xmax = img.shape[1] - 1
if ymax > img.shape[0] - 1:
ymax = img.shape[0] - 1
h = int(ymax - ymin)
w = int(xmax - xmin)
img[ymin:ymax, xmin:xmax, :] = np.random.randint(
0, 255, (h, w, 3)) # 替换为马赛克
# 把不感兴趣多边形区域替换成黑色
if len(poly_non) > 0:
for poly in poly_non:
arr = []
i = 0
while i < len(poly) - 1:
arr.append([int(poly[i]), int(poly[i + 1])])
i = i + 2
arr = np.array(arr)
print('arr:', arr)
cv2.fillPoly(img, [arr], 0)
if not is_data_ok:
continue
img_dst = imgpath_dst + '/' + imgname
print(img_dst)
cv2.imwrite(img_dst, img)
list_file.write(img_dst+'\n')
all_list.write(img_dst+'\n')
print("label_count ", label_count)
list_file.close()
all_list.close()
| [
"[email protected]"
] | |
b53fe631fbbe3bae49798ea486ad1b37cf7a89b5 | f445450ac693b466ca20b42f1ac82071d32dd991 | /generated_tempdir_2019_09_15_163300/generated_part002589.py | e50690dda9b20734f0885f36379c31fb7774fe82 | [] | no_license | Upabjojr/rubi_generated | 76e43cbafe70b4e1516fb761cabd9e5257691374 | cd35e9e51722b04fb159ada3d5811d62a423e429 | refs/heads/master | 2020-07-25T17:26:19.227918 | 2019-09-15T15:41:48 | 2019-09-15T15:41:48 | 208,357,412 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,304 | py | from sympy.abc import *
from matchpy.matching.many_to_one import CommutativeMatcher
from matchpy import *
from matchpy.utils import VariableWithCount
from collections import deque
from multiset import Multiset
from sympy.integrals.rubi.constraints import *
from sympy.integrals.rubi.utility_function import *
from sympy.integrals.rubi.rules.miscellaneous_integration import *
from sympy import *
class CommutativeMatcher26020(CommutativeMatcher):
_instance = None
patterns = {
0: (0, Multiset({}), [
(VariableWithCount('i2.2.1.2.1.1.0', 1, 1, None), Mul),
(VariableWithCount('i2.2.1.2.1.1.0_1', 1, 1, S(1)), Mul)
])
}
subjects = {}
subjects_by_id = {}
bipartite = BipartiteGraph()
associative = Mul
max_optional_count = 1
anonymous_patterns = set()
def __init__(self):
self.add_subject(None)
@staticmethod
def get():
if CommutativeMatcher26020._instance is None:
CommutativeMatcher26020._instance = CommutativeMatcher26020()
return CommutativeMatcher26020._instance
@staticmethod
def get_match_iter(subject):
subjects = deque([subject]) if subject is not None else deque()
subst0 = Substitution()
# State 26019
return
yield
from collections import deque | [
"[email protected]"
] | |
2d573fe5931b3f1063ba73647b84291d080f1c8a | 2ad32d08c66cc02f5a19b3a9e2fbb7c5c25ed99c | /wolf_alg/Data_structures_and_algorithms_py/floyd.py | e459f00b0c5d7f500a47fb6c30e2f2b617c4b4ac | [] | no_license | wqjzzgci/wolf-ai | 5038dee45748809d16482ff6ecac7a2ae00dcbcf | 42cb88a312e1137ad1c59c8a82fc3c15b3cd5092 | refs/heads/master | 2020-03-10T03:20:58.453867 | 2018-04-09T10:23:10 | 2018-04-09T10:23:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 703 | py | #coding=utf-8
def floyd(double_list, vertexs):
for k in xrange(1, vertexs + 1):
for i in xrange(1, vertexs + 1):
for j in xrange(1, vertexs + 1):
tmp = min_value if double_list[i][k] >= min_value or double_list[k][j] >= min_value else double_list[i][k] + double_list[k][j]
if double_list[i][j] > tmp:
double_list[i][j] = tmp
return double_list
if __name__ == '__main__':
min_value = 999999999
ll = [
[min_value,min_value,min_value,min_value,min_value],
[min_value,0,2,6,4],
[min_value,min_value,0,3,min_value],
[min_value,7,min_value,0,1],
[min_value,5,min_value,12,0]]
print floyd(ll, 4)
| [
"[email protected]"
] | |
43c07454753909afed5dc71cb7ef52426b278069 | b34d7c5f810287ebaab09c58754bc59f03589ac3 | /ltc/controller/migrations/0005_auto_20220316_1624.py | df6da7a23d585093998fc8bdab84b883a059ef11 | [
"MIT"
] | permissive | r1990v/JMeter-Control-Center | 11d00276a35a502f91f05bf2adf5c88bf56fbfed | 6bfd13f008fce42c78badcb9d2579f069b064fe9 | refs/heads/master | 2023-01-07T12:40:43.370688 | 2022-09-27T11:05:56 | 2022-09-27T11:05:56 | 162,960,150 | 0 | 0 | null | 2018-12-24T06:53:26 | 2018-12-24T06:53:26 | null | UTF-8 | Python | false | false | 567 | py | # Generated by Django 2.2 on 2022-03-16 15:24
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('controller', '0004_auto_20210608_1506'),
]
operations = [
migrations.DeleteModel(
name='Proxy',
),
migrations.RemoveField(
model_name='testrunningdata',
name='test_running',
),
migrations.DeleteModel(
name='TestRunning',
),
migrations.DeleteModel(
name='TestRunningData',
),
]
| [
"[email protected]"
] | |
e26833183e66a8213241f6e0351fc7da369a112b | d94b6845aeeb412aac6850b70e22628bc84d1d6d | /aloe/aloe/common/plot_2d.py | b295d55c7794a134eb4930874648ecfc1df9b8c4 | [
"CC-BY-4.0",
"Apache-2.0"
] | permissive | ishine/google-research | 541aea114a68ced68736340e037fc0f8257d1ea2 | c1ae273841592fce4c993bf35cdd0a6424e73da4 | refs/heads/master | 2023-06-08T23:02:25.502203 | 2023-05-31T01:00:56 | 2023-05-31T01:06:45 | 242,478,569 | 0 | 0 | Apache-2.0 | 2020-06-23T01:55:11 | 2020-02-23T07:59:42 | Jupyter Notebook | UTF-8 | Python | false | false | 2,275 | py | # coding=utf-8
# Copyright 2023 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: skip-file
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import numpy as np
def plot_heatmap(pdf_func, out_name, size=3):
w = 100
x = np.linspace(-size, size, w)
y = np.linspace(-size, size, w)
xx, yy = np.meshgrid(x, y)
coords = np.stack([xx.flatten(), yy.flatten()]).transpose()
scores = pdf_func(coords)
a = scores.reshape((w, w))
plt.imshow(a)
plt.axis('equal')
plt.axis('off')
plt.savefig(out_name, bbox_inches='tight')
plt.close()
def plot_samples(samples, out_name, lim=None, axis=True):
plt.scatter(samples[:, 0], samples[:, 1], marker='.')
plt.axis('equal')
if lim is not None:
plt.xlim(-lim, lim)
plt.ylim(-lim, lim)
if not axis:
plt.axis('off')
plt.savefig(out_name, bbox_inches='tight')
plt.close()
def plot_joint(dataset, samples, out_name):
x = np.max(dataset)
y = np.max(-dataset)
z = np.ceil(max((x, y)))
plt.scatter(dataset[:, 0], dataset[:, 1], c='r', marker='x')
plt.scatter(samples[:, 0], samples[:, 1], c='b', marker='.')
plt.legend(['training data', 'ADE sampled'])
plt.axis('equal')
plt.xlim(-z, z)
plt.ylim(-z, z)
plt.savefig(out_name, bbox_inches='tight')
plt.close()
fname = out_name.split('/')[-1]
out_name = '/'.join(out_name.split('/')[:-1]) + '/none-' + fname
plt.figure(figsize=(8, 8))
plt.scatter(dataset[:, 0], dataset[:, 1], c='r', marker='x')
plt.scatter(samples[:, 0], samples[:, 1], c='b', marker='.')
plt.axis('equal')
plt.xlim(-z, z)
plt.ylim(-z, z)
plt.savefig(out_name, bbox_inches='tight')
plt.close()
| [
"[email protected]"
] | |
7df713c6d27a30122bb093277d9212602c441695 | 6a0b9581195400a93027aca881b1bc687401913d | /hackerrank-python/contests/world_cup/world_cup_team_formation.py | e702e9c67af3d7c45e54f7514b371704816cfd93 | [] | no_license | 108krohan/codor | 1d7ff503106ad6b2c18bc202d4c88f296600f28e | 2e485607080f919f273aa6c8c0d9cb3516cf4443 | refs/heads/master | 2021-04-28T21:13:34.604020 | 2018-02-18T11:55:26 | 2018-02-18T11:55:26 | 86,153,540 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 236 | py | """world_cup_team_formation at hackerrank.com"""
lst = [int(raw_input()) for _ in xrange(10)]
lst.sort(reverse = True)
print lst[0] + lst[2] + lst[4]
"""
take sum of the first odd three numbers sorted in reverse order.
"""
| [
"[email protected]"
] | |
b25ad99d54465169e298d7e083d8bdab4876bdd9 | bc035331453d2f10e6179700ba74702158f28abf | /week1_2/storage/exceptions.py | 6ce04351444f3c784d6488edafd7fa99d4f4735b | [] | no_license | NikolaPavlov/Hack_Bulgaria_Django | ade6813ae19d7fb7f00f74b6ec800d5927313f12 | 74ee14ddae885e0ea457ef89cee361b9dc34206c | refs/heads/master | 2021-01-17T14:05:40.444361 | 2017-05-19T19:12:32 | 2017-05-19T19:12:32 | 83,479,831 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 44 | py | class UserDoesNotExist(Exception):
pass
| [
"[email protected]"
] | |
6ecb0830a31ef2f1511f5a0b3920344955ddcfee | 641fa8341d8c436ad24945bcbf8e7d7d1dd7dbb2 | /components/exo/wayland/DEPS | 067074d486aaceee495cbd61658e383ec476bff5 | [
"BSD-3-Clause"
] | permissive | massnetwork/mass-browser | 7de0dfc541cbac00ffa7308541394bac1e945b76 | 67526da9358734698c067b7775be491423884339 | refs/heads/master | 2022-12-07T09:01:31.027715 | 2017-01-19T14:29:18 | 2017-01-19T14:29:18 | 73,799,690 | 4 | 4 | BSD-3-Clause | 2022-11-26T11:53:23 | 2016-11-15T09:49:29 | null | UTF-8 | Python | false | false | 54 | include_rules = [
"+third_party/wayland/include",
]
| [
"[email protected]"
] | ||
568fad17206fd645defb2cc4276ae1fc93ba66bc | 06a2a44e2de6f9f6ac815762468ba63b82cd00e1 | /apps/account/context_processors.py | 932c6e47b78fb03f1996728b8cd4ab0d9ddf9feb | [] | no_license | hqpr/marketcsgo | 153e0f3b180e6cc5eb771ba60e7cf5b6e7f31929 | dec9cdf1cafb836f5303a773ad77cf4824665722 | refs/heads/master | 2021-01-10T10:08:33.323776 | 2019-03-17T10:13:40 | 2019-03-17T10:13:40 | 48,641,743 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 720 | py | from apps.account.models import UserProfile
def check_profile(request):
if request.user.is_authenticated():
try:
UserProfile.objects.get(user=request.user)
return {'valid': 1}
except UserProfile.DoesNotExist:
return {'valid': 0, 'user_id': request.user.id}
return {'valid': 1}
def debug_mode(request):
if request.user.is_authenticated():
try:
u = UserProfile.objects.get(user=request.user)
if u.debug_mode:
return {'debug': 1}
else:
return {'debug': 0}
except UserProfile.DoesNotExist:
return {'debug': 0, 'user_id': request.user.id}
return {'debug': 1}
| [
"[email protected]"
] | |
1dc11d54d1dfac5b787fddc7a6e886173250838b | cbc5e26bb47ae69e80a3649c90275becf25ce404 | /xlsxwriter/test/contenttypes/test_contenttypes01.py | 616bc445ea0552bde833c1b5083dd3f662857947 | [
"BSD-2-Clause-Views",
"BSD-3-Clause",
"MIT"
] | permissive | mst-solar-car/kicad-bom-generator | c3549409c3139f787ad28391372b5cb03791694a | 2aae905056d06f3d25343a8d784049c141d05640 | refs/heads/master | 2021-09-07T14:00:40.759486 | 2018-02-23T23:21:13 | 2018-02-23T23:21:13 | 107,868,801 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,590 | py | ###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2017, John McNamara, [email protected]
#
import unittest
from ...compatibility import StringIO
from ..helperfunctions import _xml_to_list
from ...contenttypes import ContentTypes
class TestAssembleContentTypes(unittest.TestCase):
"""
Test assembling a complete ContentTypes file.
"""
def test_assemble_xml_file(self):
"""Test writing an ContentTypes file."""
self.maxDiff = None
fh = StringIO()
content = ContentTypes()
content._set_filehandle(fh)
content._add_worksheet_name('sheet1')
content._add_default(('jpeg', 'image/jpeg'))
content._add_shared_strings()
content._add_calc_chain()
content._assemble_xml_file()
exp = _xml_to_list("""
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<Types xmlns="http://schemas.openxmlformats.org/package/2006/content-types">
<Default Extension="rels" ContentType="application/vnd.openxmlformats-package.relationships+xml"/>
<Default Extension="xml" ContentType="application/xml"/>
<Default Extension="jpeg" ContentType="image/jpeg"/>
<Override PartName="/docProps/app.xml" ContentType="application/vnd.openxmlformats-officedocument.extended-properties+xml"/>
<Override PartName="/docProps/core.xml" ContentType="application/vnd.openxmlformats-package.core-properties+xml"/>
<Override PartName="/xl/styles.xml" ContentType="application/vnd.openxmlformats-officedocument.spreadsheetml.styles+xml"/>
<Override PartName="/xl/theme/theme1.xml" ContentType="application/vnd.openxmlformats-officedocument.theme+xml"/>
<Override PartName="/xl/workbook.xml" ContentType="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet.main+xml"/>
<Override PartName="/xl/worksheets/sheet1.xml" ContentType="application/vnd.openxmlformats-officedocument.spreadsheetml.worksheet+xml"/>
<Override PartName="/xl/sharedStrings.xml" ContentType="application/vnd.openxmlformats-officedocument.spreadsheetml.sharedStrings+xml"/>
<Override PartName="/xl/calcChain.xml" ContentType="application/vnd.openxmlformats-officedocument.spreadsheetml.calcChain+xml"/>
</Types>
""")
got = _xml_to_list(fh.getvalue())
self.assertEqual(got, exp)
| [
"[email protected]"
] | |
3ed03d4991ef4fa680cfdf726f1c96e4d2300b78 | fa82dad9e83206d4630a55141bf44f50cbf0c3a8 | /day1_python/01_python200_src/151.py | 49b83dba5bc8007dbb2ae77e379ddb2c54516d06 | [] | no_license | jsh2333/pyml | 8f8c53a43af23b8490b25f35f28d85f1087df28d | 157dfa7cc2f1458f12e451691a994ac6ef138cab | refs/heads/master | 2021-03-27T22:26:38.254206 | 2020-04-26T06:35:11 | 2020-04-26T06:35:11 | 249,114,580 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 113 | py | import os
pdir = os.getcwd(); print(pdir)
os.chdir('..'); print(os.getcwd())
os.chdir(pdir); print(os.getcwd())
| [
"[email protected]"
] | |
1381edf36fb363b531e4ef1d84b51910010dc909 | cace862c1d95f6b85a9750a427063a8b0e5ed49c | /binaryapi/ws/chanels/buy_contract_for_multiple_accounts.py | 014f93aa8f29ed48abbf5dc467f1f260ff845547 | [] | no_license | HyeongD/binaryapi | 65486532389210f1ca83f6f2098276ecf984702b | e8daa229c04de712242e8e9b79be3b774b409e35 | refs/heads/master | 2023-08-29T13:24:58.364810 | 2021-10-26T19:00:59 | 2021-10-26T19:00:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,349 | py | """Module for Binary buy_contract_for_multiple_accounts websocket channel."""
from binaryapi.ws.chanels.base import Base
from decimal import Decimal
from typing import Any, List, Union, Optional
# https://developers.binary.com/api/#buy_contract_for_multiple_accounts
class BuyContractForMultipleAccounts(Base):
"""Class for Binary buy_contract_for_multiple_accounts websocket channel."""
name = "buy_contract_for_multiple_accounts"
def __call__(self, buy_contract_for_multiple_accounts: str, price: Union[int, float, Decimal], tokens: List, parameters=None, passthrough: Optional[Any] = None, req_id: Optional[int] = None):
"""Method to send message to buy_contract_for_multiple_accounts websocket channel.
Buy Contract for Multiple Accounts (request)
Buy a Contract for multiple Accounts specified by the `tokens` parameter. Note, although this is an authorized call, the contract is not bought for the authorized account.
:param buy_contract_for_multiple_accounts: Either the ID received from a Price Proposal (`proposal` call), or `1` if contract buy parameters are passed in the `parameters` field.
:type buy_contract_for_multiple_accounts: str
:param price: Maximum price at which to purchase the contract.
:type price: Union[int, float, Decimal]
:param tokens: List of API tokens identifying the accounts for which the contract is bought. Note: If the same token appears multiple times or if multiple tokens designate the same account, the contract is bought multiple times for this account.
:type tokens: List
:param parameters: [Optional] Used to pass the parameters for contract buy.
:type parameters:
:param passthrough: [Optional] Used to pass data through the websocket, which may be retrieved via the `echo_req` output field.
:type passthrough: Optional[Any]
:param req_id: [Optional] Used to map request to response.
:type req_id: Optional[int]
"""
data = {
"buy_contract_for_multiple_accounts": buy_contract_for_multiple_accounts,
"price": price,
"tokens": tokens
}
if parameters:
data['parameters'] = parameters
return self.send_websocket_request(self.name, data, passthrough=passthrough, req_id=req_id)
| [
"[email protected]"
] | |
bd7a9a26fd311ea7acdf131684a5e555675fccf7 | 719853613b5b96f02072be1fde736d883e799f02 | /server/photos/migrations/0013_fill_in_iiw.py | 28e85fb066fa8095a74481b03e994527ba4f4b4b | [
"MIT"
] | permissive | anmolkabra/opensurfaces | 5ba442123586533a93eb29890fa1694e3efdbfe8 | a42420083a777d7e1906506cc218f681c5cd145b | refs/heads/master | 2020-03-20T01:11:05.182880 | 2018-06-13T14:55:45 | 2018-06-13T14:55:45 | 137,068,945 | 0 | 0 | MIT | 2018-06-12T12:32:53 | 2018-06-12T12:32:52 | null | UTF-8 | Python | false | false | 28,398 | py | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
from django.db.models import Q
class Migration(SchemaMigration):
def forwards(self, orm):
Photo = orm['photos.Photo']
Photo.objects.all() \
.update(in_iiw_dataset=False)
Photo.objects \
.filter(synthetic=False,
rotated=False,
stylized=False,
inappropriate=False,
nonperspective=False) \
.filter(num_intrinsic_comparisons__gt=0) \
.filter(Q(license__publishable=True) | Q(light_stack__isnull=False)) \
.update(in_iiw_dataset=True)
def backwards(self, orm):
pass
models = {
u'accounts.userprofile': {
'Meta': {'object_name': 'UserProfile'},
'always_approve': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'blocked': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'blocked_reason': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'exclude_from_aggregation': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'mturk_worker_id': ('django.db.models.fields.CharField', [], {'max_length': '127', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'user'", 'unique': 'True', 'primary_key': 'True', 'to': u"orm['auth.User']"})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'licenses.license': {
'Meta': {'object_name': 'License'},
'added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'cc_attribution': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'cc_no_deriv': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'cc_noncommercial': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'cc_share_alike': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'creative_commons': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'publishable': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '255', 'blank': 'True'})
},
u'mturk.experiment': {
'Meta': {'ordering': "['slug', 'variant']", 'unique_together': "(('slug', 'variant'),)", 'object_name': 'Experiment'},
'added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'completed_id': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'cubam_dirty': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'examples_group_attr': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'has_tutorial': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'module': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'new_hit_settings': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'experiments'", 'null': 'True', 'to': u"orm['mturk.ExperimentSettings']"}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'template_dir': ('django.db.models.fields.CharField', [], {'default': "'mturk/experiments'", 'max_length': '255'}),
'test_contents_per_assignment': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'variant': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'version': ('django.db.models.fields.IntegerField', [], {'default': '1'})
},
u'mturk.experimentsettings': {
'Meta': {'object_name': 'ExperimentSettings'},
'added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'auto_add_hits': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'auto_approval_delay': ('django.db.models.fields.IntegerField', [], {'default': '2592000'}),
'content_filter': ('django.db.models.fields.TextField', [], {'default': "'{}'"}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'experiment_settings_in'", 'to': u"orm['contenttypes.ContentType']"}),
'contents_per_hit': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'duration': ('django.db.models.fields.IntegerField', [], {'default': '1800'}),
'feedback_bonus': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '8', 'decimal_places': '4', 'blank': 'True'}),
'frame_height': ('django.db.models.fields.IntegerField', [], {'default': '800'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'keywords': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'lifetime': ('django.db.models.fields.IntegerField', [], {'default': '2678400'}),
'max_active_hits': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'max_total_hits': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'min_output_consensus': ('django.db.models.fields.IntegerField', [], {'default': '4'}),
'num_outputs_max': ('django.db.models.fields.IntegerField', [], {'default': '5'}),
'out_content_attr': ('django.db.models.fields.CharField', [], {'max_length': '127', 'blank': 'True'}),
'out_content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'experiment_settings_out'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}),
'out_count_ratio': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'qualifications': ('django.db.models.fields.TextField', [], {'default': "'{}'"}),
'requirements': ('django.db.models.fields.TextField', [], {'default': "'{}'"}),
'reward': ('django.db.models.fields.DecimalField', [], {'max_digits': '8', 'decimal_places': '4'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'mturk.experimenttestcontent': {
'Meta': {'ordering': "['-id']", 'object_name': 'ExperimentTestContent'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
'experiment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'test_contents'", 'to': u"orm['mturk.Experiment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'priority': ('django.db.models.fields.FloatField', [], {'default': '0', 'db_index': 'True'})
},
u'mturk.mtassignment': {
'Meta': {'object_name': 'MtAssignment'},
'accept_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'action_log': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'approval_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'approve_message': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'auto_approval_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'bonus': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '8', 'decimal_places': '2', 'blank': 'True'}),
'bonus_message': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'deadline': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'feedback': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'feedback_bonus_given': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'has_feedback': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'hit': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'assignments'", 'to': u"orm['mturk.MtHit']"}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '128', 'primary_key': 'True'}),
'manually_rejected': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'num_test_contents': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'num_test_correct': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'num_test_incorrect': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'partially_completed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'post_data': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'post_meta': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'reject_message': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'rejection_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'screen_height': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'screen_width': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True', 'blank': 'True'}),
'submission_complete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'submit_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'test_contents': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'assignments'", 'symmetrical': 'False', 'to': u"orm['mturk.ExperimentTestContent']"}),
'time_active_ms': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'time_load_ms': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'time_ms': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user_agent': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'wage': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'worker': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.UserProfile']", 'null': 'True', 'blank': 'True'})
},
u'mturk.mthit': {
'Meta': {'object_name': 'MtHit'},
'added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'all_submitted_assignments': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'any_submitted_assignments': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'compatible_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'expired': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'hit_status': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True', 'blank': 'True'}),
'hit_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'hits'", 'to': u"orm['mturk.MtHitType']"}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '128', 'primary_key': 'True'}),
'incompatible_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'lifetime': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'max_assignments': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'num_assignments_available': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'num_assignments_completed': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'num_assignments_pending': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'num_contents': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'out_count_ratio': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'review_status': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True', 'blank': 'True'}),
'sandbox': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'mturk.mthittype': {
'Meta': {'object_name': 'MtHitType'},
'added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'auto_approval_delay': ('django.db.models.fields.IntegerField', [], {'default': '2592000'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'duration': ('django.db.models.fields.IntegerField', [], {'default': '3600'}),
'experiment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'hit_types'", 'to': u"orm['mturk.Experiment']"}),
'experiment_settings': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'hit_types'", 'to': u"orm['mturk.ExperimentSettings']"}),
'external_url': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'feedback_bonus': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '8', 'decimal_places': '2', 'blank': 'True'}),
'frame_height': ('django.db.models.fields.IntegerField', [], {'default': '800'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '128', 'primary_key': 'True'}),
'keywords': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'blank': 'True'}),
'reward': ('django.db.models.fields.DecimalField', [], {'default': "'0.01'", 'max_digits': '8', 'decimal_places': '4'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'photos.flickruser': {
'Meta': {'ordering': "['-id']", 'object_name': 'FlickrUser'},
'blacklisted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'display_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'family_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'given_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'sub_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '127'}),
'website_name': ('django.db.models.fields.CharField', [], {'max_length': '1023', 'blank': 'True'}),
'website_url': ('django.db.models.fields.URLField', [], {'max_length': '1023', 'blank': 'True'})
},
u'photos.photo': {
'Meta': {'ordering': "['aspect_ratio', '-id']", 'object_name': 'Photo'},
'added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'aspect_ratio': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'exif': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'flickr_id': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'flickr_user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'photos'", 'null': 'True', 'to': u"orm['photos.FlickrUser']"}),
'focal_y': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'fov': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image_orig': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'in_iiw_dataset': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'inappropriate': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'license': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'photos'", 'null': 'True', 'to': u"orm['licenses.License']"}),
'light_stack': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'photos'", 'null': 'True', 'to': u"orm['photos.PhotoLightStack']"}),
'md5': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'median_intrinsic_error': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'nonperspective': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'num_intrinsic_comparisons': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'num_intrinsic_points': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'num_shapes': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'num_vertices': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'orig_height': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'orig_width': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'rotated': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'scene_category': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'photos'", 'null': 'True', 'to': u"orm['photos.PhotoSceneCategory']"}),
'scene_category_correct': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'scene_category_correct_method': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True', 'blank': 'True'}),
'scene_category_correct_score': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'stylized': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'synthetic': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.UserProfile']"}),
'vanishing_length': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'vanishing_lines': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'vanishing_points': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'whitebalanced': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'whitebalanced_score': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'})
},
u'photos.photolightstack': {
'Meta': {'ordering': "['-id']", 'object_name': 'PhotoLightStack'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
},
u'photos.photoscenecategory': {
'Meta': {'ordering': "['name']", 'object_name': 'PhotoSceneCategory'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '127'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['photos.PhotoSceneCategory']", 'null': 'True', 'blank': 'True'})
},
u'photos.photoscenequalitylabel': {
'Meta': {'ordering': "['photo', '-time_ms']", 'object_name': 'PhotoSceneQualityLabel'},
'added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'correct': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invalid': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'mturk_assignment': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['mturk.MtAssignment']"}),
'photo': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'scene_qualities'", 'to': u"orm['photos.Photo']"}),
'quality_method': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True', 'blank': 'True'}),
'reward': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '8', 'decimal_places': '4', 'blank': 'True'}),
'sandbox': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'time_active_ms': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'time_ms': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.UserProfile']"})
},
u'photos.photowhitebalancelabel': {
'Meta': {'ordering': "['photo', '-time_ms']", 'object_name': 'PhotoWhitebalanceLabel'},
'added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'chroma_median': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invalid': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'mturk_assignment': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['mturk.MtAssignment']"}),
'num_points': ('django.db.models.fields.IntegerField', [], {}),
'photo': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'whitebalances'", 'to': u"orm['photos.Photo']"}),
'points': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'quality_method': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True', 'blank': 'True'}),
'reward': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '8', 'decimal_places': '4', 'blank': 'True'}),
'sandbox': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'time_active_ms': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'time_ms': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.UserProfile']"}),
'whitebalanced': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
}
}
complete_apps = ['photos']
| [
"[email protected]"
] | |
407bf5e3d865565a63d71d38351ec11784634a45 | 6745bd6b607bbfb00dcf641980925753ec60f7d8 | /company/migrations/0025_auto_20170507_1103.py | 0800c545ee6393dc10ad6d843f09ee8a4204948f | [] | no_license | happychallenge/chemicals | 4a1822d32354ce85499f42ada47103d3f27e163c | aa2b08c92cefe1650591d965f2e7f4872c445363 | refs/heads/master | 2021-01-20T09:21:55.769758 | 2017-05-07T06:03:54 | 2017-05-07T06:03:54 | 90,245,438 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,089 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-05-07 02:03
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('company', '0024_remove_allprocess_send_msds'),
]
operations = [
migrations.AlterField(
model_name='company',
name='category',
field=models.CharField(choices=[('T', '贸易'), ('P', '生产'), ('M', '生产&贸易')], default='P', max_length=1, verbose_name='公司种类'),
),
migrations.AlterField(
model_name='company',
name='en_name',
field=models.CharField(max_length=50, verbose_name='English Name'),
),
migrations.AlterField(
model_name='company',
name='name',
field=models.CharField(max_length=30, verbose_name='中文名字'),
),
migrations.AlterField(
model_name='companyproduct',
name='currency',
field=models.CharField(choices=[('D', 'USD'), ('R', 'RMB')], default='R', max_length=1),
),
migrations.AlterField(
model_name='customer',
name='en_name',
field=models.CharField(max_length=30, verbose_name='English Name'),
),
migrations.AlterField(
model_name='product',
name='atomic_amount',
field=models.FloatField(verbose_name='分子量'),
),
migrations.AlterField(
model_name='product',
name='cn_name',
field=models.CharField(max_length=50, verbose_name='中文名字'),
),
migrations.AlterField(
model_name='product',
name='en_name',
field=models.CharField(max_length=30, verbose_name='English Name'),
),
migrations.AlterField(
model_name='product',
name='image',
field=models.ImageField(upload_to='chemical/', verbose_name='结构式'),
),
migrations.AlterField(
model_name='product',
name='usage',
field=models.TextField(blank=True, null=True, verbose_name='用途'),
),
migrations.AlterField(
model_name='purchasecontract',
name='actualdelivery_at',
field=models.DateField(blank=True, null=True, verbose_name='实际到达日子'),
),
migrations.AlterField(
model_name='purchasecontract',
name='actualshipping_at',
field=models.DateField(blank=True, null=True, verbose_name='实际出发日子'),
),
migrations.AlterField(
model_name='purchasecontract',
name='contracted_at',
field=models.DateField(verbose_name='合同日子'),
),
migrations.AlterField(
model_name='purchasecontract',
name='currency',
field=models.CharField(choices=[('D', 'USD'), ('R', 'RMB')], default='R', max_length=1),
),
migrations.AlterField(
model_name='purchasecontract',
name='portofdestination',
field=models.CharField(max_length=100, verbose_name='货到的港口'),
),
migrations.AlterField(
model_name='purchasecontract',
name='predictdelivery_at',
field=models.DateField(verbose_name='到达计划日子'),
),
migrations.AlterField(
model_name='purchasecontract',
name='shipping_at',
field=models.DateField(verbose_name='出发计划日子'),
),
migrations.AlterField(
model_name='salescontract',
name='actualshipping_at',
field=models.DateField(blank=True, null=True, verbose_name='实际 Shipping 日子'),
),
migrations.AlterField(
model_name='salescontract',
name='contracted_at',
field=models.DateField(verbose_name='合同日子'),
),
migrations.AlterField(
model_name='salescontract',
name='currency',
field=models.CharField(choices=[('D', 'USD'), ('R', 'RMB')], default='D', max_length=1),
),
migrations.AlterField(
model_name='salescontract',
name='devliveryrequest',
field=models.CharField(blank=True, max_length=50, null=True, verbose_name='其他'),
),
migrations.AlterField(
model_name='salescontract',
name='portofdestination',
field=models.CharField(max_length=100, verbose_name='目的地 港口'),
),
migrations.AlterField(
model_name='salescontract',
name='portofloading',
field=models.CharField(max_length=100, verbose_name='Shipping 港口'),
),
migrations.AlterField(
model_name='salescontract',
name='shipping_at',
field=models.DateField(verbose_name='Shipping 计划日子'),
),
]
| [
"[email protected]"
] | |
98fab1f0de73796ef769a8a4f729188096167ece | 5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d | /alipay/aop/api/response/MybankCreditLoantradePayerBillrepayConsultResponse.py | 384aaa839bacae7ad07341b1ed1c53574be2462c | [
"Apache-2.0"
] | permissive | alipay/alipay-sdk-python-all | 8bd20882852ffeb70a6e929038bf88ff1d1eff1c | 1fad300587c9e7e099747305ba9077d4cd7afde9 | refs/heads/master | 2023-08-27T21:35:01.778771 | 2023-08-23T07:12:26 | 2023-08-23T07:12:26 | 133,338,689 | 247 | 70 | Apache-2.0 | 2023-04-25T04:54:02 | 2018-05-14T09:40:54 | Python | UTF-8 | Python | false | false | 1,716 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.response.AlipayResponse import AlipayResponse
from alipay.aop.api.domain.CreditPayBillDetailVO import CreditPayBillDetailVO
class MybankCreditLoantradePayerBillrepayConsultResponse(AlipayResponse):
def __init__(self):
super(MybankCreditLoantradePayerBillrepayConsultResponse, self).__init__()
self._bill_details = None
self._exist_bill = None
self._repay_url = None
@property
def bill_details(self):
return self._bill_details
@bill_details.setter
def bill_details(self, value):
if isinstance(value, list):
self._bill_details = list()
for i in value:
if isinstance(i, CreditPayBillDetailVO):
self._bill_details.append(i)
else:
self._bill_details.append(CreditPayBillDetailVO.from_alipay_dict(i))
@property
def exist_bill(self):
return self._exist_bill
@exist_bill.setter
def exist_bill(self, value):
self._exist_bill = value
@property
def repay_url(self):
return self._repay_url
@repay_url.setter
def repay_url(self, value):
self._repay_url = value
def parse_response_content(self, response_content):
response = super(MybankCreditLoantradePayerBillrepayConsultResponse, self).parse_response_content(response_content)
if 'bill_details' in response:
self.bill_details = response['bill_details']
if 'exist_bill' in response:
self.exist_bill = response['exist_bill']
if 'repay_url' in response:
self.repay_url = response['repay_url']
| [
"[email protected]"
] | |
e81b997e68036cdf35265aa603f9459629fc05d1 | 8a8bae1fc33dc503e55b1c6a5a67d90469331891 | /ppy_terminal/sketches/tarbell/happyplace_port/archive_code/happyplace_port_2880275_20200918_112602_000.png.py | 8a9f02346216e30caa2158a2bb5e3042747bd85c | [
"MIT"
] | permissive | LSturtew/generative_art | 071f168c92d2bb31b57558058d881bad6cf4d7aa | 76d98d8fa63e2c952055481d14ab53f56717b6dd | refs/heads/master | 2022-12-30T02:41:36.391487 | 2020-10-12T01:43:48 | 2020-10-12T01:43:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,542 | py | ################################################################################
# porting Jared Tarbell's Happy Place to Python Processing
# all credit for the algorithm goes to them
#
# code and images by Aaron Penne
# https://github.com/aaronpenne
#
# released under MIT License (https://opensource.org/licenses/MIT)
################################################################################
################################################################################
# Imports
################################################################################
# Processing mode uses Python 2.7 but I prefer Python 3.x, pull in future tools
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import with_statement
# Normal Python imports
import os
import sys
import shutil
import logging
from datetime import datetime
from collections import OrderedDict
from random import seed, shuffle, sample
################################################################################
# Globals
################################################################################
# Knobs to turn
w = 900
h = 900
use_seed = True
rand_seed = 2880275
num = 240 # number of friends
numpal = 512 # number of colors in palette
good_colors = []
friends = []
# Utility variables
timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
script_path = os.path.abspath(__file__)
script_name = os.path.basename(script_path)
script_ext = os.path.splitext(script_name)[1]
sketch_name = os.path.splitext(script_name)[0]
# Initialize random number generators with seed
if not use_seed:
rand_seed = int(random(99999,9999999))
randomSeed(rand_seed)
noiseSeed(rand_seed)
seed(rand_seed)
################################################################################
# Helper methods
#
# These exist here in the script instead of a separate centralized file to
# preserve portability and ability to recreate image with a single script
################################################################################
# Standardizes log formats
# ex. 2020-06-31 12:30:55 - INFO - log is better than print
logging.basicConfig(level=logging.INFO,
stream=sys.stdout,
format='%(asctime)s - %(levelname)s - %(message)s',
datefmt='%Y-%m-%d %H:%M:%S')
log = logging.getLogger(__name__)
def get_filename(counter):
"""Standardizes filename string format
ex. comet_12345_20200631_123055_001.png
"""
return '{}_{}_{}_{:03d}.png'.format(sketch_name, rand_seed, timestamp, counter)
def make_dir(path):
"""Creates dir if it does not exist"""
try:
os.makedirs(path)
except OSError:
if not os.path.isdir(path):
raise
def save_code(pg=None, path='output', counter=0):
"""Saves image and creates copy of this script"""
make_dir(path)
output_file = get_filename(counter)
output_path = os.path.join(path, output_file)
make_dir('archive_code')
src = script_path
dst = os.path.join('archive_code', output_file + script_ext)
shutil.copy(src, dst)
def save_graphic(pg=None, path='output', counter=0):
"""Saves image and creates copy of this script"""
make_dir(path)
output_file = get_filename(counter)
output_path = os.path.join(path, output_file)
if pg:
pg.save(output_path)
else:
save(output_path)
log.info('Saved to {}'.format(output_path))
def color_tuple(c, color_space='HSB', rounded=True):
"""Takes color (Processing datatype) and returns human readable tuple."""
if color_space == 'HSB':
c_tuple = (hue(c), saturation(c), brightness(c), alpha(c))
if color_space == 'RGB':
c_tuple = (red(c), green(c), blue(c), alpha(c))
if rounded:
c_tuple = (round(c_tuple[0]), round(c_tuple[1]), round(c_tuple[2]), round(c_tuple[3]))
return c_tuple
def extract_colors(img_filename, max_colors=100, randomize=True):
"""Extracts unique pixels from a source image to create a color palette.
If randomize=False then the image is sampled left to right, then top to bottom.
"""
colors_list = []
img = loadImage(img_filename)
img.loadPixels()
if randomize:
shuffle(img.pixels)
num_colors = 0
for i,c in enumerate(img.pixels):
# only grab new colors (no repeats)
if color_tuple(c) not in [color_tuple(gc) for gc in colors_list]:
colors_list.append(c)
num_colors += 1
if num_colors == max_colors:
break
return colors_list
def sort_color_hues(colors_list, sort_on='hsb'):
"""Takes list of colors (Processing datatype) and sorts the list on hue"""
colors_tuples = [color_tuple(c) for c in colors_list]
if sort_on == 'hsb':
colors = sorted(zip(colors_tuples, colors_list), key=lambda x: (x[0][0], x[0][1], x[0][2]))
if sort_on == 'bsh':
colors = sorted(zip(colors_tuples, colors_list), key=lambda x: (x[0][2], x[0][1], x[0][0]))
return [c for _,c in colors]
def some_color():
return good_colors[int(random(numpal))]
def reset_all():
global friends
for i in range(num):
fx = w/2 + 0.4*w*cos(TAU*i/num)
fy = h/2 + 0.4*h*sin(TAU*i/num)
friends[i] = Friend(fx, fy, i)
for i in range(int(num*2.2)):
a = int(floor(random(num)))
b = int(floor(a+random(22))%num)
if (b >= num) or (b < 0):
b = 0
print('+')
if a != b:
friends[a].connect_to(b)
friends[b].connect_to(a)
print('{} made friends with {}'.format(a,b))
################################################################################
# Setup
################################################################################
def setup():
size(w, h)
colorMode(HSB, 360, 100, 100, 100)
#colorMode(HSB)
strokeWeight(2)
global good_colors
good_colors = extract_colors('mojave.png', numpal)
background(0, 0, 100)
frameRate(30)
global friends
friends = [Friend() for i in range(num)]
print(len(friends))
reset_all()
save_code(None, 'output', frameCount)
#noLoop()
################################################################################
# Draw
################################################################################
def draw():
#for c in good_colors:
# print(color_tuple(c))
for f in friends:
f.move()
for f in friends:
f.expose()
f.expose_connections()
for f in friends:
f.find_happy_place()
if frameCount % 200 == 0:
save_graphic(None, 'output', frameCount)
if frameCount % 20 == 0:
print(frameCount, frameRate)
#exit()
def mousePressed():
save_graphic(None, 'output', frameCount)
class Friend:
def __init__(self, x=0, y=0, identifier=0):
self.x = x
self.y = y
self.vx = 0
self.vy = 0
self.id = identifier
self.numcon = 0
self.maxcon = 10
self.lencon = 10+int(random(50))
self.connections = [0 for i in range(self.maxcon)]
self.myc = some_color()
self.myc = color(hue(self.myc), saturation(self.myc), brightness(self.myc), 5)
self.numsands = 3
self.sands = [SandPainter() for i in range(self.numsands)]
def connect_to(self, f):
if (self.numcon < self.maxcon):
if not self.friend_of(f):
self.connections[self.numcon] = f
self.numcon += 1
def friend_of(self, f):
#FIXME possibly replace with simple is in?
is_friend = False
for i in range(self.numcon):
if self.connections[i] == f:
is_friend = True
return is_friend
def expose(self):
for dx in range(-2,3):
a = 0.3-abs(dx)/5
stroke(0, 0, 0, 100*a)
point(self.x+dx, self.y)
stroke(0, 0, 100,100*a)
point(self.x+dx-1, self.y-1)
for dy in range(-2,3):
a = 0.3-abs(dy)/5
stroke(0, 0, 0, 100*a)
point(self.x, self.y+dy)
stroke(0, 0, 100, 100*a)
point(self.x-1, self.y+dy-1)
def expose_connections(self):
stroke(self.myc)
for i in range(self.numcon):
ox = friends[self.connections[i]].x
oy = friends[self.connections[i]].y
#line(self.x, self.y, ox, oy)
for s in range(self.numsands):
self.sands[s].render(self.x, self.y, ox, oy)
def find_happy_place(self):
# self.vx += random(-w*0.001, w*0.001)
# self.vy += random(-h*0.001, h*0.001)
ax = 0
ay = 0
for n in range(num):
if friends[n] <> this:
ddx = friends[n].x - self.x
ddy = friends[n].y - self.y
d = sqrt(ddx*ddx + ddy*ddy)
t = atan2(ddy, ddx)
friend = False
for j in range(self.numcon):
if self.connections[j]==n:
friend=True
if friend:
# attract
if d>self.lencon:
ax += 4*cos(t)
ay += 4*sin(t)
self.vx += ax/80
self.vy += ay/80
def move(self):
self.x += self.vx
self.y += self.vy
self.vx *= 0.99
self.vy *= 0.99
class SandPainter:
def __init__(self):
self.p = random(1)
self.c = some_color()
self.g = random(0.01, 0.1)
def render(self, x, y, ox, oy):
stroke(hue(self.c), saturation(self.c), brightness(self.c), 10)
point(ox + (x-ox)*sin(self.p), oy+(y-oy)*sin(self.p))
self.g += random(-0.05, 0.05)
maxg = 0.22
if (self.g < -maxg):
self.g = -maxg
if (self.g > maxg):
self.g = maxg
w = self.g/10
for i in range(11):
a = 0.1 - i/110
stroke(hue(self.c), saturation(self.c), brightness(self.c), 100*a)
point(ox+(x-ox)*sin(self.p+sin(i*w)), oy+(y-oy)*sin(self.p+sin(i*w)))
point(ox+(x-ox)*sin(self.p-sin(i*w)), oy+(y-oy)*sin(self.p-sin(i*w)))
| [
"[email protected]"
] | |
eb19a9e76546f3274aeb55a5941ccfbf0c448ec4 | 23611933f0faba84fc82a1bc0a85d97cf45aba99 | /google-cloud-sdk/.install/.backup/lib/googlecloudsdk/command_lib/util/time_util.py | fa4aca709d2e13980911d752c895797f68fe0372 | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | KaranToor/MA450 | 1f112d1caccebdc04702a77d5a6cee867c15f75c | c98b58aeb0994e011df960163541e9379ae7ea06 | refs/heads/master | 2021-06-21T06:17:42.585908 | 2020-12-24T00:36:28 | 2020-12-24T00:36:28 | 79,285,433 | 1 | 1 | Apache-2.0 | 2020-12-24T00:38:09 | 2017-01-18T00:05:44 | Python | UTF-8 | Python | false | false | 3,224 | py | # Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A module for capturing time-related functions.
This makes mocking for time-related functionality easier.
"""
import calendar
import datetime
import re
import time
def CurrentTimeSec():
"""Returns a float of the current time in seconds."""
return time.time()
def Sleep(duration_sec):
"""Sleeps for the given duration."""
time.sleep(duration_sec)
def CurrentDatetimeUtc():
"""Returns the current date and time in the UTC timezone."""
return datetime.datetime.utcnow()
def IsExpired(timestamp_rfc3993_str):
no_expiration = ''
if timestamp_rfc3993_str == no_expiration:
return False
timestamp_unix = Strptime(timestamp_rfc3993_str)
if timestamp_unix < CurrentTimeSec():
return True
return False
# Parsing code for rfc3339 timestamps, taken from Google's rfc3339.py.
# TODO(user): Investigate opensourcing rfc3999.py
def Strptime(rfc3339_str):
"""Converts an RFC 3339 timestamp to Unix time in seconds since the epoch.
Args:
rfc3339_str: a timestamp in RFC 3339 format (yyyy-mm-ddThh:mm:ss.sss
followed by a time zone, given as Z, +hh:mm, or -hh:mm)
Returns:
a number of seconds since January 1, 1970, 00:00:00 UTC
Raises:
ValueError: if the timestamp is not in an acceptable format
"""
match = re.match(r'(\d\d\d\d)-(\d\d)-(\d\d)T'
r'(\d\d):(\d\d):(\d\d)(?:\.(\d+))?'
r'(?:(Z)|([-+])(\d\d):(\d\d))', rfc3339_str)
if not match:
raise ValueError('not a valid timestamp: %r' % rfc3339_str)
(year, month, day, hour, minute, second, frac_seconds,
zulu, zone_sign, zone_hours, zone_minutes) = match.groups()
time_tuple = map(int, [year, month, day, hour, minute, second])
# Parse the time zone offset.
if zulu == 'Z': # explicit
zone_offset = 0
else:
zone_offset = int(zone_hours) * 3600 + int(zone_minutes) * 60
if zone_sign == '-':
zone_offset = -zone_offset
integer_time = calendar.timegm(time_tuple) - zone_offset
if frac_seconds:
sig_dig = len(frac_seconds)
return ((integer_time * (10 ** sig_dig)
+ int(frac_seconds)) * (10 ** -sig_dig))
else:
return integer_time
def CalculateExpiration(num_seconds):
"""Takes a number of seconds and returns the expiration time in RFC 3339."""
if num_seconds is None:
return None
utc_now = CurrentDatetimeUtc()
adjusted = utc_now + datetime.timedelta(0, int(num_seconds))
formatted_expiration = _FormatDateString(adjusted)
return formatted_expiration
def _FormatDateString(d):
return ('%04d-%02d-%02dT%02d:%02d:%02dZ' %
(d.year, d.month, d.day, d.hour, d.minute, d.second))
| [
"[email protected]"
] | |
643d8479d0d4be592474bfdee79d5f9f22cb89e1 | bb87579e47fc04b299694b8a8fe318f022f54ee8 | /Automate the Boring Stuff/Ch.12 - Web Scraping/attribute.py | c4f59a6a44dfa7c2b6213063120b221b7c1a6447 | [] | no_license | QaisZainon/Learning-Coding | 7bbc45197085dfa8f41ac298d26cf54e99e7b877 | a3991842e79c30f24d7bc0cca77dbd09bc03372f | refs/heads/master | 2022-12-23T05:47:26.512814 | 2020-09-25T08:10:20 | 2020-09-25T08:10:20 | 297,945,320 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 317 | py | from selenium import webdriver
browser = webdriver.Firefox()
browser.get('https://inventwithpython.com')
try:
elem = browser.find_element_by_class_name('cover-thumb')
print('Found <%s> element with that class name!' %(elem.tag_name))
except:
print('Was not able to find an element with that name.') | [
"[email protected]"
] | |
6959b4c9ba3b87040a8e31e260f2243d2fc88cba | b5f5c749ad8ba774da04a3dcf44ea2e66aea6cd6 | /background/05geo/coverage.py | a80d605b766bf88a40e1d82d403f040d271128b2 | [] | no_license | yj-git/SearchRescueSys | 2329be5f3caf57f11a2e606da87382344698eff4 | 44347aef4cd5f75f1c9adcea76c21aa97b41e8ae | refs/heads/master | 2022-08-05T12:41:54.967248 | 2020-05-21T04:55:37 | 2020-05-21T04:55:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 522 | py | from geoserver.support import ResourceInfo
from geoserver.catalog import Catalog
class Coverage(ResourceInfo):
def __init__(self, catalog: Catalog, store_name, work_space):
super().__init__()
self.catalog = catalog
self.store_name = store_name
self.work_space = work_space
self.gs_version = self.catalog.get_short_version()
@property
def href(self):
return f"{self.catalog.service_url}/workspaces/{self.work_space}/coveragestores/{self.store_name}/coverages"
| [
"[email protected]"
] | |
0bc60f23731424fe5898e28b2074f5662c17e943 | e05c6a78b16f1d39b8e77db3ee5cea83b44ddf8a | /migrations/versions/0305m_add_pinpoint.py | 6dd09943085a1a433ea9ebe35523a519e6c9f0ff | [
"MIT"
] | permissive | cds-snc/notification-api | 7da0928f14608a2c7db1e229e17b9dbfaaf3d0f0 | 99558db51784925942d031511af3cfb03338a28d | refs/heads/main | 2023-08-18T00:08:42.787361 | 2023-08-17T20:58:47 | 2023-08-17T20:58:47 | 194,884,758 | 49 | 12 | MIT | 2023-09-14T18:55:07 | 2019-07-02T14:57:01 | Python | UTF-8 | Python | false | false | 896 | py | """
Revision ID: 0305m_add_pinpoint
Revises: 0305l_smtp_template
Create Date: 2020-04-20 12:00:00
"""
import uuid
from alembic import op
revision = "0305m_add_pinpoint"
down_revision = "0305l_smtp_template"
id = uuid.uuid4()
def upgrade():
op.execute(
f"""
INSERT INTO provider_details (id, display_name, identifier, priority, notification_type, active, version)
VALUES ('{id}', 'AWS Pinpoint', 'pinpoint', 50, 'sms', true, 1)
"""
)
op.execute(
f"""
INSERT INTO provider_details_history (id, display_name, identifier, priority, notification_type, active, version)
VALUES ('{id}', 'AWS Pinpoint', 'pinpoint', 50, 'sms', true, 1)
"""
)
def downgrade():
op.execute("DELETE FROM provider_details WHERE identifier = 'pinpoint'")
op.execute("DELETE FROM provider_details_history WHERE identifier = 'pinpoint'")
| [
"[email protected]"
] | |
1bfb37fd8a7fdad73e3394fb67e758cc3068b5b0 | 20927c6b6dbb360bf0fd13d70115bdb27e7196e7 | /0x0F-python-object_relational_mapping/1-filter_states.py | 2896d23cb05252078d33fd4184cfa458f4fcb904 | [] | no_license | PauloMorillo/holbertonschool-higher_level_programming | 27fc1c0a1ae5784bd22d07daaedb602ee618867d | 8a42a60aa4ea52b5cc2fb73e57f38aa6c5196c98 | refs/heads/master | 2021-08-16T17:13:45.568038 | 2020-07-29T01:20:25 | 2020-07-29T01:20:25 | 207,305,140 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 593 | py | #!/usr/bin/python3
"""This Module prints all of a database"""
import sys
import MySQLdb
def main():
"""main function"""
db = MySQLdb.connect(host='localhost',
user=sys.argv[1],
passwd=sys.argv[2],
db=sys.argv[3],
port=3306)
cur = db.cursor()
cur.execute("SELECT * FROM states WHERE name REGEXP BINARY"
"'^N' ORDER BY id")
rows = cur.fetchall()
for _row in rows:
print(_row)
cur.close()
db.close()
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
8e6b2a6221270da9a79fd2ca74b668333a09b391 | 47ae678aa432deb0eb4f99b6a9787853315ab899 | /qikan/spiders/SAGE58.py | ab7d30ebca329e46bdc015cf20b378342d85b27b | [] | no_license | RoggerLuo/python-scrapy-journal | 1f3fb2ac41d90d25a0b635932600ff2327bf22d1 | 38d8e714f346e5951bcb55487fc0056a834f30d8 | refs/heads/master | 2020-04-05T20:32:42.201876 | 2018-11-13T06:35:59 | 2018-11-13T06:35:59 | 157,185,106 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,153 | py | # -*- coding: utf-8 -*-
import scrapy
from qikan.items import QikanItem
import re
import time
from .config import Config,postItemWithPdf,postItem,proxyRequest
class Sage58Spider(scrapy.Spider):
    """Spider for one SAGE journal volume listing.

    ``parse`` collects the article links from the table-of-contents page and
    ``parse2`` scrapes per-article metadata (title, authors, affiliation,
    corresponding author/email, DOI, keywords, abstract, PDF link, volume,
    journal title, publish date) into a :class:`QikanItem`.
    """
    name = 'SAGE58'
    start_urls = ['http://journals.sagepub.com/toc/apca/5']
    base_url = 'http://journals.sagepub.com'
    def parse(self, response):
        # article detail-page URLs on the volume's table-of-contents page
        hrefs = response.xpath("//div[@class='art_title linkable']/a[@class='ref nowrap']/@href").extract()
        # volume/issue label shown in the issue navigation header; forwarded
        # to parse2 through request meta
        volume = response.xpath(
            "//div[@class='pager issueBookNavPager']/span[@class='journalNavCenterTd']/div[@class='journalNavTitle']/text()").extract()[
            0]
        for i in range(len(hrefs)):
            yield proxyRequest(url=self.base_url + hrefs[i], meta={'annualVolume': volume}, callback=self.parse2)
    def parse2(self, response):
        """Scrape a single article page into a QikanItem."""
        item = QikanItem()
        # article title: strip HTML tags from every <h1> fragment and join
        item['title'] = ''
        titles = response.xpath("//div[@class='hlFld-Title']//div[@class='publicationContentTitle']//h1").extract()
        pat = re.compile('<[^>]+>', re.S)  # crude tag-stripping regex
        for title in titles:
            item['title'] = item['title'] + pat.sub('', title).strip()
        # item['title'] = response.xpath("//div[@class='hlFld-Title']//div[@class='publicationContentTitle']//h1/text()").extract()[0].strip()
        # # titles = response.xpath("//h2[@class='citation__title']/text()").extract()
        # pat = re.compile('<[^>]+>', re.S)
        # authors (comma-separated); 'NULL' when none found
        item['author'] = ''
        # corresponding author
        # corresponding author's affiliation
        aus = []  # NOTE(review): unused leftover variable
        if response.xpath("//div[@class='header']/a[@class='entryAuthor']").extract():
            authors = response.xpath("//div[@class='header']/a[@class='entryAuthor']").extract()
            for author in authors:
                item['author'] = item['author'] + pat.sub('', author).strip() + ","
        else:
            item['author'] = 'NULL'
        # affiliation: try three page layouts in order of preference
        if response.xpath(
                "//div[@class='hlFld-ContribAuthor']/span[@class='NLM_contrib-group']/div[@class='artice-info-affiliation']/text()").extract():
            item['authorAffiliation'] = response.xpath(
                "//div[@class='hlFld-ContribAuthor']/span[@class='NLM_contrib-group']/div[@class='artice-info-affiliation']/text()").extract()[
                0]
        elif response.xpath(
                "//div[@class='hlFld-ContribAuthor']/div[@class='artice-info-affiliation'][1]/text()").extract():
            item['authorAffiliation'] = response.xpath(
                "//div[@class='hlFld-ContribAuthor']/div[@class='artice-info-affiliation'][1]/text()").extract()[0]
        elif response.xpath("//div[@class='artice-notes']//corresp//text()").extract():
            item['authorAffiliation'] = response.xpath("//div[@class='artice-notes']//corresp//text()").extract()[
                0].replace('Email:', '')
        else:
            item['authorAffiliation'] = 'NULL'
        # normalize whitespace in the affiliation string
        item['authorAffiliation'] = item['authorAffiliation'].replace('\n', '').replace('\r', '').replace('\t',
                                                                                                          '').replace(
            '  ', ' ')
        # print(item['authorAffiliation'])
        # corresponding-author e-mails, '||'-separated
        item['correspongdingauthorEmail'] = ''
        if response.xpath("//a[@class='email']/span[@class='nobrWithWbr']").extract():
            correspongdingauthorEmails = response.xpath("//a[@class='email']/span[@class='nobrWithWbr']").extract()
            for correspongdingauthorEmail in correspongdingauthorEmails:
                item['correspongdingauthorEmail'] = item['correspongdingauthorEmail'] + pat.sub('',
                                                                                               correspongdingauthorEmail).strip() + '||'
        else:
            item['correspongdingauthorEmail'] = 'NULL'
        # item['correspongdingauthorEmail'] = response.xpath("//a[@class='email']/span[@class='nobrWithWbr']").extract()
        # corresponding-author name: first contributor entry, '||'-terminated
        if response.xpath(
                "//div[@class='hlFld-ContribAuthor']/span[@class='contribDegrees'][1]/div[@class='authorLayer']/div[@class='header']/a[@class='entryAuthor']/text()").extract():
            item['correspongdingauthor'] = response.xpath(
                "//div[@class='hlFld-ContribAuthor']/span[@class='contribDegrees'][1]/div[@class='authorLayer']/div[@class='header']/a[@class='entryAuthor']/text()").extract()[
                                               0] + '||'
        else:
            item['correspongdingauthor'] = 'NULL'
        # pair each corresponding author with its e-mail as "(name,email),"
        if item['correspongdingauthor'] == 'NULL':
            item['correspongdingauthor'] = 'NULL'
        elif item['correspongdingauthor'] != '':
            correspongdingau = item['correspongdingauthor'].split("||")
            correspongdingEm = item['correspongdingauthorEmail'].split("||")
            item['correspongdingauthor'] = ''
            for i in range(len(correspongdingau)):
                if correspongdingau[i] != '':
                    item['correspongdingauthor'] += '(' + correspongdingau[i] + ',' + correspongdingEm[i] + '),'
        else:
            item['correspongdingauthor'] = 'NULL'
        # print(item['correspongdingauthor'])
        # DOI from the DOI widget link
        item['DOI'] = response.xpath(
            "//div[@class='widget-body body body-none body-compact-all']/div[@class='doiWidgetContainer']/a[@class='doiWidgetLink']/text()").extract()[
            0]
        # # print(item['DOI'])
        # keywords (comma-separated); 'NULL' when the page has none
        item['keyword'] = ''
        if response.xpath("//div[@class='hlFld-KeywordText']/kwd-group/a[@class='attributes']/text()").extract():
            keywords = response.xpath(
                "//div[@class='hlFld-KeywordText']/kwd-group/a[@class='attributes']/text()").extract()
            for keyword in keywords:
                item['keyword'] = item['keyword'] + keyword + ','
        else:
            item['keyword'] = 'NULL'
        # abstract: tag-stripped paragraphs joined with '<br>'
        item['abstract'] = ''
        pat = re.compile('<[^>]+>', re.S)
        if response.xpath("//div[@class='hlFld-Abstract']//div[@class='abstractSection abstractInFull']//p"):
            coninfos = response.xpath(
                "//div[@class='hlFld-Abstract']//div[@class='abstractSection abstractInFull']//p").extract()
            for coninfo in coninfos:
                item['abstract'] = item['abstract'] + pat.sub('', coninfo).strip() + '<br>'
        else:
            item['abstract'] = 'NULL'
        item['abstract'] = item['abstract'].replace('\n', '')
        # print(item['abstract'])
        # browser-like headers for the PDF download request
        header = {
            'Upgrade-Insecure-Requests': '1',
            'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3013.3 Safari/537.36'
        }
        if response.xpath(
                "//div[@class='rightMobileMenuButton articleToolsButton PDFTool pdf-access redButton smallButton']/a/@href").extract():
            pdf = response.xpath(
                "//div[@class='rightMobileMenuButton articleToolsButton PDFTool pdf-access redButton smallButton']/a/@href").extract()[
                0]
            item['pdf'] = self.base_url + pdf
            # NOTE(review): postItemWithPdf(item) is *called* here, so it must
            # return a callback (curried function); verify it is not meant to
            # be passed uncalled like self.parse2 above.
            yield proxyRequest(url=self.base_url + pdf, meta={'filename': pdf.split('/')[-1] + '.pdf'}, headers=header,
                               callback=postItemWithPdf(item)
                               )
        else:
            item['pdf'] = 'NULL'
            postItem(item)
        # print(item['pdf'])
        # volume / issue / year label captured by parse()
        item['annualVolume'] = response.meta['annualVolume'].strip()
        # item['annualVolume'] = response.xpath("//div[@class='Article information']/div[1]/text()").extract()[0].strip()
        # item['annualVolume'] = pat.sub('', annualVolume).strip()
        # print(item['annualVolume'])
        # page numbers are not available on this layout
        item['pageNumber'] = 'NULL'
        # print(pageNumber)
        # ru2 = re.compile(r'pp (.*)')
        # # page number
        # item['pageNumber'] = ru2.search(pageNumber).group(1)
        # print(item['pageNumber'])
        # journal title (the div id below is page-generated and may be fragile)
        item['journalTitle'] = pat.sub('', response.xpath(
            "//div[@id='e3c018c7-8573-4acd-93ae-0ff4b1f3baf3']/div[@class='wrapped ']").extract()[0]).strip()
        # print(item['journalTitle'])
        # some journal TOCs carry a cover image; not collected here
        item['imageUrlList'] = 'NULL'
        # raw form, e.g. "12 July 2018"
        item['publishTime'] = response.xpath("//span[@class='publicationContentEpubDate dates']/text()").extract()[
            1].strip()
        # reformat to ISO style, e.g. "2018-07-12"
        temp = time.strptime(item['publishTime'], "%B %d, %Y")
        item['publishTime'] = time.strftime("%Y-%m-%d", temp)
        # print(item['publishTime'])
        yield item
    # download the article PDF to Config().pdf_url
    def downloadpdf(self, response):
        file_path = Config().pdf_url + response.meta['filename']
        with open(file_path, 'wb') as f:
            f.write(response.body)
    # download an image to Config().img_url
    def downloadimg(self, response):
        file_path = Config().img_url + response.meta['filename']
        with open(file_path, 'wb') as f:
            f.write(response.body)
| [
"[email protected]"
] | |
cfc1b386f4a20a5d866800567c6b0b276a19ef98 | 469772806152cff25b13a1e73ec5133ba3d0f283 | /src/reversi_zero/agent/api.py | 6ba0bb23675d7c7cd12f8ccfc0178d0c3a3fbb6a | [
"MIT"
] | permissive | awesome-archive/reversi-alpha-zero | 880e92cb02a8b4d21e824baed3584a7eec823bfe | 90ba711f2233660bbf36d8203873b3fc16f7a1e8 | refs/heads/master | 2022-03-30T13:22:21.547259 | 2017-11-22T01:16:59 | 2017-11-22T01:16:59 | 111,628,182 | 0 | 0 | MIT | 2020-01-10T11:11:25 | 2017-11-22T02:47:19 | Python | UTF-8 | Python | false | false | 688 | py | from reversi_zero.config import Config
class ReversiModelAPI:
    """Thin inference facade over a ReversiModel's neural network."""
    def __init__(self, config: Config, agent_model):
        """
        :param config: run configuration (stored for parity; not used here)
        :param reversi_zero.agent.model.ReversiModel agent_model: model whose
            underlying network produces (policy, value) predictions
        """
        self.config = config
        self.agent_model = agent_model
    def predict(self, x):
        """Predict (policy, value) for one state (2,8,8) or a batch (N,2,8,8).

        A single state is promoted to a batch of one before prediction, and
        the leading batch dimension is stripped from the result again.
        """
        assert x.ndim in (3, 4)
        assert x.shape == (2, 8, 8) or x.shape[1:] == (2, 8, 8)
        is_single = x.ndim == 3
        batch = x.reshape(1, 2, 8, 8) if is_single else x
        policy, value = self.agent_model.model.predict_on_batch(batch)
        if is_single:
            return policy[0], value[0]
        return policy, value
| [
"[email protected]"
] | |
37e1ff1235046fbc5c983001e7cfb790efd4803e | 23ba854b3b6cb457c8c01793e24f15d411650281 | /monk/system/summary.py | a87cc621feca3f74387e5c3e0cc0a91ab73cf0a4 | [
"Apache-2.0"
] | permissive | shaunstanislauslau/monk_v1 | a506a8cb2e45f3d04734bfab01db09eb3d804771 | 01905b911f1757adef9d7366a704b2a5289e1095 | refs/heads/master | 2023-05-11T08:16:39.442925 | 2020-03-03T07:45:06 | 2020-03-03T07:45:06 | 244,685,539 | 0 | 0 | Apache-2.0 | 2023-05-09T19:02:15 | 2020-03-03T16:25:46 | null | UTF-8 | Python | false | false | 8,361 | py | from system.imports import *
from system.common import read_json
@accepts(str, post_trace=True)
@TraceFunction(trace_args=True, trace_rv=True)
def print_summary(fname):
    """Pretty-print a human-readable summary of a saved experiment.

    Args:
        fname (str): Path to the experiment summary json file, loaded via
            ``read_json``.

    Returns:
        None. All output goes to stdout.
    """
    system_dict = read_json(fname)
    #############################################################################################################################
    # System overview
    print("")
    print("")
    print("Experiment Summary")
    print("")
    print("System")
    print("    Project Name:    {}".format(system_dict["project_name"]))
    print("    Project Dir:     {}".format(system_dict["project_dir_relative"]))
    print("    Experiment Name: {}".format(system_dict["experiment_name"]))
    print("    Experiment Dir:  {}".format(system_dict["experiment_dir_relative"]))
    print("    Library:         {}".format(system_dict["library"]))
    print("    Origin:          {}".format(system_dict["origin"]))
    print("")
    #############################################################################################################################
    # Dataset
    print("Dataset")
    print("    Status: {}".format(system_dict["dataset"]["status"]))
    if system_dict["dataset"]["status"]:
        print("    Dataset Type: {}".format(system_dict["dataset"]["dataset_type"]))
        print("    Train path:   {}".format(system_dict["dataset"]["train_path"]))
        print("    Val path:     {}".format(system_dict["dataset"]["val_path"]))
        print("    Test path:    {}".format(system_dict["dataset"]["test_path"]))
        print("    CSV Train:    {}".format(system_dict["dataset"]["csv_train"]))
        print("    CSV Val:      {}".format(system_dict["dataset"]["csv_val"]))
        print("    CSV Test:     {}".format(system_dict["dataset"]["csv_test"]))
        print("")
        print("Dataset Parameters:")
        print("    Input Size:  {}".format(system_dict["dataset"]["params"]["input_size"]))
        print("    Batch Size:  {}".format(system_dict["dataset"]["params"]["batch_size"]))
        print("    Shuffle:     {}".format(system_dict["dataset"]["params"]["train_shuffle"]))
        print("    Processors:  {}".format(system_dict["dataset"]["params"]["num_workers"]))
        print("    Num Classes: {}".format(system_dict["dataset"]["params"]["num_classes"]))
        print("")
        print("Dataset Transforms:")
        print("    Train transforms: {}".format(system_dict["dataset"]["transforms"]["train"]))
        print("    Val transforms:   {}".format(system_dict["dataset"]["transforms"]["val"]))
        print("    Test transforms:  {}".format(system_dict["dataset"]["transforms"]["test"]))
        print("")
    #############################################################################################################################
    # Model
    print("Model")
    # BUGFIX: the original used '"    Status:".format(...)' without a '{}'
    # placeholder, so the model status value was silently dropped.
    print("    Status: {}".format(system_dict["model"]["status"]))
    if system_dict["model"]["status"]:
        print("    Model Name: {}".format(system_dict["model"]["params"]["model_name"]))
        print("    Use Gpu: {}".format(system_dict["model"]["params"]["use_gpu"]))
        print("    Use pretrained weights: {}".format(system_dict["model"]["params"]["use_pretrained"]))
        print("    Base network weights freezed: {}".format(system_dict["model"]["params"]["freeze_base_network"]))
        print("    Number of trainable parameters: {}".format(system_dict["model"]["params"]["num_params_to_update"]))
        print("")
    #############################################################################################################################
    # Hyper-parameters
    print("Hyper-Parameters")
    print("    Status: {}".format(system_dict["hyper-parameters"]["status"]))
    if system_dict["hyper-parameters"]["status"]:
        print("    Optimizer: {}".format(system_dict["hyper-parameters"]["optimizer"]))
        print("    Learning Rate Scheduler: {}".format(system_dict["hyper-parameters"]["learning_rate_scheduler"]))
        print("    loss: {}".format(system_dict["hyper-parameters"]["loss"]))
        print("    Num epochs: {}".format(system_dict["hyper-parameters"]["num_epochs"]))
        print("")
    #############################################################################################################################
    # Training settings.
    # BUGFIX: this section was headed "Dataset Settings" although every value
    # below is read from system_dict["training"]["settings"].
    print("")
    print("Training Settings")
    if "display_progress" in system_dict["training"]["settings"]:
        print("    Status: {}".format(True))
        print("    Display progress: {}".format(system_dict["training"]["settings"]["display_progress"]))
        print("    Display progress realtime: {}".format(system_dict["training"]["settings"]["display_progress_realtime"]))
        print("    Save intermediate models: {}".format(system_dict["training"]["settings"]["save_intermediate_models"]))
        print("    Save training logs: {}".format(system_dict["training"]["settings"]["save_training_logs"]))
        print("    Intermediate model prefix: {}".format(system_dict["training"]["settings"]["intermediate_model_prefix"]))
    else:
        print("    Status: {}".format(False))
    print("")
    #############################################################################################################################
    # Training results
    print("")
    print("Training")
    print("    Status: {}".format(system_dict["training"]["status"]))
    if system_dict["training"]["status"]:
        print("    Model dir: {}".format(system_dict["model_dir_relative"]))
        print("    Best validation accuracy: {}".format(system_dict["training"]["outputs"]["best_val_acc"]))
        print("    Best validation accuracy epoch: {}".format(system_dict["training"]["outputs"]["best_val_acc_epoch_num"]))
        print("    Training time: {}".format(system_dict["training"]["outputs"]["training_time"]))
        print("    Epochs completed: {}".format(system_dict["training"]["outputs"]["epochs_completed"]))
        print("    Max Gpu Usage: {}".format(system_dict["training"]["outputs"]["max_gpu_usage"]))
        print("")
        print("Training Log Files")
        print("    Train accuracy: {}".format(system_dict["training"]["outputs"]["log_train_acc_history_relative"]))
        print("    Train loss: {}".format(system_dict["training"]["outputs"]["log_train_loss_history_relative"]))
        print("    Val accuracy: {}".format(system_dict["training"]["outputs"]["log_val_acc_history_relative"]))
        print("    Val loss: {}".format(system_dict["training"]["outputs"]["log_val_loss_history_relative"]))
        print("")
    #############################################################################################################################
    # External evaluation
    print("External Evaluation")
    print("    Status: {}".format(system_dict["testing"]["status"]))
    if system_dict["testing"]["status"]:
        print("    Evaluation Dataset path: {}".format(system_dict["dataset"]["test_path"]))
        print("    Num Images: {}".format(system_dict["testing"]["num_images"]))
        print("    Num correct predictions: {}".format(system_dict["testing"]["num_correct_predictions"]))
        print("    Overall Accuracy:   {} %".format(system_dict["testing"]["percentage_accuracy"]))
        print("")
    #############################################################################################################################
| [
"[email protected]"
] | |
7389d460abe517d9e993221e0b2c9acc6154d6ab | 916c49b17d730ae36ce3fe8178146baac53fb15d | /common/ecmp/base.py | 10d355316e17a8b6561646a9b9de047fdebde323 | [
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer"
] | permissive | absurya4/tf-test | ec96955ed9ddd662112173d2ff14059cd8d49552 | f1faeca6e8a0abbc0efd77455379163d61e3a3d7 | refs/heads/master | 2022-12-18T08:25:20.375604 | 2020-09-21T07:31:53 | 2020-09-24T07:09:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,007 | py | from common.base import GenericTestBase
import os
from tcutils.util import get_random_name, get_random_cidr
class ECMPTestBase(GenericTestBase):
    """Shared fixture base for ECMP tests.

    ``setUpClass`` provisions a management VN, a left/right VN pair and one
    end VM in each of the left/right VNs, then publishes every fixture and
    name through ``cls.common_args`` for subclasses to consume.
    """
    @classmethod
    def setUpClass(cls):
        super(ECMPTestBase, cls).setUpClass()
        cls.inputs.set_af(cls.get_af())
        try:
            # Mgmt VN
            cls.mgmt_vn_name = get_random_name('mgmt_%s' % (
                cls.inputs.project_name))
            cls.mgmt_vn_subnets = [get_random_cidr(af=cls.inputs.get_af())]
            cls.mgmt_vn_fixture = cls.create_only_vn(
                cls.mgmt_vn_name, cls.mgmt_vn_subnets)
            # Left VN
            cls.left_vn_name = get_random_name('left_%s' % (
                cls.inputs.project_name))
            cls.left_vn_subnets = [get_random_cidr(af=cls.inputs.get_af())]
            cls.left_vn_fixture = cls.create_only_vn(cls.left_vn_name,
                                                     cls.left_vn_subnets)
            # Right VN
            cls.right_vn_name = get_random_name('right_%s' % (
                cls.inputs.project_name))
            cls.right_vn_subnets = [get_random_cidr(af=cls.inputs.get_af())]
            cls.right_vn_fixture = cls.create_only_vn(cls.right_vn_name,
                                                      cls.right_vn_subnets)
            #if cls.inputs.get_af() == 'v6':
            #    cls.left_vn_subnets += [get_random_cidr()]
            #    cls.right_vn_subnets += [get_random_cidr()]
            # Pick the CI image only for v4 CI runs; otherwise use the
            # traffic-capable cirros image.
            if cls.inputs.is_ci_setup() and cls.inputs.get_af() == 'v4':
                cls.image_name = cls.inputs.get_ci_image()
            else:
                cls.image_name = 'cirros-traffic'
            # End Vms
            cls.left_vm_name = get_random_name('left_vm_%s' % (
                cls.inputs.project_name))
            cls.left_vm_fixture = cls.create_only_vm(cls.left_vn_fixture,
                                                     vm_name=cls.left_vm_name,
                                                     image_name=cls.image_name)
            cls.right_vm_name = get_random_name('right_vm_%s' % (
                cls.inputs.project_name))
            cls.right_vm_fixture = cls.create_only_vm(cls.right_vn_fixture,
                                                      vm_name=cls.right_vm_name,
                                                      image_name=cls.image_name)
        except:
            # Bare except is deliberate: clean up any partially created
            # fixtures, then re-raise the original error unchanged.
            cls.tearDownClass()
            raise
        # Convenience bundle handed to test methods / helper topologies.
        cls.common_args = { 'mgmt_vn_name' : cls.mgmt_vn_name,
                            'mgmt_vn_subnets' : cls.mgmt_vn_subnets,
                            'mgmt_vn_fixture' : cls.mgmt_vn_fixture,
                            'left_vn_name' : cls.left_vn_name,
                            'left_vn_subnets' : cls.left_vn_subnets,
                            'left_vn_fixture' : cls.left_vn_fixture,
                            'left_vm_name' : cls.left_vm_name,
                            'left_vm_fixture' : cls.left_vm_fixture,
                            'right_vn_name' : cls.right_vn_name,
                            'right_vn_subnets' : cls.right_vn_subnets,
                            'right_vn_fixture' : cls.right_vn_fixture,
                            'right_vm_name' : cls.right_vm_name,
                            'right_vm_fixture' : cls.right_vm_fixture,
                            'image_name' : cls.image_name }
    # end setUpClass
    @classmethod
    def cleanUpObjects(cls):
        # Tear down VMs before their VNs; safe_cleanup tolerates
        # fixtures that were never created.
        cls.safe_cleanup('right_vm_fixture')
        cls.safe_cleanup('left_vm_fixture')
        cls.safe_cleanup('left_vn_fixture')
        cls.safe_cleanup('right_vn_fixture')
        cls.safe_cleanup('mgmt_vn_fixture')
    # end cleanUpObjects
    @classmethod
    def tearDownClass(cls):
        cls.cleanUpObjects()
        super(ECMPTestBase, cls).tearDownClass()
    # end tearDownClass
| [
"[email protected]"
] | |
7f69dbce010f14ac2debdf2734632872607447c1 | 7f53f509222f7e4b1ca8137bb31cf2edc5f64e80 | /spec.py | 285af57a848c75051fd2652556eb8d7fba9fe315 | [] | no_license | wahid999/Pythonforbegainer | f61c7567c37b3d4103b5550a6975f78c960763f8 | 794fd9471ff95eac52ae42d8548526c09df23bbd | refs/heads/main | 2023-07-11T22:31:31.435440 | 2021-08-16T17:18:35 | 2021-08-16T17:18:35 | 399,344,206 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 202 | py | import os
os.system('clear')  # clear the terminal screen (POSIX `clear` command)
#************** Strings ****************
#-------------> CONCATENATION -------->
name = 'Wahid Hussain'
# Build a greeting by joining two strings with the `+` operator.
greetings = 'hello, My name is ' + name
# Indexing demo: greetings[24] is 'H', the first letter of the surname.
print (greetings[24])
| [
"[email protected]"
] | |
5e50baac929a6e64e65d334d5456747280a5306c | 7bededcada9271d92f34da6dae7088f3faf61c02 | /pypureclient/flashblade/FB_2_6/models/logs_async_response.py | 246f276f1a09d0b9999298ed29d41641fb5f13a5 | [
"BSD-2-Clause"
] | permissive | PureStorage-OpenConnect/py-pure-client | a5348c6a153f8c809d6e3cf734d95d6946c5f659 | 7e3c3ec1d639fb004627e94d3d63a6fdc141ae1e | refs/heads/master | 2023-09-04T10:59:03.009972 | 2023-08-25T07:40:41 | 2023-08-25T07:40:41 | 160,391,444 | 18 | 29 | BSD-2-Clause | 2023-09-08T09:08:30 | 2018-12-04T17:02:51 | Python | UTF-8 | Python | false | false | 3,125 | py | # coding: utf-8
"""
FlashBlade REST API
A lightweight client for FlashBlade REST API 2.6, developed by Pure Storage, Inc. (http://www.purestorage.com/).
OpenAPI spec version: 2.6
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re
import six
import typing
from ....properties import Property
if typing.TYPE_CHECKING:
from pypureclient.flashblade.FB_2_6 import models
# NOTE: auto-generated by swagger-codegen; manual edits here are normally
# overwritten on the next code generation.
class LogsAsyncResponse(object):
    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # attribute name -> swagger type string
    swagger_types = {
        'items': 'list[LogsAsync]'
    }
    # attribute name -> json key in the API payload
    attribute_map = {
        'items': 'items'
    }
    # constructor arguments that must be supplied (none for this model)
    required_args = {
    }
    def __init__(
        self,
        items=None,  # type: List[models.LogsAsync]
    ):
        """
        Keyword args:
            items (list[LogsAsync])
        """
        if items is not None:
            self.items = items
    def __setattr__(self, key, value):
        # Reject attributes that are not part of the swagger model.
        if key not in self.attribute_map:
            raise KeyError("Invalid key `{}` for `LogsAsyncResponse`".format(key))
        self.__dict__[key] = value
    def __getattribute__(self, item):
        # Unset model fields are Property sentinels; present them as None.
        value = object.__getattribute__(self, item)
        if isinstance(value, Property):
            return None
        else:
            return value
    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        for attr, _ in six.iteritems(self.swagger_types):
            if hasattr(self, attr):
                value = getattr(self, attr)
                if isinstance(value, list):
                    result[attr] = list(map(
                        lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                        value
                    ))
                elif hasattr(value, "to_dict"):
                    result[attr] = value.to_dict()
                elif isinstance(value, dict):
                    result[attr] = dict(map(
                        lambda item: (item[0], item[1].to_dict())
                        if hasattr(item[1], "to_dict") else item,
                        value.items()
                    ))
                else:
                    result[attr] = value
        # Generated boilerplate: only relevant if the model subclasses dict.
        if issubclass(LogsAsyncResponse, dict):
            for key, value in self.items():
                result[key] = value
        return result
    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, LogsAsyncResponse):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| [
"[email protected]"
] | |
96aa8ac46b3d2ef2095ea9e7c4211fe64ae7f673 | bb53229d1f296f8b7b3f7eb623673031474a4664 | /robot/model/arm/pets.py | 8523fe88eb018c1b1287289919557794aa76721a | [] | no_license | hzaskywalker/torch_robotics | 03f2de64972d47752e45ae0a0c30334bf6c52c6c | 0f3d5a46e81d734d514bffcbf4ed84cdcdbd4c86 | refs/heads/master | 2023-07-28T17:04:17.915787 | 2021-09-14T04:30:55 | 2021-09-14T04:30:55 | 405,022,434 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,897 | py | # reproduce the pets cem with the new framework
import os
import torch
import tqdm
from torch import nn
import numpy as np
from robot.controller.pets.model import EnBNN
from robot import U
from . import Frame, trainer
class Distribution(Frame):
    """Frame holding a sampled state plus the Gaussian parameters it came from.

    Dim -2 of every tensor is always the ensemble dimension. When ``state``
    is None the state is drawn from N(mean, exp(log_var)) on construction.
    """
    def __init__(self, state, mean=None, log_var=None):
        self.state = state
        self.mean = mean
        self.log_var = log_var
        if state is None:
            assert mean is not None and log_var is not None
            self.state = self.sample(mean, log_var)
    def iter(self):
        """Return the (state, mean, log_var) triple."""
        return (self.state, self.mean, self.log_var)
    def sample(self, mean, log_var):
        """Draw a sample via the reparameterization trick.

        BUGFIX: the original did ``inp = mean`` followed by ``inp += noise``,
        which mutated the caller's ``mean`` tensor in place — so ``self.mean``
        silently became the sample. Use an out-of-place add instead.
        """
        if log_var is None:
            return mean
        return mean + torch.randn_like(log_var) * torch.exp(log_var * 0.5)
    def calc_loss(self, label):
        """Gaussian negative log-likelihood of ``label.state`` under (mean, log_var)."""
        t = label.state
        inv_var = torch.exp(-self.log_var)
        loss = ((self.mean - t.detach()) ** 2) * inv_var + self.log_var
        loss = loss.mean(dim=(0, 1, 3))
        loss = loss.sum()  # sum across different models
        return {'loss': loss}
    @classmethod
    def from_observation(cls, observation):
        # build new frame from the observation (inserts the ensemble dim);
        # notice, there is no mean/std here, only a state
        return cls(observation[..., None, :], None, None)
    def as_observation(self):
        """Return the state as a cpu/numpy observation."""
        return U.tocpu(self.state)
class CheetahFrame(Distribution):
    """Half-cheetah frame: 18-d state, 6-d action, ensemble of 5 models."""
    input_dims = (5, 18, 6)
    output_dims = (18, 18)
    def as_input(self, action):
        """Encode the state for the network: drop x-pos, sin/cos the root angle."""
        state = self.state
        assert state.dim() == 3
        root_z = state[..., 1:2]
        root_angle = state[..., 2:3]
        network_input = torch.cat(
            [root_z, root_angle.sin(), root_angle.cos(), state[..., 3:]], dim=-1)
        return network_input, action
    def add(self, mean, std):
        """Apply a predicted delta: channel 0 is absolute, the rest are residual."""
        shifted_mean = torch.cat(
            [mean[..., :1], self.state[..., 1:] + mean[..., 1:]], dim=-1)
        return self.__class__(None, shifted_mean, std)
    @classmethod
    def obs_cost_fn(cls, obs):
        # reward is forward progress, stored in observation channel 0
        return -obs[..., 0]
    @classmethod
    def ac_cost_fn(cls, acs):
        # quadratic action penalty
        return 0.1 * (acs ** 2).sum(dim=-1)
    def compute_reward(self, s, a, goal):
        """Negative total cost of state ``s`` and action ``a`` (goal unused)."""
        return -(self.obs_cost_fn(s.state) + self.ac_cost_fn(a))
class ArmFrame(Distribution):
    """7-DoF arm frame: sin/cos joint encoding, (dq, q-delta + achieved xyz) output."""
    input_dims = (5, 21, 7)
    output_dims = (14 + 3, 14)
    def as_input(self, action):
        """Encode joint angles as sin/cos plus raw joint velocities."""
        joint_angles = self.state[..., :7]
        joint_velocities = self.state[..., 7:14]
        features = torch.cat(
            [joint_angles.sin(), joint_angles.cos(), joint_velocities], dim=-1)
        return features, action
    def add(self, mean, std):
        # joint q/dq are predicted as residuals; the xyz tail is absolute
        # (maybe the rotations should be bounded here..?)
        shifted_mean = torch.cat(
            [self.state[..., :14] + mean[..., :14], mean[..., 14:17]], dim=-1)
        return self.__class__(None, shifted_mean, std)
    def compute_reward(self, s, a, g):
        """Negative Euclidean distance from the achieved xyz to goal ``g``."""
        goal = g
        while goal.dim() < self.state.dim():
            goal = goal[None, :]
        distance = (((self.state[..., 14:17] - goal) ** 2).sum(dim=-1)) ** 0.5
        return -distance
class PlaneFrame(Distribution):
    """2-d point-mass frame: the raw state is fed to the network unchanged."""
    input_dims = (5, 2, 2)
    output_dims = (2, 2)
    def as_input(self, action):
        # no feature engineering needed for the planar task
        return self.state, action
    def add(self, mean, std):
        # the network predicts a residual on the 2-d position
        return self.__class__(None, self.state + mean, std)
    def compute_reward(self, s, a, g):
        """Negative Euclidean distance from the current position to goal ``g``."""
        goal = g
        while goal.dim() < self.state.dim():
            goal = goal[None, :]
        distance = (((self.state - goal) ** 2).sum(dim=-1)) ** 0.5
        return -distance
class EnsembleModel(EnBNN):
    """Ensemble Bayesian NN dynamics model with batch-major in/out tensors.

    ``inp_dims`` is (ensemble_size, state_dim, action_dim) and
    ``oup_dims[0]`` is the predicted state dimension.
    """
    def __init__(self, inp_dims, oup_dims,
                 mid_channels = 200,
                 num_layers=5,
                 # per-layer L2 decay coefficients (one entry per layer)
                 weight_decay = np.array([0.000025, 0.00005, 0.000075, 0.000075, 0.0001]),
                 var_reg=0.01):
        self._weight_decay = weight_decay
        self._var_reg = var_reg
        ensemble_size, state_dim, action_dim = inp_dims
        super(EnsembleModel, self).__init__(ensemble_size, state_dim + action_dim, oup_dims[0],
                                            mid_channels=mid_channels, num_layers=num_layers)
    def forward(self, obs, action):
        # EnBNN apparently expects the ensemble dimension first, so swap
        # (batch, ensemble, ...) -> (ensemble, batch, ...) on the way in and
        # back on the way out — TODO confirm against EnBNN.forward.
        obs = obs.transpose(1, 0)
        if action.dim() == 2:
            action = action[:, None]  # add batch dimension
        action = action.transpose(1, 0)
        return [i.transpose(1, 0) for i in super(EnsembleModel, self).forward(obs, action)]
    def loss(self):
        # regularization-only loss: variance bound penalty + weight decay
        return self._var_reg * self.var_reg() + self.decay(self._weight_decay)
class Dataset:
    """Replay-buffer-backed transition dataset for model training.

    Stores whole episodes and yields shuffled (state, action, next-state)
    minibatches wrapped in ``frame_type`` frames.
    """
    def __init__(self, max_timestep, size, batch_size, frame_type):
        # local import keeps the heavy project dependency lazy
        from robot.controller.pets.replay_buffer import ReplayBuffer
        self.timestep = max_timestep       # episode length in env steps
        self.dataset = ReplayBuffer(self.timestep, size)
        self.batch_size = batch_size
        self.frame_type = frame_type       # frame class used to wrap batches
    def store_episode(self, data):
        # data is [obs (1, T+1, obs_dim), actions (1, T, act_dim)] — presumably;
        # verify against ReplayBuffer.store_episode.
        self.dataset.store_episode(data)
    def gen_data(self, num_train):
        """Yield ``num_train`` shuffled passes of (state, action, next-state) batches on GPU."""
        tmp = self.dataset.get()
        # align s_t with s_{t+1} by offsetting the observation sequence
        s, a, t = tmp['obs'][..., :-1, :], tmp['actions'], tmp['obs'][..., 1:, :]
        # flatten (episode, time) into a single sample axis
        s = s.reshape(-1, s.shape[-1])
        a = a.reshape(-1, a.shape[-1])
        t = t.reshape(-1, t.shape[-1])
        idxs = np.arange(len(s))
        for _ in tqdm.trange(num_train):
            idxs = np.random.permutation(idxs)
            batch_size = self.batch_size
            # ceil-divide so the final partial batch is not dropped
            num_batch = (len(idxs) + batch_size - 1) // batch_size
            for j in tqdm.trange(num_batch):
                idx = idxs[j * batch_size:(j + 1) * batch_size]
                state = self.frame_type.from_observation(s[idx])
                # extra axis makes the target a length-1 time dimension
                future = self.frame_type.from_observation(t[idx][:, None])
                yield state.cuda(), U.togpu(a[idx][:, None]), future.cuda()
class PetsRollout:
    """PETS-style particle rollout: propagate npart particles per candidate
    action sequence through the learned ensemble and average their rewards.
    """
    def __init__(self, model, frame_type, npart=20):
        self.model = model
        self.cls = frame_type
        self.ensemble_size = self.model.model.ensemble_size
        self.npart = npart
        # particles are split evenly across ensemble members
        self.K = self.npart//self.ensemble_size
    def rollout(self, obs, a, goal):
        """Return (None, cost) per candidate; cost is the negated mean reward."""
        obs = U.togpu(obs)
        a = U.togpu(a)
        if goal is not None:
            goal = U.togpu(goal)
        # replicate each start state into npart particles, grouped so that
        # dim -2 is the ensemble dimension (as Distribution expects)
        s = self.cls.from_observation(obs).state
        s = s.expand(-1, self.npart, -1).reshape(-1, self.ensemble_size, *s.shape[2:])
        s = self.cls(s)
        # broadcast actions over (K particles x ensemble members); shapes are
        # presumably (b*K, time, ensemble, act_dim) after the reshape —
        # TODO confirm against model.rollout.
        a = a[:, :, None, None, :].expand(-1, -1, self.K, self.ensemble_size, a.shape[-1])  # b, time, sene
        a = a.transpose(2, 1).reshape(-1, a.shape[1], self.ensemble_size, a.shape[-1])
        predict, reward = self.model.rollout(s, a, goal)
        # average the reward over particles and ensemble members
        reward = reward.reshape(-1, self.K, self.ensemble_size)
        reward = reward.mean(dim=(1, 2))
        return None, -reward
class online_trainer(trainer):
    """Online PETS training loop: alternate between collecting one episode
    with the CEM controller and fitting the ensemble model on the buffer.
    """
    def set_env(self):
        from robot.controller.pets.envs import make
        self.env, _ = make(self.args.env_name)
        # pick the frame class and episode length for the chosen environment
        if self.args.env_name == 'cheetah':
            self.frame_type = CheetahFrame
            timestep = 1000
        elif self.args.env_name == 'plane':
            self.frame_type = PlaneFrame
            timestep = 50
        elif self.args.env_name == 'arm':
            self.frame_type = ArmFrame
            timestep = 100
        else:
            raise NotImplementedError
        self.dataset = Dataset(timestep, int(1e6), 32, self.frame_type)
    def set_model(self):
        self.model = EnsembleModel(self.frame_type.input_dims, self.frame_type.output_dims)
    def set_agent(self):
        from .agents.simple_rollout import RolloutAgent
        # one normalizer per network input (state features, actions)
        normalizer = nn.ModuleList([U.Normalizer((i,)) for i in self.frame_type.input_dims[1:]])
        self.agent = RolloutAgent(self.model, lr=self.args.lr, loss_weights={'model_decay': 1.,'loss': 1.}, normalizers=normalizer).cuda()
    def set_rollout_model(self):
        self.rollout_predictor = PetsRollout(self.agent, self.frame_type, npart=20)
    def set_policy(self):
        self.set_rollout_model()
        args = self.args
        from .train_utils import RolloutCEM
        env = self.env
        # CEM planner over the learned rollout model
        self.controller = RolloutCEM(self.rollout_predictor, env.action_space, iter_num=5, horizon=30,
                                     num_mutation=500, num_elite=50, device=args.device, alpha=0.1, trunc_norm=True,
                                     lower_bound=env.action_space.low, upper_bound=env.action_space.high)
    def epoch(self, num_train=5, num_valid=0, num_eval=0, use_tqdm=False):
        """One outer iteration: collect an episode, then train on the buffer."""
        print(f"########################EPOCH {self.epoch_num}###########################")
        # update buffer by run once; epoch 0 uses a random policy to seed it
        self.agent.eval()
        if self.epoch_num == 0:
            policy = lambda x: self.env.action_space.sample()
        else:
            policy = self.controller
        env = self.env
        avg_reward, trajectories = U.eval_policy(policy, env, eval_episodes=1, save_video=0, progress_episode=True,
                                                 video_path=os.path.join(self.path, "video{}.avi"), return_trajectories=True,
                                                 timestep=self.dataset.timestep)
        for i in trajectories:
            # NOTE(review): the comprehension variable shadows the loop
            # variable `i`; it works because the outer `i` is re-read only
            # after the comprehension, but it is fragile — consider renaming.
            obs = np.array([i['observation'] for i in i[0]])[None, :]
            action = np.array(i[1])[None, :]
            self.dataset.store_episode([obs, action])
            # feed the raw (state, action) pairs through the frame encoding so
            # the normalizers see exactly what the network will see
            obs_inp = obs[:, :-1].reshape(-1, obs.shape[-1])
            action_inp = action.reshape(-1, action.shape[-1])
            obs_inp, action_inp = self.frame_type.from_observation(U.togpu(obs_inp)).as_input(U.togpu(action_inp))
            self.agent.update_normalizer([U.togpu(obs_inp), U.togpu(action_inp)])
        # train with the dataset
        self.agent.train()
        train_output = []
        for data in self.dataset.gen_data(num_train):
            out = self.agent.update(*data)
            train_output.append(out)
        out = U.merge_training_output(train_output)
        out['reward'] = avg_reward
        self.vis(out)
| [
"[email protected]"
] | |
c6db899b17994956a0afb5dd82d2d133537fb664 | df258f9b95493d146ef8d3e9fef8ee367fe66042 | /dffml/db/sql.py | bbbac8bea799aad05894665845c119803c332d57 | [
"LicenseRef-scancode-generic-export-compliance",
"MIT"
] | permissive | emrul/dffml | dd576582de5a95d2cc3c525131d1b1f66dfa84c1 | 0829a5830fef85d24baa80220fa9cf7e56fee236 | refs/heads/master | 2022-04-22T20:37:45.109948 | 2020-04-24T15:08:23 | 2020-04-24T15:08:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,272 | py | """
Base classes to wrap various SQL based databases in dffml.db abstraction.
"""
from typing import Dict, Any, List, Tuple, Optional
from dffml.db.base import BaseDatabaseContext, Conditions
class SQLDatabaseContext(BaseDatabaseContext):
# BIND_DECLARATION is the string used to bind a param
BIND_DECLARATION: str = "?"
@classmethod
def make_condition_expression(cls, conditions):
"""
Returns a dict with keys 'expression','values' if conditions is not empty
else returns `None`
example::
Input : conditions = [
[["firstName", "=", "John"], ["lastName", "=", "Miles"]],
[["age", "<", "38"]],
]
Output : {
'expression':
'((firstName = ? ) OR (lastName = ? )) AND ((age < ? ))',
'values':
['John', 'Miles', '38']
}
"""
def _make_condition_expression(conditions):
def make_or(lst):
val_list = []
exp = []
for cnd in lst:
exp.append(
f"(`{cnd.column}` {cnd.operation} {cls.BIND_DECLARATION} )"
)
val_list.append(cnd.value)
result = {"expression": " OR ".join(exp), "values": val_list}
return result
lst = map(make_or, conditions)
result_exps = []
result_vals = []
for result in lst:
temp_exp = result["expression"]
temp_exp = f"({temp_exp})"
result_exps.append(temp_exp)
result_vals.extend(result["values"])
result_exps = " AND ".join(result_exps)
result = {"expression": result_exps, "values": result_vals}
return result
condition_dict = None
if (not conditions == None) and (len(conditions) != 0):
condition_dict = _make_condition_expression(conditions)
return condition_dict
def create_table_query(
self, table_name: str, cols: Dict[str, str], *args, **kwargs
) -> None:
"""
Creates a create query. Table with name ``table_name`` will be created
if it doesn't exist.
Parameters
----------
table_name : str
Name of the table.
`cols` : dict
Mapping of column names to type of columns.
Returns
-------
query : str
``CREATE`` query
"""
query = (
f"CREATE TABLE IF NOT EXISTS {table_name} ("
+ ", ".join([f"`{k}` {v}" for k, v in cols.items()])
+ ")"
)
return query
def insert_query(
self, table_name: str, data: Dict[str, Any], *args, **kwargs
) -> None:
"""
Creates insert query. Keys in ``data`` dict correspond to the columns in
``table_name``.
Parameters
----------
table_name : str
Name of the table.
data : dict, optional
Columns names are keys, values are data to insert.
Returns
-------
query : str
``INSERT`` query
parameters : tuple
Variables to bind
"""
col_exp = ", ".join([f"`{col}`" for col in data])
query = (
f"INSERT INTO {table_name} "
+ f"( {col_exp} )"
+ f" VALUES( {', '.join([self.BIND_DECLARATION] * len(data))} ) "
)
return query, list(data.values())
def update_query(
self,
table_name: str,
data: Dict[str, Any],
conditions: Optional[Conditions] = None,
) -> None:
"""
Creates update query setting values of rows (satisfying ``conditions``
if provided) with ``data`` in ``table_name``.
Parameters
----------
table_name : str
Name of the table.
data : dict, optional
Columns names to update mapped to value to set to.
conditions: Conditions, optional
Nested array of conditions to satisfy, becomes ``WHERE``.
Returns
-------
query : str
``UPDATE`` query
parameters : tuple
Variables to bind
"""
query_values = list(data.values())
condition_dict = self.make_condition_expression(conditions)
if condition_dict is not None:
condition_exp = condition_dict["expression"]
query_values.extend(condition_dict["values"])
else:
condition_exp = None
query = (
f"UPDATE {table_name} SET "
+ " ,".join([f"`{col}` = {self.BIND_DECLARATION}" for col in data])
+ (f" WHERE {condition_exp}" if condition_exp is not None else "")
)
return query, query_values
def lookup_query(
self,
table_name: str,
cols: Optional[List[str]] = None,
conditions: Optional[Conditions] = None,
) -> Tuple[str, Tuple[Any]]:
"""
Creates a query string and tuple of parameters used as bindings.
Parameters
----------
table_name : str
Name of the table.
cols : list, optional
Columns names to return
conditions: Conditions, optional
Nested array of conditions to satisfy, becomes ``WHERE``.
Returns
-------
query : str
``SELECT`` query
parameters : tuple
Variables to bind
"""
condition_dict = self.make_condition_expression(conditions)
query_values = []
if condition_dict is not None:
condition_exp = condition_dict["expression"]
query_values.extend(condition_dict["values"])
else:
condition_exp = None
if not cols:
col_exp = "*"
else:
col_exp = ", ".join([f"`{col}`" for col in cols])
query = f"SELECT {col_exp} FROM {table_name} " + (
f" WHERE {condition_exp}" if condition_exp is not None else ""
)
return query, query_values
def remove_query(
self, table_name: str, conditions: Optional[Conditions] = None
):
"""
Creates a delete query to remove rows from ``table_name`` (satisfying
``conditions`` if provided).
Parameters
----------
table_name : str
Name of the table.
conditions: Conditions, optional
Nested array of conditions to satisfy, becomes ``WHERE``.
Returns
-------
query : str
``DELETE`` query
parameters : tuple
Variables to bind
"""
condition_dict = self.make_condition_expression(conditions)
query_values = []
if condition_dict is not None:
condition_exp = condition_dict["expression"]
query_values = condition_dict["values"]
else:
condition_exp = None
query = f"DELETE FROM {table_name} " + (
f" WHERE {condition_exp}" if condition_exp is not None else ""
)
return query, query_values
| [
"[email protected]"
] | |
ca42f02af10ca35b68749902215dcc8c12068f13 | d832d79e61b86f6a7e1ebe5b9dcf6b28ebf23cb1 | /revnum.py | 0e2228dc6fa415c46244ad0d3cc696042920d649 | [] | no_license | moneshvenkul/guvi_player | 5f12657a2efc5db9385543f970ef0238cdfacfaf | 84cdd0cd3de3e77e50c871bc7cd7f115f1f40de0 | refs/heads/master | 2020-05-01T14:27:21.058668 | 2019-03-25T08:15:33 | 2019-03-25T08:15:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 36 | py |
n=input()
x=n[::-1]
print(int(x))
| [
"[email protected]"
] | |
f2cc1601e225667554205561c993b3b430da90dd | 1edd52cf197e5ae67b5939a3beb3e70761334e62 | /AWS/AWS_boto3_narendra/20_Collections_stop_all_ec2.py | d019a4a10bd4d593f40e1b1694dac8e0692e7743 | [] | no_license | sandeepmchary/Devops_wordpress_Notes | bdcd85d526780d03c494ecb93e714e7ffe0a4d58 | ffd2092162073e1e7342c6066d023d04e6ca8c1c | refs/heads/master | 2022-06-18T21:33:02.471025 | 2022-06-12T11:14:47 | 2022-06-12T11:14:47 | 154,679,658 | 1 | 4 | null | 2022-05-19T16:59:57 | 2018-10-25T13:51:40 | HTML | UTF-8 | Python | false | false | 378 | py | import boto3
ec2_re=boto3.resource('ec2')
ec2_cli=boto3.client('ec2')
all_ins_id=[]
for each in ec2_re.instances.all():
print(each.id,each.state['Name'])
all_ins_id.append(each.id)
#print(all_ins_id)
print("Stopping all instances...")
ec2_re.instances.stop()
waiter=ec2_cli.get_waiter('instance_stopped')
waiter.wait(InstanceIds=all_ins_id)
print("All instances are stopped")
| [
"[email protected]"
] | |
88a5b2b4fd0da877b9c61c64b8cbb25dec8a8493 | d64289adc0908134bf97cbce2d9c5f305a8042d0 | /groupdocs_conversion_cloud/models/otp_load_options.py | 5a17f2da8e57ddbe68c5919dc2cdfdf15c5f4a30 | [
"MIT"
] | permissive | groupdocs-conversion-cloud/groupdocs-conversion-cloud-python | 07cfdabb6584e4f9835c25ff96a6053ef3a54596 | 496f307bc0b776314fd5f56781fb0e71b0b4985e | refs/heads/master | 2023-08-30T22:12:43.070658 | 2023-08-23T17:08:13 | 2023-08-23T17:08:13 | 179,628,452 | 7 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,622 | py | # coding: utf-8
# -----------------------------------------------------------------------------------
# <copyright company="Aspose Pty Ltd" file="OtpLoadOptions.py">
# Copyright (c) 2003-2023 Aspose Pty Ltd
# </copyright>
# <summary>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# </summary>
# -----------------------------------------------------------------------------------
import pprint
import re # noqa: F401
import six
from groupdocs_conversion_cloud.models import PresentationLoadOptions
class OtpLoadOptions(PresentationLoadOptions):
"""
Otp load options
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
}
attribute_map = {
}
def __init__(self, **kwargs): # noqa: E501
"""Initializes new instance of OtpLoadOptions""" # noqa: E501
base = super(OtpLoadOptions, self)
base.__init__(**kwargs)
self.swagger_types.update(base.swagger_types)
self.attribute_map.update(base.attribute_map)
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, OtpLoadOptions):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
"[email protected]"
] | |
9d04ced9a977b0813099e3e048af8c764a92ffc9 | 1d88ed99e2f01b6b0faa7acf762543f41569380c | /top/table/tests/test_returns.py | 7264e71d32ed00d88f79d614faa8c1cfeef75cdb | [] | no_license | loum/top | be0ae6951ed7d5834d14f96403d6cd1dc9d008a4 | 4d9aae6297793822b6de28b65f7639a4b2e6dcfa | refs/heads/master | 2016-09-10T10:38:41.312180 | 2014-06-05T05:44:48 | 2014-06-05T05:44:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,891 | py | import unittest2
import os
import datetime
import top
class TestReturns(unittest2.TestCase):
@classmethod
def setUpClass(cls):
cls._r = top.Returns()
cls._db = top.DbSession()
cls._db.connect()
db = cls._db
fixture_dir = os.path.join('top', 'tests', 'fixtures')
fixtures = [{'db': db.returns_reference,
'fixture': 'returns_reference.py'},
{'db': db.returns, 'fixture': 'returns.py'},
{'db': db.agent, 'fixture': 'agents.py'}]
for i in fixtures:
fixture_file = os.path.join(fixture_dir, i['fixture'])
db.load_fixture(i['db'], fixture_file)
# Update the returns created_ts.
cls._now = str(datetime.datetime.now()).split('.')[0]
sql = """UPDATE returns
SET created_ts = '%s'""" % cls._now
db(sql)
db.commit()
def test_init(self):
"""Placeholder test to make sure the Returns table is created.
"""
msg = 'Object is not an top.Returns'
self.assertIsInstance(self._r, top.Returns, msg)
def test_extract_id_sql(self):
"""Verify the extract_id_sql string.
"""
returns_id = 2
sql = self._db.returns.extract_id_sql(returns_id)
self._db(sql)
received = list(self._db.rows())
expected = [('[email protected]',
'0431602145',
'%s' % self._now,
'Bunters We Never Sleep News + Deli',
'693 Albany Hwy',
'Victoria Park',
'6101',
'WA')]
msg = 'extract_id_sql returned values error'
self.assertListEqual(received, expected, msg)
@classmethod
def tearDownClass(cls):
cls._db.disconnect()
cls._db = None
cls._r = None
| [
"[email protected]"
] | |
ba3910d957182f1266449984de846549adfd32bc | bc441bb06b8948288f110af63feda4e798f30225 | /database_delivery_sdk/model/inspection/collector_pb2.py | 11b6c11be52fbd835afa5a03782fb0095af50566 | [
"Apache-2.0"
] | permissive | easyopsapis/easyops-api-python | 23204f8846a332c30f5f3ff627bf220940137b6b | adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0 | refs/heads/master | 2020-06-26T23:38:27.308803 | 2020-06-16T07:25:41 | 2020-06-16T07:25:41 | 199,773,131 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | true | 4,328 | py | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: collector.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from database_delivery_sdk.model.inspection import arg_pb2 as database__delivery__sdk_dot_model_dot_inspection_dot_arg__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='collector.proto',
package='inspection',
syntax='proto3',
serialized_options=_b('ZDgo.easyops.local/contracts/protorepo-models/easyops/model/inspection'),
serialized_pb=_b('\n\x0f\x63ollector.proto\x12\ninspection\x1a\x30\x64\x61tabase_delivery_sdk/model/inspection/arg.proto\"y\n\x13InspectionCollector\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x03 \x01(\t\x12\x0e\n\x06script\x18\x04 \x01(\t\x12\'\n\x04\x61rgs\x18\x05 \x03(\x0b\x32\x19.inspection.InspectionArgBFZDgo.easyops.local/contracts/protorepo-models/easyops/model/inspectionb\x06proto3')
,
dependencies=[database__delivery__sdk_dot_model_dot_inspection_dot_arg__pb2.DESCRIPTOR,])
_INSPECTIONCOLLECTOR = _descriptor.Descriptor(
name='InspectionCollector',
full_name='inspection.InspectionCollector',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='inspection.InspectionCollector.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='inspection.InspectionCollector.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='content', full_name='inspection.InspectionCollector.content', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='script', full_name='inspection.InspectionCollector.script', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='args', full_name='inspection.InspectionCollector.args', index=4,
number=5, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=81,
serialized_end=202,
)
_INSPECTIONCOLLECTOR.fields_by_name['args'].message_type = database__delivery__sdk_dot_model_dot_inspection_dot_arg__pb2._INSPECTIONARG
DESCRIPTOR.message_types_by_name['InspectionCollector'] = _INSPECTIONCOLLECTOR
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
InspectionCollector = _reflection.GeneratedProtocolMessageType('InspectionCollector', (_message.Message,), {
'DESCRIPTOR' : _INSPECTIONCOLLECTOR,
'__module__' : 'collector_pb2'
# @@protoc_insertion_point(class_scope:inspection.InspectionCollector)
})
_sym_db.RegisterMessage(InspectionCollector)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
| [
"[email protected]"
] | |
f72712339cc187c4574fc8ad4a91106b89cb5f39 | 0f49a5e1daeb09742f87717e8f4849b87b1a9c44 | /src/dialogs/DialogUmlNodeEdit.py | 5c9ed16d21e86fbde26a233e0482e29a244a94d7 | [] | no_license | arita37/pynsource | 7ddc717972e3c8f8a1225f4d9ba196e03bfee5df | 57ed39ba112d97fc0af09669d6647952f6ae1e7c | refs/heads/master | 2021-02-04T09:03:10.796578 | 2020-02-06T06:31:19 | 2020-02-06T06:31:19 | 243,646,965 | 1 | 0 | null | 2020-02-28T00:42:02 | 2020-02-28T00:42:00 | null | UTF-8 | Python | false | false | 3,837 | py | # -*- coding: utf-8 -*-
###########################################################################
## Python code generated with wxFormBuilder (version Oct 26 2018)
## http://www.wxformbuilder.org/
##
## PLEASE DO *NOT* EDIT THIS FILE!
###########################################################################
import wx
import wx.xrc
###########################################################################
## Class DialogUmlNodeEdit
###########################################################################
class DialogUmlNodeEdit ( wx.Dialog ):
def __init__( self, parent ):
wx.Dialog.__init__ ( self, parent, id = wx.ID_ANY, title = u"Uml Node Properties", pos = wx.DefaultPosition, size = wx.Size( 342,469 ), style = wx.DEFAULT_DIALOG_STYLE|wx.RESIZE_BORDER )
self.SetSizeHints( wx.DefaultSize, wx.DefaultSize )
bSizer9 = wx.BoxSizer( wx.VERTICAL )
self.m_panel2 = wx.Panel( self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.TAB_TRAVERSAL )
bSizer11 = wx.BoxSizer( wx.VERTICAL )
bSizer12 = wx.BoxSizer( wx.HORIZONTAL )
self.m_staticText1 = wx.StaticText( self.m_panel2, wx.ID_ANY, u"Class Name", wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText1.Wrap( -1 )
self.m_staticText1.SetMinSize( wx.Size( 55,-1 ) )
bSizer12.Add( self.m_staticText1, 1, wx.ALL, 5 )
self.txtClassName = wx.TextCtrl( self.m_panel2, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, wx.TE_PROCESS_ENTER )
self.txtClassName.SetMaxLength( 0 )
bSizer12.Add( self.txtClassName, 3, wx.ALIGN_CENTER_HORIZONTAL|wx.ALL, 5 )
bSizer11.Add( bSizer12, 0, wx.EXPAND, 5 )
bSizer14 = wx.BoxSizer( wx.HORIZONTAL )
self.m_staticText2 = wx.StaticText( self.m_panel2, wx.ID_ANY, u"Attributes", wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText2.Wrap( -1 )
self.m_staticText2.SetMinSize( wx.Size( 55,-1 ) )
bSizer14.Add( self.m_staticText2, 1, wx.ALL, 5 )
self.txtAttrs = wx.TextCtrl( self.m_panel2, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, wx.TE_MULTILINE )
# self.txtAttrs.SetMaxLength( 0 )
bSizer14.Add( self.txtAttrs, 3, wx.ALL|wx.EXPAND, 5 )
bSizer11.Add( bSizer14, 2, wx.EXPAND, 5 )
bSizer13 = wx.BoxSizer( wx.HORIZONTAL )
self.m_staticText3 = wx.StaticText( self.m_panel2, wx.ID_ANY, u"Methods", wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText3.Wrap( -1 )
self.m_staticText3.SetMinSize( wx.Size( 55,-1 ) )
bSizer13.Add( self.m_staticText3, 1, wx.ALL, 5 )
self.txtMethods = wx.TextCtrl( self.m_panel2, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, wx.TE_MULTILINE )
# self.txtMethods.SetMaxLength( 0 )
bSizer13.Add( self.txtMethods, 3, wx.ALL|wx.EXPAND, 5 )
bSizer11.Add( bSizer13, 2, wx.EXPAND, 5 )
bSizer4 = wx.BoxSizer( wx.HORIZONTAL )
bSizer5 = wx.BoxSizer( wx.VERTICAL )
self.m_button1 = wx.Button( self.m_panel2, wx.ID_CANCEL, u"Cancel", wx.DefaultPosition, wx.DefaultSize, 0 )
bSizer5.Add( self.m_button1, 0, wx.ALL|wx.EXPAND, 5 )
bSizer4.Add( bSizer5, 1, wx.ALIGN_CENTER_VERTICAL, 5 )
bSizer6 = wx.BoxSizer( wx.VERTICAL )
self.m_button2 = wx.Button( self.m_panel2, wx.ID_OK, u"OK", wx.DefaultPosition, wx.DefaultSize, 0 )
bSizer6.Add( self.m_button2, 0, wx.ALL|wx.EXPAND, 5 )
bSizer4.Add( bSizer6, 1, wx.ALIGN_CENTER_VERTICAL, 5 )
bSizer11.Add( bSizer4, 1, wx.ALIGN_CENTER_HORIZONTAL, 5 )
self.m_panel2.SetSizer( bSizer11 )
self.m_panel2.Layout()
bSizer11.Fit( self.m_panel2 )
bSizer9.Add( self.m_panel2, 1, wx.EXPAND |wx.ALL, 5 )
self.SetSizer( bSizer9 )
self.Layout()
self.Centre( wx.BOTH )
# Connect Events
self.txtClassName.Bind( wx.EVT_TEXT_ENTER, self.OnClassNameEnter )
def __del__( self ):
pass
# Virtual event handlers, overide them in your derived class
def OnClassNameEnter( self, event ):
event.Skip()
| [
"[email protected]"
] | |
f8e9da36ddbcc3f59610866194a1676bdff287ad | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/90/usersdata/218/60729/submittedfiles/matriz2.py | 9c1a3e9aea2e3f724466e0e40fb3cef9566de708 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,044 | py | # -*- coding: utf-8 -*-
import numpy as np
def somas (a):
lista=[]
cont=0
for i in range (0,a.shape[0],1):
cont=0
for j in range (0,a.shape[1],1):
cont=cont+a[i,j]
lista.append(cont)
for j in range (0,a.shape[1],1):
cont=0
for i in range (0,a.shape[0],1):
cont=cont+a[i,j]
lista.append(cont)
cont=0
for i in range (0,a.shape[0],1):
for j in range (0,a.shape[1],1):
if i==j:
cont=cont+a[i,j]
lista.append(cont)
i=0
j=a.shape[1]-1
cont=0
while j>=0:
cont=cont+a[i,j]
i=i+1
j=j-1
lista.append(cont)
for i in range (1,len(lista),1):
if lista[i]!=lista[i-1]:
return False
return True
n=int(input('digite as dimensões da matriz quadrada:'))
a=np.zeros((n,n))
for i in range (0,a.shape[0],1):
for j in range (0, a.shape[1],1):
a[i,j]=float(input('digite o elemento:'))
if somas(a):
print('S')
else:
print('N')_ | [
"[email protected]"
] | |
d69bd9f188b9854c76763631719ecfe14836d385 | 4b232f6f94097644b8c052a8be5dae83644a3d0c | /tractseg/libs/fiber_utils.py | d60707d50395644098468a2a4285a9eb5d00a203 | [
"Apache-2.0"
] | permissive | Nat-Sci/TractSeg | 40e0bafc1ed50bdf09c26977e28a202e09f6bccb | 506935488436951546a96358958cfb6752145c0d | refs/heads/master | 2022-11-20T08:06:34.322247 | 2020-07-16T16:09:12 | 2020-07-16T16:09:12 | 281,930,134 | 0 | 0 | Apache-2.0 | 2020-07-23T11:14:10 | 2020-07-23T11:14:09 | null | UTF-8 | Python | false | false | 15,560 | py |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import multiprocessing
from os import getpid
import numpy as np
import nibabel as nib
from dipy.tracking.streamline import compress_streamlines as compress_streamlines_dipy
from dipy.segment.metric import ResampleFeature
from dipy.tracking.metrics import spline
from dipy.tracking import utils as utils_trk
from dipy.tracking.streamline import transform_streamlines
from dipy.tracking.streamline import set_number_of_points
from dipy.tracking.streamline import length as sl_length
from tractseg.libs import utils
from tractseg.libs import peak_utils
# Global variables needed for shared memory of parallel fiber compression.
# NOTE(review): `global` statements at module level are no-ops (the names are
# module-level anyway); they are kept purely to flag that these variables are
# read by compress_fibers_worker_shared_mem in forked worker processes.
global _COMPRESSION_ERROR_THRESHOLD
_COMPRESSION_ERROR_THRESHOLD = None
global _FIBER_BATCHES
_FIBER_BATCHES = None
def compress_fibers_worker_shared_mem(idx):
    """
    Worker function for multithreaded compression.

    Must live at module top level (not inside a class/function) so that it can
    be pickled by multiprocessing. Each worker receives only an index and reads
    its batch from the shared-memory module global `_FIBER_BATCHES`, avoiding a
    copy of the streamline data per process.

    Args:
        idx: index of this worker's batch in `_FIBER_BATCHES`

    Returns:
        list of compressed streamlines for this batch
    """
    # Shared memory: each worker touches only its own slice via the index.
    batch = _FIBER_BATCHES[idx]
    # print('PID {}, DONE'.format(getpid()))
    return compress_streamlines_dipy(batch, tol_error=_COMPRESSION_ERROR_THRESHOLD)
def compress_streamlines(streamlines, error_threshold=0.1, nr_cpus=-1):
    """
    Compress streamlines in parallel using dipy's streamline compression.

    The streamlines are split into one chunk per worker process; the chunks are
    published via the module globals `_COMPRESSION_ERROR_THRESHOLD` and
    `_FIBER_BATCHES` so the forked workers can read them without pickling the
    data (see compress_fibers_worker_shared_mem).

    Args:
        streamlines: list of streamlines (each a 2D array of points)
        error_threshold: maximum compression error passed to dipy
                         (presumably in mm -- TODO confirm against dipy docs)
        nr_cpus: number of worker processes; -1 means use all available cores

    Returns:
        list of compressed streamlines (order of batches preserved)
    """
    import psutil
    if nr_cpus == -1:
        nr_processes = psutil.cpu_count()
    else:
        nr_processes = nr_cpus

    number_streamlines = len(streamlines)

    # Never spawn more workers than streamlines; keep at least one worker.
    if nr_processes >= number_streamlines:
        nr_processes = number_streamlines - 1
        if nr_processes < 1:
            nr_processes = 1

    chunk_size = int(number_streamlines / nr_processes)

    # Too few streamlines to be worth compressing/parallelizing.
    if chunk_size < 1:
        return streamlines
    fiber_batches = list(utils.chunks(streamlines, chunk_size))

    global _COMPRESSION_ERROR_THRESHOLD
    global _FIBER_BATCHES
    _COMPRESSION_ERROR_THRESHOLD = error_threshold
    _FIBER_BATCHES = fiber_batches

    # print("Main program using: {} GB".format(round(Utils.mem_usage(print_usage=False), 3)))
    pool = multiprocessing.Pool(processes=nr_processes)

    #Do not pass in data (doubles amount of memory needed), but only idx of shared memory
    # (needs only as much memory as single thread version (only main thread needs memory, others almost 0).
    # Shared memory version also faster (around 20-30%?).
    result = pool.map(compress_fibers_worker_shared_mem, range(0, len(fiber_batches)))

    pool.close()
    pool.join()

    streamlines_c = utils.flatten(result)
    return streamlines_c
def save_streamlines_as_trk_legacy(out_file, streamlines, affine, shape):
    """
    Save tracts in Trackvis '.trk' format via the old nib.trackvis API.

    Streamlines are written in coordinate space; the affine is only stored in
    the header (its translation is dropped because the streamline coordinates
    already carry the offset) and is not applied to the points.

    Args:
        out_file: path of the output file
        streamlines: sequence of streamlines in RASmm coordinates
                     (list of 2D numpy arrays)
        affine: 4x4 voxel-to-RASmm transformation
        shape: dimensions of the brain volume, e.g. [145, 174, 145]

    Returns:
        void
    """
    affine = np.abs(affine)
    # Drop the translation column: offsets are already part of the
    # streamline coordinates.
    affine[:3, 3] = 0

    # Build a trackvis header so the streamlines can be saved.
    trackvis_header = nib.trackvis.empty_header()
    trackvis_header['voxel_order'] = 'RAS'
    trackvis_header['dim'] = shape
    nib.trackvis.aff_to_hdr(affine, trackvis_header, pos_vox=False, set_order=False)

    # The legacy API expects (points, scalars, properties) tuples.
    nib.trackvis.write(out_file,
                       [(sl, None, None) for sl in streamlines],
                       trackvis_header,
                       points_space="rasmm")
def save_streamlines(out_file, streamlines, affine=None, shape=None, vox_sizes=None, vox_order='RAS'):
    """
    Save streamlines in .trk or .tck format, chosen by the extension of
    ``out_file``.

    Uses the new nib.streamlines API: streamlines are handed over in RASmm
    space (identity affine_to_rasmm) and the voxel-to-RASmm affine is stored
    in the header. Default parameter values correspond to the HCP data, whose
    affine is:
        [[ -1.25, 0. , 0. ,   90. ],
         [  0. , 1.25, 0. , -126. ],
         [  0. , 0. , 1.25,  -72. ],
         [  0. , 0. , 0. ,    1. ]]

    todo: use dipy.io.streamline.save_tractogram to save streamlines

    Args:
        out_file: path of the output file
        streamlines: sequence of streamlines in RASmm coordinates
        affine: 4x4 voxel-to-RASmm transformation (HCP default if None)
        shape: brain volume dimensions, default [145, 174, 145]
        vox_sizes: voxel sizes; if None, taken from the absolute diagonal of
            the affine
        vox_order: orientation convention, default 'RAS'

    Returns:
        void
    """
    if affine is None:
        affine = np.array([[-1.25, 0., 0., 90.],
                           [0., 1.25, 0., -126.],
                           [0., 0., 1.25, -72.],
                           [0., 0., 0., 1.]],
                          dtype=np.float32)
    if shape is None:
        shape = np.array([145, 174, 145], dtype=np.int16)
    if vox_sizes is None:
        # Absolute values of the affine diagonal (scaling terms).
        vox_sizes = np.abs(np.diag(affine)[:3]).astype(np.float32)

    # Header with the correct affine and number of streamlines.
    hdr = {
        'voxel_sizes': vox_sizes,
        'voxel_order': vox_order,
        'dimensions': shape,
        'voxel_to_rasmm': affine,
        'nb_streamlines': len(streamlines),
    }

    tractogram = nib.streamlines.Tractogram(streamlines, affine_to_rasmm=np.eye(4))
    nib.streamlines.save(tractogram, out_file, header=hdr)
def convert_tck_to_trk(filename_in, filename_out, reference_affine, reference_shape,
                       compress_err_thr=0.1, smooth=None, nr_cpus=-1, tracking_format="trk_legacy"):
    """
    Load a .tck tractogram, optionally smooth and compress it, and write it
    out in the requested format.

    Args:
        filename_in: input .tck file
        filename_out: output file path
        reference_affine: 4x4 voxel-to-RASmm transformation for the header
        reference_shape: brain volume dimensions for the header
        compress_err_thr: compression error threshold; None disables
            compression (compression also removes checkerboard artefacts
            from tracking on peaks)
        smooth: spline smoothing factor; None disables smoothing
        nr_cpus: worker count for parallel compression (-1 = all cores)
        tracking_format: "trk_legacy" uses the old trackvis API, anything
            else the new nib.streamlines API

    Returns:
        void
    """
    streamlines = nib.streamlines.load(filename_in).streamlines  # load fibers (tck)

    if smooth is not None:
        streamlines = [spline(sl, s=smooth) for sl in streamlines]

    if compress_err_thr is not None:
        streamlines = compress_streamlines(streamlines, compress_err_thr, nr_cpus=nr_cpus)

    if tracking_format == "trk_legacy":
        save_streamlines_as_trk_legacy(filename_out, streamlines, reference_affine, reference_shape)
    else:
        save_streamlines(filename_out, streamlines, reference_affine, reference_shape)
def create_empty_tractogram(filename_out, reference_file,
                            tracking_format="trk_legacy"):
    """
    Write a tractogram containing zero streamlines, using the affine and
    spatial dimensions of a reference image for the header.

    Args:
        filename_out: path of the output tractogram
        reference_file: NIfTI image providing affine and shape
        tracking_format: "trk_legacy" uses the old trackvis API, anything
            else the new nib.streamlines API

    Returns:
        void
    """
    ref_img = nib.load(reference_file)
    reference_affine = ref_img.affine
    # Use the header shape instead of the deprecated img.get_data() (removed
    # in nibabel >= 5.0); this also avoids loading the image data into memory.
    reference_shape = ref_img.shape[:3]

    streamlines = []

    if tracking_format == "trk_legacy":
        save_streamlines_as_trk_legacy(filename_out, streamlines, reference_affine, reference_shape)
    else:
        save_streamlines(filename_out, streamlines, reference_affine, reference_shape)
def resample_fibers(streamlines, nb_points=12):
    """
    Resample every streamline to a fixed number of points.

    Args:
        streamlines: list of streamlines
        nb_points: number of points each resampled streamline should have

    Returns:
        list of resampled streamlines
    """
    # A fresh ResampleFeature per streamline, matching dipy's usage pattern.
    return [ResampleFeature(nb_points=nb_points).extract(sl)
            for sl in streamlines]
def smooth_streamlines(streamlines, smoothing_factor=10):
    """
    Smooth streamlines with a spline fit.

    Args:
        streamlines: list of streamlines
        smoothing_factor: 10: slight smoothing, 100: very smooth from
            beginning to end

    Returns:
        list of smoothed streamlines
    """
    return [spline(sl, s=smoothing_factor) for sl in streamlines]
def get_streamline_statistics(streamlines, subsample=False, raw=False):
    """
    Get streamline statistics in mm.

    Segment lengths are computed vectorized (np.diff + np.linalg.norm) instead
    of a per-point Python loop; values are identical, but the inner loop runs
    in C.

    Args:
        streamlines: list of streamlines
        subsample: evaluate only every 20th streamline to reduce runtime
        raw: if True return the raw lists of fiber lengths and point spacings

    Returns:
        (mean streamline length, mean space between two following points,
         max space between two following points)
        or, if raw is True, (list of lengths, list of spacings)
    """
    # Subsample for faster processing if requested.
    step_size = 20 if subsample else 1

    lengths = []
    spaces = []  # spaces between 2 consecutive points
    for j in range(0, len(streamlines), step_size):
        sl = np.asarray(streamlines[j])
        # Euclidean length of each consecutive-point segment.
        # np.diff of a single-point streamline yields an empty array,
        # so such streamlines contribute length 0 and no spacings,
        # matching the previous behavior.
        seg_lens = np.linalg.norm(np.diff(sl, axis=0), axis=1)
        spaces.extend(seg_lens)
        lengths.append(float(seg_lens.sum()))

    if raw:
        return lengths, spaces
    else:
        return np.array(lengths).mean(), np.array(spaces).mean(), np.array(spaces).max()
def filter_streamlines_leaving_mask(streamlines, mask):
    """
    Remove all streamlines that exit the mask.

    Streamlines are first subsegmented to a fine step so that no segment can
    skip over a mask voxel. A streamline is kept only if every one of its
    points lies inside the (nonzero) mask.
    NOTE(review): points are mapped to voxels by truncation (int()); assumes
    streamline coordinates are already in voxel space -- confirm at callers.
    """
    max_seq_len = 0.1
    segmented = list(utils_trk.subsegment(streamlines, max_seq_len))

    kept = []
    for sl in segmented:
        inside = True
        for point in sl:
            if mask[int(point[0]), int(point[1]), int(point[2])] == 0:
                inside = False
                break
        if inside:
            kept.append(sl)
    return kept
def get_best_original_peaks(peaks_pred, peaks_orig, peak_len_thr=0.1):
    """
    Find the peak from peaks_orig which is closest to the peak in peaks_pred.

    NOTE(review): despite the docstring below mentioning 4 peaks, only the
    first three peaks (channels 0:9) of peaks_orig are considered.
    NOTE(review): peaks_orig is modified in place (voxels with short predicted
    peaks are zeroed) -- pass a copy if the original array must be preserved.

    Args:
        peaks_pred: file containing 1 peak [x,y,z,3]
        peaks_orig: file containing 4 peaks [x,y,z,9]
        peak_len_thr: all peaks shorter than this threshold will be removed

    Returns:
        Image containing 1 peak [x,y,z,3]
    """
    def _get_most_aligned_peak(pred, orig):
        # orig: list of three [x,y,z,3] arrays -> stacked to [3,x,y,z,3].
        orig = np.array(orig)
        # peak_utils.angle_last_dim presumably returns a cosine-like alignment
        # score over the last dim (larger |value| = better aligned) -- TODO
        # confirm in peak_utils.
        angle1 = abs(peak_utils.angle_last_dim(pred, orig[0]))
        angle2 = abs(peak_utils.angle_last_dim(pred, orig[1]))
        angle3 = abs(peak_utils.angle_last_dim(pred, orig[2]))
        # Index (0..2) of the best-aligned original peak per voxel.
        argmax = np.argmax(np.stack([angle1, angle2, angle3], axis=-1), axis=-1)
        x, y, z = (orig.shape[1], orig.shape[2], orig.shape[3])
        # Fancy indexing: select, per voxel, the peak chosen by argmax.
        return orig[tuple([argmax] + np.ogrid[:x, :y, :z])]

        # Other ways that would also work
        # return orig[argmax, np.arange(x)[:, None, None], np.arange(y)[:, None], np.arange(z)]
        # return np.take_along_axis(orig, argmax[None, ..., None], axis=0)[0] # only supported in newest numpy version

    peaks_pred = np.nan_to_num(peaks_pred)
    peaks_orig = np.nan_to_num(peaks_orig)

    #Remove all peaks where predicted peaks are too short
    peaks_orig[np.linalg.norm(peaks_pred, axis=-1) < peak_len_thr] = 0

    best_orig = _get_most_aligned_peak(peaks_pred,
                                       [peaks_orig[:, :, :, 0:3],
                                        peaks_orig[:, :, :, 3:6],
                                        peaks_orig[:, :, :, 6:9]])
    return best_orig
def get_weighted_mean_of_peaks(best_orig, tom, weight=0.5):
    """
    Calculate weighted mean between best_orig peaks and TOM peaks.

    NOTE(review): `tom` is modified in place (peaks pointing away from
    best_orig are flipped) -- pass a copy if the original TOM array must be
    preserved.

    Args:
        best_orig: original peaks
        tom: prior
        weight: how much to apply prior (0: only original signal, 1: only prior)

    Returns:
        weighted mean
    """
    # peak_utils.angle_last_dim presumably returns a signed cosine-like
    # alignment score (negative = peaks point in opposite directions) --
    # TODO confirm in peak_utils.
    angles = peak_utils.angle_last_dim(best_orig, tom)
    # make sure to take mean along smaller angle (<90 degree), not along the bigger one (>90 degree)
    tom[angles < 0] *= -1  # flip peak
    stacked = np.stack([best_orig, tom])
    return np.average(stacked, axis=0, weights=[1 - weight, weight])
def add_to_each_streamline(streamlines, scalar):
    """
    Add a scalar value to every coordinate of every streamline.

    Args:
        streamlines: list of streamlines
        scalar: offset added to all coordinates

    Returns:
        list of shifted streamlines (new arrays; inputs are not modified)
    """
    return [np.array(sl) + scalar for sl in streamlines]
def add_to_each_streamline_axis(streamlines, scalar, axis="x"):
    """
    Add a scalar to one coordinate axis of every point of every streamline.

    Args:
        streamlines: sequence of streamlines (each array-like of shape [n_points, 3])
        scalar: value added to the selected coordinate
        axis: "x" | "y" | "z"

    Returns:
        list of shifted streamlines (inputs are copied, not modified)

    Raises:
        ValueError: if axis is not "x", "y" or "z" (previously an invalid axis
        silently returned unshifted copies; now consistent with ``flip``).
    """
    axis_idx = {"x": 0, "y": 1, "z": 2}
    if axis not in axis_idx:
        raise ValueError("Unsupported axis")
    col = axis_idx[axis]
    sl_new = []
    for sl in streamlines:
        s = np.array(sl)
        s[:, col] += scalar
        sl_new.append(s)
    return sl_new
def flip(streamlines, axis="x"):
    """
    Mirror every streamline by negating one coordinate axis.

    Args:
        streamlines: sequence of streamlines (arrays of shape [n_points, 3])
        axis: "x" | "y" | "z"

    Returns:
        list of flipped copies (inputs are not modified)

    Raises:
        ValueError: if axis is invalid. The check is hoisted out of the loop
        (it is loop-invariant), so an invalid axis now also fails on empty input.
    """
    axis_idx = {"x": 0, "y": 1, "z": 2}
    if axis not in axis_idx:
        raise ValueError("Unsupported axis")
    col = axis_idx[axis]
    new_sl = []
    for sl in streamlines:
        tmp = np.copy(sl)
        tmp[:, col] = tmp[:, col] * -1
        new_sl.append(tmp)
    return new_sl
def transform_point(p, affine):
    """
    Apply affine to point p

    Args:
        p: [x, y, z]
        affine: [4x4] matrix

    Returns:
        [x, y, z]
    """
    rotation = affine[:3, :3]   # linear part
    translation = affine[:3, 3]  # offset part
    return rotation.dot(p) + translation
def invert_streamlines(streamlines, reference_img, affine, axis="x"):
    """
    Invert streamlines. If inverting image voxel order (img[::-1]) we can do this
    inversion to the streamlines and the result properly fits to the inverted image.

    Args:
        streamlines: sequence of streamlines (points in mm space)
        reference_img: 3d array
        affine: 4x4 matrix
        axis: x | y | z

    Returns:
        list of mirrored streamlines

    Raises:
        ValueError: if axis is not one of "x", "y", "z"
    """
    img_shape = np.array(reference_img.shape)
    # Image center in voxel space, mapped into mm space via the affine.
    img_center_voxel_space = (img_shape - 1) / 2.
    img_center_mm_space = transform_point(img_center_voxel_space, affine)

    axis_to_idx = {"x": 0, "y": 1, "z": 2}
    if axis not in axis_to_idx:
        raise ValueError("invalid axis")
    idx = axis_to_idx[axis]

    # Mirroring about the center along axis a: x'_a = -x_a + 2 * center_a.
    affine_invert = np.copy(affine)
    affine_invert[0, 3] = 0
    affine_invert[1, 3] = 0
    affine_invert[2, 3] = 0
    affine_invert[0, 0] = 1
    affine_invert[1, 1] = 1
    affine_invert[2, 2] = 1
    affine_invert[idx, idx] = -1
    # BUG FIX: the translation must use the center coordinate of the mirrored
    # axis itself; the original code used img_center_mm_space[1] for all axes.
    affine_invert[idx, 3] = img_center_mm_space[idx] * 2
    # (debug print of affine_invert removed)

    return list(transform_streamlines(streamlines, affine_invert))
def resample_to_same_distance(streamlines, max_nr_points=10, ANTI_INTERPOL_MULT=1):
    """Resample all streamlines so points are spaced a common distance apart.

    The spacing is the length of the longest streamline divided by
    max_nr_points; each streamline gets len/spacing segments (times
    ANTI_INTERPOL_MULT extra points for later anti-aliased interpolation).
    """
    seg_dist = sl_length(streamlines).max() / max_nr_points
    resampled = []
    for streamline in streamlines:
        nr_segments = int(sl_length(streamline) / seg_dist)
        resampled.append(set_number_of_points(streamline,
                                              nb_points=nr_segments * ANTI_INTERPOL_MULT))
    return resampled
def pad_sl_with_zeros(streamlines, target_len, pad_point):
    """Pad every streamline to target_len points by repeating pad_point at the end."""
    return [list(sl) + [pad_point] * (target_len - len(sl)) for sl in streamlines]
def get_idxs_of_closest_points(streamlines, target_point):
    """For each streamline, return the index of its point closest to target_point."""
    closest_idxs = []
    for sl in streamlines:
        dists = [abs(np.linalg.norm(p - target_point)) for p in sl]
        closest_idxs.append(np.array(dists).argmin())
    return closest_idxs
| [
"[email protected]"
] | |
bf1a7529e2315658f50cc0c20cf59fc4d0d940fd | ababed0e1a54f4888440edd20cbfdf6beb3cd20d | /backend/menu/api/v1/viewsets.py | 67ee4177ad30850e6ca22a7f0ce52813b2fbd299 | [] | no_license | crowdbotics-apps/asd-18920 | 1766fa6c4334d1c1f8a97f6aa4da8961742bcc2c | 5022fcd5f0b0522ce53438270e2ae65d1b652dab | refs/heads/master | 2022-11-17T15:02:25.688634 | 2020-07-15T16:09:00 | 2020-07-15T16:09:00 | 279,915,037 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,555 | py | from rest_framework import authentication
from menu.models import ItemVariant, Country, Item, Category, Review
from .serializers import (
ItemVariantSerializer,
CountrySerializer,
ItemSerializer,
CategorySerializer,
ReviewSerializer,
)
from rest_framework import viewsets
class CategoryViewSet(viewsets.ModelViewSet):
    """CRUD API endpoints for menu categories (session or token auth)."""
    serializer_class = CategorySerializer
    authentication_classes = (
        authentication.SessionAuthentication,
        authentication.TokenAuthentication,
    )
    queryset = Category.objects.all()
class ItemViewSet(viewsets.ModelViewSet):
    """CRUD API endpoints for menu items (session or token auth)."""
    serializer_class = ItemSerializer
    authentication_classes = (
        authentication.SessionAuthentication,
        authentication.TokenAuthentication,
    )
    queryset = Item.objects.all()
class ItemVariantViewSet(viewsets.ModelViewSet):
    """CRUD API endpoints for item variants (session or token auth)."""
    serializer_class = ItemVariantSerializer
    authentication_classes = (
        authentication.SessionAuthentication,
        authentication.TokenAuthentication,
    )
    queryset = ItemVariant.objects.all()
class ReviewViewSet(viewsets.ModelViewSet):
    """CRUD API endpoints for reviews (session or token auth)."""
    serializer_class = ReviewSerializer
    authentication_classes = (
        authentication.SessionAuthentication,
        authentication.TokenAuthentication,
    )
    queryset = Review.objects.all()
class CountryViewSet(viewsets.ModelViewSet):
    """CRUD API endpoints for countries (session or token auth)."""
    serializer_class = CountrySerializer
    authentication_classes = (
        authentication.SessionAuthentication,
        authentication.TokenAuthentication,
    )
    queryset = Country.objects.all()
| [
"[email protected]"
] | |
efcd1f6da93da3bf75a7685d2f07a65c5f588702 | 52ab2da7b131643a344ee5344d8f35aebd6e2eed | /WebProject1/myvenv/lib/python3.6/site-packages/sqlalchemy/event/legacy.py | 049df81aafa7b163be69d6561ad9e9e6ff7ef534 | [
"MIT"
] | permissive | ucsb-cs48-w19/5pm-findtheroommate | cd6db6c4cf3ee6f159b04456ba13b1ef684c7546 | d9d01b95c478e7493b5b32c8b56ceed00578b188 | refs/heads/master | 2020-04-16T01:00:16.617610 | 2019-03-19T20:42:38 | 2019-03-19T20:42:38 | 165,158,037 | 2 | 1 | MIT | 2019-03-05T00:46:12 | 2019-01-11T01:28:11 | Python | UTF-8 | Python | false | false | 5,900 | py | # event/legacy.py
# Copyright (C) 2005-2019 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Routines to handle adaption of legacy call signatures,
generation of deprecation notes and docstrings.
"""
from .. import util
def _legacy_signature(since, argnames, converter=None):
def leg(fn):
if not hasattr(fn, "_legacy_signatures"):
fn._legacy_signatures = []
fn._legacy_signatures.append((since, argnames, converter))
return fn
return leg
def _wrap_fn_for_legacy(dispatch_collection, fn, argspec):
    """Return ``fn``, or a wrapper adapting it from a legacy listener signature.

    Compares the ``argspec`` of the user-supplied listener against each legacy
    signature registered for this event.  When one matches, the returned
    wrapper translates the modern argument list into the legacy one before
    calling ``fn``; otherwise ``fn`` is returned unchanged.
    """
    for since, argnames, conv in dispatch_collection.legacy_signatures:
        if argnames[-1] == "**kw":
            has_kw = True
            argnames = argnames[0:-1]
        else:
            has_kw = False

        # a legacy signature matches when the listener declares exactly as
        # many positional args as that signature, with matching **kw usage
        if len(argnames) == len(argspec.args) and has_kw is bool(
            argspec.keywords
        ):

            if conv:
                # a converter maps the modern args directly to legacy args
                assert not has_kw

                def wrap_leg(*args):
                    return fn(*conv(*args))

            else:

                def wrap_leg(*args, **kw):
                    # name the modern positional args, then pull them out in
                    # the legacy order expected by ``fn``
                    argdict = dict(zip(dispatch_collection.arg_names, args))
                    args = [argdict[name] for name in argnames]
                    if has_kw:
                        return fn(*args, **kw)
                    else:
                        return fn(*args)

            return wrap_leg
    else:
        # no legacy signature matched: use the listener as-is
        return fn
def _indent(text, indent):
return "\n".join(indent + line for line in text.split("\n"))
def _standard_listen_example(dispatch_collection, sample_target, fn):
    """Render example code for the current (non-legacy) listener signature.

    Produces the decorator-style example and, for events with many
    arguments, the ``named=True`` keyword-style example as well.
    """
    # example body lines for the named-argument style: "arg = kw['arg']"
    example_kw_arg = _indent(
        "\n".join(
            "%(arg)s = kw['%(arg)s']" % {"arg": arg}
            for arg in dispatch_collection.arg_names[0:2]
        ),
        "    ",
    )
    if dispatch_collection.legacy_signatures:
        # note the version at which the current signature took effect
        current_since = max(
            since
            for since, args, conv in dispatch_collection.legacy_signatures
        )
    else:
        current_since = None
    text = (
        "from sqlalchemy import event\n\n"
        "# standard decorator style%(current_since)s\n"
        "@event.listens_for(%(sample_target)s, '%(event_name)s')\n"
        "def receive_%(event_name)s("
        "%(named_event_arguments)s%(has_kw_arguments)s):\n"
        "    \"listen for the '%(event_name)s' event\"\n"
        "\n    # ... (event handling logic) ...\n"
    )

    if len(dispatch_collection.arg_names) > 3:
        # long signatures get an additional named-argument style example
        text += (
            "\n# named argument style (new in 0.9)\n"
            "@event.listens_for("
            "%(sample_target)s, '%(event_name)s', named=True)\n"
            "def receive_%(event_name)s(**kw):\n"
            "    \"listen for the '%(event_name)s' event\"\n"
            "%(example_kw_arg)s\n"
            "\n    # ... (event handling logic) ...\n"
        )

    text %= {
        "current_since": " (arguments as of %s)" % current_since
        if current_since
        else "",
        "event_name": fn.__name__,
        "has_kw_arguments": ", **kw" if dispatch_collection.has_kw else "",
        "named_event_arguments": ", ".join(dispatch_collection.arg_names),
        "example_kw_arg": example_kw_arg,
        "sample_target": sample_target,
    }
    return text
def _legacy_listen_examples(dispatch_collection, sample_target, fn):
    """Render example code for each deprecated legacy listener signature."""
    text = ""
    for since, args, conv in dispatch_collection.legacy_signatures:
        text += (
            "\n# DEPRECATED calling style (pre-%(since)s, "
            "will be removed in a future release)\n"
            "@event.listens_for(%(sample_target)s, '%(event_name)s')\n"
            "def receive_%(event_name)s("
            "%(named_event_arguments)s%(has_kw_arguments)s):\n"
            "    \"listen for the '%(event_name)s' event\"\n"
            "\n    # ... (event handling logic) ...\n"
            % {
                "since": since,
                "event_name": fn.__name__,
                "has_kw_arguments": " **kw"
                if dispatch_collection.has_kw
                else "",
                "named_event_arguments": ", ".join(args),
                "sample_target": sample_target,
            }
        )
    return text
def _version_signature_changes(parent_dispatch_cls, dispatch_collection):
    """Build the ``.. deprecated::`` docstring note for a changed signature."""
    # only the earliest legacy signature determines the "since" version
    since, args, conv = dispatch_collection.legacy_signatures[0]
    return (
        "\n.. deprecated:: %(since)s\n"
        "    The :class:`.%(clsname)s.%(event_name)s` event now accepts the \n"
        "    arguments ``%(named_event_arguments)s%(has_kw_arguments)s``.\n"
        "    Support for listener functions which accept the previous \n"
        "    argument signature(s) listed above as \"deprecated\" will be \n"
        "    removed in a future release."
        % {
            "since": since,
            "clsname": parent_dispatch_cls.__name__,
            "event_name": dispatch_collection.name,
            "named_event_arguments": ", ".join(dispatch_collection.arg_names),
            "has_kw_arguments": ", **kw" if dispatch_collection.has_kw else "",
        }
    )
def _augment_fn_docs(dispatch_collection, parent_dispatch_cls, fn):
    """Inject generated signature examples into ``fn``'s docstring.

    Combines the standard example, any legacy-signature examples and the
    deprecation note, then splices the result into the existing docstring.
    """
    header = (
        ".. container:: event_signatures\n\n"
        "     Example argument forms::\n"
        "\n"
    )

    sample_target = getattr(parent_dispatch_cls, "_target_class_doc", "obj")
    text = header + _indent(
        _standard_listen_example(dispatch_collection, sample_target, fn),
        " " * 8,
    )
    if dispatch_collection.legacy_signatures:
        # document the deprecated forms and the version they changed in
        text += _indent(
            _legacy_listen_examples(dispatch_collection, sample_target, fn),
            " " * 8,
        )

        text += _version_signature_changes(
            parent_dispatch_cls, dispatch_collection)

    return util.inject_docstring_text(fn.__doc__, text, 1)
| [
"[email protected]"
] | |
399b9de55761113ccab69d01a9bd68a988a88fe6 | bed40794a78225e070c49a72209d447757ec8343 | /python_crawl/section3-1.py | 3a3113bc3a6b4a52b626ac8857b76dd0fe10852d | [] | no_license | moorekwon/crawling-practice | 458ffe31b3a7a91fad6547ef76f9a428376d542f | c5bc989ced353daed34d53410c261ce4d4561d4c | refs/heads/master | 2022-09-13T06:46:31.034858 | 2020-05-31T06:43:28 | 2020-05-31T06:43:28 | 266,505,397 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,094 | py | import urllib.request
from urllib.parse import urlparse
import sys
import io
# Re-wrap stdout/stderr as UTF-8 so Korean text prints correctly on the console.
sys.stdout = io.TextIOWrapper(sys.stdout.detach(), encoding = 'utf-8')
sys.stderr = io.TextIOWrapper(sys.stderr.detach(), encoding = 'utf-8')

# Fetch the encar.com front page; `mem` is an http.client.HTTPResponse object.
url = 'http://www.encar.com'
mem = urllib.request.urlopen(url)

# print('type(mem) >> ', type(mem))
# print('mem.geturl() >> ', mem.geturl())
# print('mem.status >> ', mem.status)
# print('mem.getheaders() >> ', mem.getheaders())
# print('mem.getcode() >> ', mem.getcode())
# print("mem.read(200).decode('euc-kr') >> ", mem.read(200).decode('euc-kr'))
# print("urlparse('http://www.encar.com?id=moorekwon&pw=1111').query >> ", urlparse('http://www.encar.com?id=moorekwon&pw=1111').query)

# Query the ipify API for this machine's public IP, building the query
# string explicitly with urlencode.
API = 'https://api.ipify.org'
before_params = {
    'format': 'json'
}
print('before_params >> ', before_params)

# urlencode turns the dict into 'format=json'
after_params = urllib.parse.urlencode(before_params)
print('after_params >> ', after_params)

URL = API + '?' + after_params
print('URL >> ', URL)

# response body is JSON bytes; decode to str for display
data = urllib.request.urlopen(URL).read()
print('data >> ', data)

text = data.decode('utf-8')
print('text >> ', text)
"[email protected]"
] | |
a868176f862739f842c05c4e3447d5f92ff293ac | b95fa99bb1ba2210b73251614d2613363c37f932 | /deploy/dot-product/scripts/dot-67.py | e5a37f1559951e11ad8cf6b305503d2eb3c83ad0 | [] | no_license | lingxiao/learn-adj-relation | d1a8894fefc776ec0bd414b5f038361ed4b79d16 | dc4285af19e53d7e2d015eb6394f6c601c707da0 | refs/heads/master | 2020-12-30T16:27:51.531268 | 2017-06-07T18:59:48 | 2017-06-07T18:59:48 | 87,714,049 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,144 | py | ############################################################
# Module : A series of measures on the graph for experiments
# Date : April 2nd, 2017
# Author : Xiao Ling
############################################################
import os
import numpy as np
from utils import *
from scripts import *
from app.config import *
############################################################
'''
paths
'''
# index of the word-pair batch this worker script processes
batch = 67

# dirs = working_dirs('dot-product',['pairs', 'scripts','shells'])
out_dirs = data_dirs('dot-product', ['outputs'])
# working dirs for every (split, kind) combination, e.g. 'train-pairs'
word_dirs = working_dirs( 'words'
                        , [p + '-' + s for p in ['train', 'valid', 'all'] \
                           for s in ['pairs', 'words'] ])

word_2_vec = get_path('word2vec')
word_2_vec_sm = get_path('word2vec-sm')

current_job = 'all-pairs'

# input pairs for this batch and the corresponding output file
word_pair_path = os.path.join(word_dirs[current_job] , 'batch-' + str(batch) + '.txt')
out_path = os.path.join(out_dirs['outputs'] , current_job + '-' + str(batch) + '.txt')

print('\n>> running dot-' + str(batch) + '.py at ' + current_job)
# refresh=True recomputes dot products even if the output file already exists
dot(word_2_vec, word_pair_path, out_path, refresh = True)
| [
"[email protected]"
] | |
1e319b6c7309a5b83ebfe44b591850b2b6962be3 | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/2/e6v.py | 75b5df2cc32148930528e7e89a9f832fba9b69b1 | [] | no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
    # Python 2 code. Tokens must be wrapped in standalone quote tokens:
    # ['"', <words...>, '"']; the words between the quotes are printed
    # space-joined, an empty pair of quotes prints a blank line.
    if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
        if len(lineRemaining) > 2:
            #data to print
            lineRemaining = lineRemaining[1:-1]
            print ' '.join(lineRemaining)
        else:
            print
def main(fileName):
    # Interpret each line of the file: only lines whose first token is the
    # keyword 'e6V' are executed; any other line aborts with ERROR.
    with open(fileName) as f:
        for line in f:
            data = line.split()
            if data[0] == 'e6V':
                printFunction(data[1:])
            else:
                print 'ERROR'
                return
main(sys.argv[1]) | [
"[email protected]"
] | |
b00578cb33932e544783d41cc72fa666201bb10e | 4fd0a43fb3fdbc5ce355b050d0a6506b97bb5d79 | /src/basic/mqtt_client.py | f5bb543c0f8e7a81272e10ad1741abd0064590ae | [] | no_license | Shoumik-Gandre/wn_miniproject | a175e871b0931e6bcb324fbcf81b3dbbd09186e6 | 9a61a27c7eee15486cf688a3d66ceae23f8d5b47 | refs/heads/main | 2023-04-16T13:48:53.383291 | 2021-04-29T13:55:45 | 2021-04-29T13:55:45 | 362,832,371 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,001 | py | import paho.mqtt.client as mqtt
# The callback for when the client receives a CONNACK response from the server.
def on_connect(client, userdata, flags, rc):
    """Broker CONNACK handler: log the result code and (re)subscribe."""
    print("Connected with result code " + str(rc))

    # Subscribing in on_connect() means that if we lose the connection and
    # reconnect then subscriptions will be renewed.
    client.subscribe("WN/test")
    client.subscribe("WN/topic")
# The callback for when a PUBLISH message is received from the server.
def on_message(client, userdata, msg):
    """Print every received PUBLISH as '<topic> <payload>'."""
    print(msg.topic + " " + str(msg.payload))
def main():
    """Connect to the public Mosquitto test broker and process messages forever."""
    client = mqtt.Client()
    client.on_connect = on_connect
    client.on_message = on_message

    client.connect("test.mosquitto.org", 1883, 60)

    # Blocking call that processes network traffic, dispatches callbacks and
    # handles reconnecting.
    # Other loop*() functions are available that give a threaded interface and a
    # manual interface.
    client.loop_forever()
main()
| [
"[email protected]"
] | |
c2a44a685f68ef321abc531cdd8f556292da5f9c | b797609bb7ddf7b677909b0c5dd0bed618659e9f | /polarcordinates.py | c6ebb77e4553e36b74ec62a7f6a9effc1b7f79e3 | [] | no_license | krishna-rawat-hp/HackerRank-solution-python | fc969a7de3e54be5c90f5877699121714df14821 | a813e76a307046409217eb1773a6b3e0337c142d | refs/heads/master | 2022-12-23T19:59:12.667718 | 2020-09-26T12:35:24 | 2020-09-26T12:35:24 | 288,746,928 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 78 | py | import cmath
# Read a complex number from stdin and print its polar form:
# modulus first, then phase angle in radians.
z = complex(input())
print(abs(z))
print(cmath.phase(z))
"[email protected]"
] | |
4a5429e17840c12de155763259947c16be4142b8 | 7f9dfa2cccf77764940ffcbbf92939e37c138c43 | /crawl_file/file_path/pylab_examples/anscombe.py | 753e4342916a1d85af7b3ce7af20c14c7110e3c9 | [] | no_license | zhangmman/scrapy_spider | f80bd8d213edde0dea083babe610ca7b1bc449a3 | 2bda4aa29f2550c649c939045ce4fcdea2736187 | refs/heads/master | 2020-09-11T13:58:49.930929 | 2019-12-21T08:40:56 | 2019-12-21T08:43:43 | 222,080,585 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,934 | py | from __future__ import print_function
"""
Edward Tufte uses this example from Anscombe to show 4 datasets of x
and y that have the same mean, standard deviation, and regression
line, but which are qualitatively different.
matplotlib fun for a rainy day
"""
import matplotlib.pyplot as plt
import numpy as np
# Anscombe's quartet: four (x, y) datasets with identical summary statistics
# (mean, std, correlation, regression line) but very different shapes.
x = np.array([10, 8, 13, 9, 11, 14, 6, 4, 12, 7, 5])
y1 = np.array([8.04, 6.95, 7.58, 8.81, 8.33, 9.96, 7.24, 4.26, 10.84, 4.82, 5.68])
y2 = np.array([9.14, 8.14, 8.74, 8.77, 9.26, 8.10, 6.13, 3.10, 9.13, 7.26, 4.74])
y3 = np.array([7.46, 6.77, 12.74, 7.11, 7.81, 8.84, 6.08, 5.39, 8.15, 6.42, 5.73])
x4 = np.array([8, 8, 8, 8, 8, 8, 8, 19, 8, 8, 8])
y4 = np.array([6.58, 5.76, 7.71, 8.84, 8.47, 7.04, 5.25, 12.50, 5.56, 7.91, 6.89])
def fit(x):
    """Shared Anscombe regression line: y = 3 + 0.5 * x."""
    return 0.5 * x + 3
# Endpoints for drawing the shared regression line across each dataset's range.
xfit = np.array([np.amin(x), np.amax(x)])

# One subplot per dataset; points as black squares, fit as a red line.
plt.subplot(221)
plt.plot(x, y1, 'ks', xfit, fit(xfit), 'r-', lw=2)
plt.axis([2, 20, 2, 14])
plt.setp(plt.gca(), xticklabels=[], yticks=(4, 8, 12), xticks=(0, 10, 20))
plt.text(3, 12, 'I', fontsize=20)

plt.subplot(222)
plt.plot(x, y2, 'ks', xfit, fit(xfit), 'r-', lw=2)
plt.axis([2, 20, 2, 14])
plt.setp(plt.gca(), xticklabels=[], yticks=(4, 8, 12), yticklabels=[], xticks=(0, 10, 20))
plt.text(3, 12, 'II', fontsize=20)

plt.subplot(223)
plt.plot(x, y3, 'ks', xfit, fit(xfit), 'r-', lw=2)
plt.axis([2, 20, 2, 14])
plt.text(3, 12, 'III', fontsize=20)
plt.setp(plt.gca(), yticks=(4, 8, 12), xticks=(0, 10, 20))

plt.subplot(224)
# dataset IV has its own x values, so recompute the fit-line endpoints
xfit = np.array([np.amin(x4), np.amax(x4)])
plt.plot(x4, y4, 'ks', xfit, fit(xfit), 'r-', lw=2)
plt.axis([2, 20, 2, 14])
plt.setp(plt.gca(), yticklabels=[], yticks=(4, 8, 12), xticks=(0, 10, 20))
plt.text(3, 12, 'IV', fontsize=20)

# verify the stats: all four pairs share mean, std and correlation
pairs = (x, y1), (x, y2), (x, y3), (x4, y4)
for x, y in pairs:
    print('mean=%1.2f, std=%1.2f, r=%1.2f' % (np.mean(y), np.std(y), np.corrcoef(x, y)[0][1]))

plt.show()
| [
"[email protected]"
] | |
1504d39ef20e08c04e1cdc4746b68ebbb0bcc192 | d2f91b93ad42aaefa5fc315a9b3a5d45d07fa705 | /slbman/venv/Lib/site-packages/aliyun/api/rest/rds/RdsDescribeDBInstanceClassesRequest.py | 33ce5a2e88dcb9457c34ba33df1ea05109b25fc6 | [] | no_license | junlongzhou5566/managePlatform | 66cb5bc5b176147ff0038819924f7efa8df1d556 | 3201ba1a11b05c86db5f42aa9ca8eaf1cc20e216 | refs/heads/master | 2021-03-29T00:58:23.337808 | 2020-03-17T09:50:21 | 2020-03-17T09:50:21 | 247,910,365 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 312 | py | '''
Created by auto_sdk on 2015.06.23
'''
from aliyun.api.base import RestApi
class RdsDescribeDBInstanceClassesRequest(RestApi):
    """Auto-generated request object for Aliyun RDS ``DescribeDBInstanceClasses``."""

    def __init__(self,domain='rds.aliyuncs.com',port=80):
        RestApi.__init__(self,domain, port)

    def getapiname(self):
        # API name encodes endpoint, action and API version (2013-05-28).
        return 'rds.aliyuncs.com.DescribeDBInstanceClasses.2013-05-28'
| [
"[email protected]@qq.com"
] | [email protected]@qq.com |
cf5232006921ce80f92f29008e1e66683214afa9 | 4f935960c688bb306a9808b9a0f47480a1a3d33a | /fastai2/callback/cutmix.py | 1f182c8d3701ad765019fd43909d6f91fe436c03 | [
"Apache-2.0"
] | permissive | AccidentalGuru/fastai2 | c3297919f2c2455f8c8a5ee81a5590afe87df34a | e816625945d87c2d9ac6521150f235942912bf74 | refs/heads/master | 2021-05-26T22:59:55.066904 | 2020-04-08T13:18:11 | 2020-04-08T13:18:11 | 254,182,464 | 1 | 0 | Apache-2.0 | 2020-04-08T19:33:43 | 2020-04-08T19:33:42 | null | UTF-8 | Python | false | false | 2,296 | py | # AUTOGENERATED! DO NOT EDIT! File to edit: nbs/74_callback.cutmix.ipynb (unless otherwise specified).
__all__ = ['CutMix']
# Cell
from torch.distributions.beta import Beta
from ..vision.all import *
# Cell
class CutMix(Callback):
    "Implementation of `https://arxiv.org/abs/1905.04899`"
    run_after,run_valid = [Normalize],False
    def __init__(self, alpha=1.): self.distrib = Beta(tensor(alpha), tensor(alpha))

    def begin_fit(self):
        # Losses with `y_int` (e.g. cross entropy) require integer targets, so
        # mixed targets must be handled by our own loss wrapper `lf` instead.
        self.stack_y = getattr(self.learn.loss_func, 'y_int', False)
        if self.stack_y: self.old_lf,self.learn.loss_func = self.learn.loss_func,self.lf

    def after_fit(self):
        # Restore the user's original loss function.
        if self.stack_y: self.learn.loss_func = self.old_lf

    def begin_batch(self):
        W, H = self.xb[0].size(3), self.xb[0].size(2)
        # Sample the mix ratio from Beta(alpha, alpha); keeping the larger of
        # (lam, 1-lam) ensures the original image always dominates the mix.
        lam = self.distrib.sample((1,)).squeeze().to(self.x.device)
        lam = torch.stack([lam, 1-lam])
        self.lam = lam.max()
        # Pair every sample with a randomly shuffled partner from the batch.
        shuffle = torch.randperm(self.y.size(0)).to(self.x.device)
        xb1,self.yb1 = tuple(L(self.xb).itemgot(shuffle)),tuple(L(self.yb).itemgot(shuffle))
        nx_dims = len(self.x.size())  # NOTE(review): unused - candidate for removal
        x1, y1, x2, y2 = self.rand_bbox(W, H, self.lam)
        # Paste the partner's patch into the input batch in place.
        self.learn.xb[0][:, :, x1:x2, y1:y2] = xb1[0][:, :, x1:x2, y1:y2]
        # Recompute lam from the actual (clamped) patch area.
        self.lam = (1 - ((x2-x1)*(y2-y1))/(W*H)).type(torch.float)
        if not self.stack_y:
            # Float targets: interpolate the two label tensors directly.
            ny_dims = len(self.y.size())
            self.learn.yb = tuple(L(self.yb1,self.yb).map_zip(torch.lerp,weight=unsqueeze(self.lam, n=ny_dims-1)))

    def lf(self, pred, *yb):
        # Loss wrapper for integer targets: interpolate the per-sample losses
        # of both target sets rather than the targets themselves.
        if not self.training: return self.old_lf(pred, *yb)
        with NoneReduce(self.old_lf) as lf:
            loss = torch.lerp(lf(pred,*self.yb1), lf(pred,*yb), self.lam)
        return reduce_loss(loss, getattr(self.old_lf, 'reduction', 'mean'))

    def rand_bbox(self, W, H, lam):
        # Cut a box whose area fraction is (1 - lam), centered at a uniformly
        # random pixel and clamped to the image bounds.
        cut_rat = torch.sqrt(1. - lam)
        cut_w = (W * cut_rat).type(torch.long)
        cut_h = (H * cut_rat).type(torch.long)
        # uniform
        cx = torch.randint(0, W, (1,)).to(self.x.device)
        cy = torch.randint(0, H, (1,)).to(self.x.device)
        x1 = torch.clamp(cx - cut_w // 2, 0, W)
        y1 = torch.clamp(cy - cut_h // 2, 0, H)
        x2 = torch.clamp(cx + cut_w // 2, 0, W)
        y2 = torch.clamp(cy + cut_h // 2, 0, H)
        return x1, y1, x2, y2
"[email protected]"
] | |
f398d3b06ab094a54439b2f4315ad474a76e55f2 | 6f30245f27a9568155f69648faf148c278136029 | /hhapps/cmd/stock_api.py | dd7da5d6c78ea0bde4abd7f69423a01f7dd4884b | [] | no_license | r202-coe-psu/hh-apps | 82495ffec7fb09155afa4e8f571051aad824acb4 | a15453b7f502a2a71ccb89ba4c4ebe95ef3ca86f | refs/heads/master | 2021-05-03T05:48:40.766349 | 2017-08-06T22:45:30 | 2017-08-06T22:45:30 | 120,584,239 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 217 | py |
from hhapps.stock import api
def main():
    """Parse CLI options, build the stock API Flask app and run it."""
    opts = api.get_program_options()
    app = api.create_app()
    app.run(debug=opts.debug,
            host=opts.host,
            port=int(opts.port))
| [
"[email protected]"
] | |
90d4c660d0cd1e00e4e063eff79f31aaaf635e41 | 95ed5173865ea5930ac1f4280e3bce78411ea956 | /examples/plot_wage_education_gender.py | 47305c351dc39a8a18ed0feba01340058cc5e141 | [
"CC-BY-3.0",
"CC-BY-4.0"
] | permissive | FedericoV/stats_in_python_tutorial | c0e99039f8f76e453bf511b99ad906fdf3111509 | 0e7607e36896790eeb3753ecb3b8ee82db206a97 | refs/heads/master | 2021-01-18T00:23:38.240447 | 2015-08-26T13:20:45 | 2015-08-26T13:20:45 | 41,425,683 | 1 | 0 | null | 2015-08-26T12:58:37 | 2015-08-26T12:58:36 | null | UTF-8 | Python | false | false | 2,679 | py | """
Test for an education/gender interaction in wages
==================================================
Wages depend mostly on education. Here we investigate how this dependence
is related to gender: not only does gender create an offset in wages, it
also seems that wages increase more with education for males than
females.
Does our data support this last hypothesis? We will test this using
statsmodels' formulas
(http://statsmodels.sourceforge.net/stable/example_formulas.html).
"""
##############################################################################
# Load and massage the data
import pandas
import os

# ``urlretrieve`` moved to ``urllib.request`` in Python 3; support both, as
# the original bare ``urllib.urlretrieve`` call is Python-2-only.
try:
    from urllib.request import urlretrieve
except ImportError:  # Python 2
    from urllib import urlretrieve

if not os.path.exists('wages.txt'):
    # Download the file if it is not present
    urlretrieve('http://lib.stat.cmu.edu/datasets/CPS_85_Wages',
                'wages.txt')
# Columns used from the CPS 85 wages dataset:
# EDUCATION: Number of years of education
# SEX: 1=Female, 0=Male
# WAGE: Wage (dollars per hour)
data = pandas.read_csv('wages.txt', skiprows=27, skipfooter=6, sep=None,
                       header=None, names=['education', 'gender', 'wage'],
                       usecols=[0, 2, 5],
                       )

# Convert genders to strings (this is particularly useful so that the
# statsmodels formulas detect that gender is a categorical variable)
import numpy as np
data['gender'] = np.choose(data.gender, ['male', 'female'])

# Log-transform the wages, because they typically are increased with
# multiplicative factors
data['wage'] = np.log10(data['wage'])
##############################################################################
# simple plotting
import seaborn
# Plot 2 linear fits for male and female.
seaborn.lmplot(y='wage', x='education', hue='gender', data=data)
##############################################################################
# statistical analysis
import statsmodels.formula.api as sm

# Note that this model is not the plot displayed above: it is one
# joined model for male and female, not separate models for male and
# female. The reason is that a single model enables statistical testing
result = sm.ols(formula='wage ~ 1 + education + gender', data=data).fit()
print(result.summary())

# The plots above highlight that there is not only a different offset in
# wage but also a different slope

# We need to model this using an interaction term
result = sm.ols(formula='wage ~ 1 + education + gender + education * gender',
                data=data).fit()
print(result.summary())

# Looking at the p-value of the interaction of gender and education, the
# data does not support the hypothesis that education benefits males
# more than females (p-value > 0.05).

import matplotlib.pyplot as plt
plt.show()
| [
"[email protected]"
] | |
ab4ddaa525f0af038fd27984ccf21aea86d3a3e9 | 00c1a2bf4f0b9af287f336b8c6f6e52390ce2d6f | /loyihaapp/models.py | 469b15e2afded2764c98c114693a9488e5d9a12f | [] | no_license | bekzod886/Django_loyiha | c038598539e3dd0efe122eceb49d77b3b2145edb | 5c1e0459db3a891ba6bd2a33f51fd575173e8fd8 | refs/heads/main | 2023-07-02T16:39:17.591120 | 2021-08-05T08:16:27 | 2021-08-05T08:16:27 | 391,806,136 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 253 | py | from django.db import models
# Create your models here.
class Meva(models.Model):
    """A fruit item in the shop: title, image URL, description and price."""

    title = models.CharField(max_length=255)
    img_url = models.CharField(max_length=255)
    desc = models.CharField(max_length=255)
    price = models.FloatField()

    def __str__(self):
        # Human-readable representation used by the admin and shell
        # (Django best practice: every model should define __str__).
        return self.title
| [
"[email protected]"
] | |
c4b8dc8085997384c991bd821a50cf250b9e32a6 | f82ca354391c19a753d319b38f8a69369e60f960 | /src/lib/device_detector/parser/client/mobileapp.py | 6ee635612ab5c9fdfe29e806a5fa3a79fa541683 | [
"MIT"
] | permissive | martbhell/wasthereannhlgamelastnight | 8398920ab1b6cf998d8f91ef5598a8e28de57a8d | c40f9f12ed4c066d4f42095e96e9a87a8581d99d | refs/heads/master | 2023-08-10T05:10:46.960500 | 2023-07-18T03:56:22 | 2023-07-18T04:02:33 | 37,021,751 | 5 | 0 | MIT | 2023-08-29T19:16:07 | 2015-06-07T15:38:26 | Python | UTF-8 | Python | false | false | 288 | py | from . import BaseClientParser
class MobileApp(BaseClientParser):
    """Client parser matching mobile-application user agent strings."""

    # local fixtures take precedence over the upstream-maintained list
    fixture_files = [
        'local/client/mobile_apps.yml',
        'upstream/client/mobile_apps.yml',
    ]

    def dtype(self):
        # fall back to the generic device type when no fixture provided one
        return self.calculated_dtype or 'mobile app'
| [
"[email protected]"
] | |
7347fbf197616979bef0fd4e3c6863d7e6916654 | dd6c21308e1cba24658c8ca7a49e2499cd167da6 | /venv/Lib/site-packages/guardian/utils.py | 5ca593620ebeb53f3e24e8634be26a4a05d93173 | [
"MIT"
] | permissive | ansonsry/Freshshop | 3a53db4d6d0bf1d6705498869a13a3aa7db6ab8c | 79ab8beb1aa993f6365182c8d3bb478ee4e028f8 | refs/heads/master | 2021-06-20T18:54:08.009409 | 2019-07-26T02:56:55 | 2019-07-26T03:02:27 | 198,931,513 | 0 | 0 | MIT | 2021-03-19T22:33:14 | 2019-07-26T02:23:49 | Python | UTF-8 | Python | false | false | 7,153 | py | """
django-guardian helper functions.
Functions defined within this module should be considered as django-guardian's
internal functionality. They are **not** guaranteed to be stable - which means
they actual input parameters/output type may change in future releases.
"""
from __future__ import unicode_literals
from django.conf import settings
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.contrib.auth.models import AnonymousUser, Group
from django.core.exceptions import ObjectDoesNotExist, PermissionDenied
from django.db.models import Model
from django.http import HttpResponseForbidden, HttpResponseNotFound
from django.shortcuts import render_to_response
from django.template import RequestContext
from guardian.compat import get_user_model, remote_model
from guardian.conf import settings as guardian_settings
from guardian.ctypes import get_content_type
from guardian.exceptions import NotUserNorGroup
from itertools import chain
import django
import logging
import os
logger = logging.getLogger(__name__)


def abspath(*p):
    """Join path segments and return the absolute, normalized path."""
    # def instead of an assigned lambda (PEP 8 E731): proper name in tracebacks.
    return os.path.abspath(os.path.join(*p))
def get_anonymous_user():
    """
    Returns ``User`` instance (not ``AnonymousUser``) depending on
    ``ANONYMOUS_USER_NAME`` configuration.
    """
    user_model = get_user_model()
    filters = {user_model.USERNAME_FIELD: guardian_settings.ANONYMOUS_USER_NAME}
    return user_model.objects.get(**filters)
def get_identity(identity):
    """
    Returns (user_obj, None) or (None, group_obj) tuple depending on what is
    given. Also accepts AnonymousUser instance but would return ``User``
    instead - it is convenient and needed for authorization backend to support
    anonymous users.

    :param identity: either ``User`` or ``Group`` instance

    :raises ``NotUserNorGroup``: if cannot return proper identity instance

    **Examples**::

       >>> from django.contrib.auth.models import User
       >>> user = User.objects.create(username='joe')
       >>> get_identity(user)
       (<User: joe>, None)

       >>> group = Group.objects.create(name='users')
       >>> get_identity(group)
       (None, <Group: users>)

       >>> anon = AnonymousUser()
       >>> get_identity(anon)
       (<User: AnonymousUser>, None)

       >>> get_identity("not instance")
       ...
       NotUserNorGroup: User/AnonymousUser or Group instance is required (got )
    """
    # AnonymousUser has no database row: substitute the configured stand-in user
    if isinstance(identity, AnonymousUser):
        identity = get_anonymous_user()

    if isinstance(identity, get_user_model()):
        return identity, None
    elif isinstance(identity, Group):
        return None, identity

    raise NotUserNorGroup("User/AnonymousUser or Group instance is required "
                          "(got %s)" % identity)
def get_40x_or_None(request, perms, obj=None, login_url=None,
                    redirect_field_name=None, return_403=False,
                    return_404=False, accept_global_perms=False):
    """Check ``perms`` for ``request.user``; return ``None`` when access is granted.

    On failure, returns a 403/404 response (rendered or raised depending on
    guardian settings) or a redirect to the login page.
    """
    login_url = login_url or settings.LOGIN_URL
    redirect_field_name = redirect_field_name or REDIRECT_FIELD_NAME

    # Handles both original and with object provided permission check
    # as ``obj`` defaults to None
    has_permissions = False
    # global perms check first (if accept_global_perms)
    if accept_global_perms:
        has_permissions = all(request.user.has_perm(perm) for perm in perms)
    # if still no permission granted, try obj perms
    if not has_permissions:
        has_permissions = all(request.user.has_perm(perm, obj)
                              for perm in perms)

    if not has_permissions:
        if return_403:
            if guardian_settings.RENDER_403:
                response = render_to_response(
                    guardian_settings.TEMPLATE_403, {},
                    RequestContext(request))
                response.status_code = 403
                return response
            elif guardian_settings.RAISE_403:
                raise PermissionDenied
            return HttpResponseForbidden()
        if return_404:
            if guardian_settings.RENDER_404:
                response = render_to_response(
                    guardian_settings.TEMPLATE_404, {},
                    RequestContext(request))
                response.status_code = 404
                return response
            elif guardian_settings.RAISE_404:
                raise ObjectDoesNotExist
            return HttpResponseNotFound()
        else:
            # neither 403 nor 404 requested: fall back to a login redirect
            from django.contrib.auth.views import redirect_to_login
            return redirect_to_login(request.get_full_path(),
                                     login_url,
                                     redirect_field_name)
def clean_orphan_obj_perms():
    """
    Seeks and removes all object permissions entries pointing at non-existing
    targets.

    Returns number of removed objects.
    """
    from guardian.models import UserObjectPermission
    from guardian.models import GroupObjectPermission

    removed = 0
    # TODO: optimise
    all_perms = chain(UserObjectPermission.objects.all().iterator(),
                      GroupObjectPermission.objects.all().iterator())
    for perm in all_perms:
        if perm.content_object is None:
            logger.debug("Removing %s (pk=%d)" % (perm, perm.pk))
            perm.delete()
            removed += 1
    logger.info("Total removed orphan object permissions instances: %d" %
                removed)
    return removed
# TODO: should raise error when multiple UserObjectPermission direct relations
# are defined
def get_obj_perms_model(obj, base_cls, generic_cls):
    """Pick the object-permission model wired to ``obj``'s class.

    Walks the model's reverse relations and returns the first direct
    (non-generic) subclass of ``base_cls`` whose ``content_object``
    foreign key points at ``obj``'s content type; falls back to
    ``generic_cls`` when no direct relation matches.
    """
    if isinstance(obj, Model):
        obj = obj.__class__
    ctype = get_content_type(obj)

    legacy_django = django.VERSION < (1, 8)
    if legacy_django:
        candidates = obj._meta.get_all_related_objects()
    else:
        candidates = (f for f in obj._meta.get_fields()
                      if (f.one_to_many or f.one_to_one) and f.auto_created)
    model_attr = 'model' if legacy_django else 'related_model'

    for relation in candidates:
        model = getattr(relation, model_attr, None)
        if model is None or not issubclass(model, base_cls):
            continue
        # if model is the generic one it would be returned anyway
        if model is generic_cls or model.objects.is_generic():
            continue
        # make sure that content_object's content_type is the same as
        # the one of the given obj
        fk = model._meta.get_field('content_object')
        if ctype == get_content_type(remote_model(fk)):
            return model
    return generic_cls
def get_user_obj_perms_model(obj):
    """Resolve the permission model that links ``obj`` with the User model."""
    from guardian.models import UserObjectPermissionBase, UserObjectPermission
    return get_obj_perms_model(obj, UserObjectPermissionBase, UserObjectPermission)
def get_group_obj_perms_model(obj):
    """Resolve the permission model that links ``obj`` with the Group model."""
    from guardian.models import GroupObjectPermissionBase, GroupObjectPermission
    return get_obj_perms_model(obj, GroupObjectPermissionBase, GroupObjectPermission)
| [
"[email protected]"
] | |
f2e6015b515e915c24bf44cb57a88c4e12c0939f | e8ae11e5017507da59e2e92d423b6a1994490de4 | /env/lib/python2.7/site-packages/azure/mgmt/scheduler/models/service_bus_brokered_message_properties.py | 336add898c6253ae0f23c0ab299ae3681b024271 | [] | no_license | teopeurt/ansible-ubuntu-server | 613d00cea28bc6531acf4a39aeeb9cd0baa2a391 | b5b6127d2ee9723c5088443efe2ffb8ae30cfea7 | refs/heads/master | 2021-06-28T12:49:50.935753 | 2017-07-31T17:34:33 | 2017-07-31T17:34:33 | 98,912,808 | 0 | 1 | null | 2020-07-24T00:05:31 | 2017-07-31T17:32:56 | Makefile | UTF-8 | Python | false | false | 3,997 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft and contributors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ServiceBusBrokeredMessageProperties(Model):
    """Broker-level properties of a Service Bus message.

    Every constructor argument is optional and is stored verbatim on the
    instance under the same name: content_type, correlation_id,
    force_persistence (bool), label, message_id, partition_key, reply_to,
    reply_to_session_id, scheduled_enqueue_time_utc (datetime),
    session_id, time_to_live (datetime), to and via_partition_key.
    """

    _attribute_map = {
        'content_type': {'key': 'contentType', 'type': 'str'},
        'correlation_id': {'key': 'correlationId', 'type': 'str'},
        'force_persistence': {'key': 'forcePersistence', 'type': 'bool'},
        'label': {'key': 'label', 'type': 'str'},
        'message_id': {'key': 'messageId', 'type': 'str'},
        'partition_key': {'key': 'partitionKey', 'type': 'str'},
        'reply_to': {'key': 'replyTo', 'type': 'str'},
        'reply_to_session_id': {'key': 'replyToSessionId', 'type': 'str'},
        'scheduled_enqueue_time_utc': {'key': 'scheduledEnqueueTimeUtc', 'type': 'iso-8601'},
        'session_id': {'key': 'sessionId', 'type': 'str'},
        'time_to_live': {'key': 'timeToLive', 'type': 'iso-8601'},
        'to': {'key': 'to', 'type': 'str'},
        'via_partition_key': {'key': 'viaPartitionKey', 'type': 'str'},
    }

    def __init__(self, content_type=None, correlation_id=None, force_persistence=None, label=None, message_id=None, partition_key=None, reply_to=None, reply_to_session_id=None, scheduled_enqueue_time_utc=None, session_id=None, time_to_live=None, to=None, via_partition_key=None):
        # Store each keyword verbatim under the matching attribute name.
        for attr_name, value in (
                ('content_type', content_type),
                ('correlation_id', correlation_id),
                ('force_persistence', force_persistence),
                ('label', label),
                ('message_id', message_id),
                ('partition_key', partition_key),
                ('reply_to', reply_to),
                ('reply_to_session_id', reply_to_session_id),
                ('scheduled_enqueue_time_utc', scheduled_enqueue_time_utc),
                ('session_id', session_id),
                ('time_to_live', time_to_live),
                ('to', to),
                ('via_partition_key', via_partition_key)):
            setattr(self, attr_name, value)
| [
"[email protected]"
] | |
3c5eb2c6bf9c023422831c5ca31f64b01a36fb0a | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/86/usersdata/164/57228/submittedfiles/pico.py | 97909e0698fdec2d38710817dcbb466f593e1da4 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 609 | py | # -*- coding: utf-8 -*-
def pico(b):
    """Return 'S' when list *b* forms a "pico" (single strict peak), 'N' otherwise.

    A "pico" strictly increases up to a maximum that sits at neither end
    of the list and then strictly decreases.

    The original implementation looped on ``b[i] < b[i] + 1`` — always
    true for numbers, so it never terminated — and returned the function
    object itself. This version returns the verdict string so the caller
    can print it exactly once.
    """
    # A peak needs at least one element on each side of the maximum.
    if len(b) < 3:
        return 'N'
    top = b.index(max(b))
    if top == 0 or top == len(b) - 1:
        return 'N'
    rising = all(b[i] < b[i + 1] for i in range(top))
    falling = all(b[i] > b[i + 1] for i in range(top, len(b) - 1))
    return 'S' if rising and falling else 'N'
# Read the element count, collect that many floats, then classify the list.
quantidade = int(input('Digite a quantidade de elementos da lista: '))
elementos = []
for _ in range(quantidade):
    elementos.append(float(input('Digite os elementos da lista: ')))
print(pico(elementos))
| [
"[email protected]"
] | |
e643e7e82c014086d290c39d39a507f120ea7360 | c9d02cc6ac33723f8dbd7013ae3210c28691d125 | /instances2dict_with_polygons.py | 8a0f2948b2862672119535a3eaf35bfc13c2eae1 | [] | no_license | nhtlongcs/cityscape_to_polylines | 05c040a998d0bfdf9ff1958f540229a8b69e1c48 | 55f91fc33c50fc5563164fb215c7af7c3f11a278 | refs/heads/master | 2022-04-28T09:52:01.628969 | 2020-05-02T07:17:51 | 2020-05-02T07:17:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,557 | py | #!/usr/bin/python
#
# Convert instances from png files to a dictionary
# This files is created according to https://github.com/facebookresearch/Detectron/issues/111
from __future__ import print_function, absolute_import, division
import os, sys
sys.path.append( os.path.normpath( os.path.join( os.path.dirname( __file__ ) , '..' , 'helpers' ) ) )
# Cityscapes imports
from external.cityscapesscripts.evaluation.instance import *
from external.cityscapesscripts.helpers.csHelpers import *
# from csHelpers import *
import cv2
import cv2_util
# from PIL import Image
# import numpy as np
def instances2dict_with_polygons(imageFileList, verbose=False):
    """Map each absolute image path to {label name: [instance dicts]}.

    For labels that have instances, every instance dict is extended with
    a 'contours' entry holding the flattened outer contours of that
    instance's mask. A single path may be passed instead of a list.
    """
    if not isinstance(imageFileList, list):
        imageFileList = [imageFileList]
    if verbose:
        print("Processing {} images...".format(len(imageFileList)))

    instanceDict = {}
    imgCount = 0
    for imageFileName in imageFileList:
        # Instance-id image as a numpy array.
        pixels = np.array(Image.open(imageFileName))
        # One (possibly empty) bucket per known label.
        perImage = {label.name: [] for label in labels}
        for instanceId in np.unique(pixels):
            instanceObj = Instance(pixels, instanceId)
            instanceObj_dict = instanceObj.toDict()
            if id2label[instanceObj.labelID].hasInstances:
                mask = (pixels == instanceId).astype(np.uint8)
                contour, hier = cv2_util.findContours(
                    mask.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)
                instanceObj_dict['contours'] = [c.reshape(-1).tolist()
                                                for c in contour]
                perImage[id2label[instanceObj.labelID].name].append(instanceObj_dict)
        instanceDict[os.path.abspath(imageFileName)] = perImage
        imgCount += 1
        if verbose:
            print("\rImages Processed: {}".format(imgCount), end=' ')
            sys.stdout.flush()
    if verbose:
        print("")
    return instanceDict
def main(argv):
    """Filter png paths out of *argv* and run the conversion in verbose mode."""
    pngFiles = []
    # NOTE(review): files are only collected when MORE than two arguments
    # are given — with fewer, an empty list is processed. Confirm this
    # threshold is intended.
    if len(argv) > 2:
        pngFiles = [arg for arg in argv if "png" in arg]
    instances2dict_with_polygons(pngFiles, True)
# Script entry point: forward the command-line arguments (minus argv[0]).
if __name__ == "__main__":
    main(sys.argv[1:])
"[email protected]"
] | |
2691d4e2fcd1060926b734171d61fc077aac74a1 | 2d73ac2c921bb84756478e042ba33ba09c6f8be0 | /sxm_player/workers/status.py | 874b3668f44ed5921ce5b6e4d90da1fffd866df8 | [
"MIT"
] | permissive | fdigeron/sxm-player | 7aa6aba111b1c6bedf6ed8e6c89f7d66feb26c8d | 2ca91fe216d1ad823b1ad7f9cfe43db4a016bd96 | refs/heads/master | 2023-06-23T18:34:47.892363 | 2021-07-30T13:27:04 | 2021-07-30T13:27:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,347 | py | import httpx
from ..queue import EventMessage, EventTypes
from .base import SXMLoopedWorker
__all__ = ["StatusWorker"]
CHECK_INTERVAL = 30
class StatusWorker(SXMLoopedWorker):
    """Polls the SXM client's HTTP API and pushes events on the queue.

    A healthy poll forwards the channel list upstream; more than three
    consecutive failures request a reset of the SXM client.
    """

    NAME = "status_check"

    _ip: str
    _port: int
    _delay: float = 30.0
    _failures: int = 0

    def __init__(self, port: int, ip: str, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # A wildcard bind address is not routable; poll loopback instead.
        self._ip = "127.0.0.1" if ip == "0.0.0.0" else ip  # nosec
        self._port = port

    def loop(self):
        self.check_sxm()

    def check_sxm(self):
        """Hit the channel listing endpoint and react to the result."""
        if not self._state.sxm_running:
            return
        self._log.debug("Checking SXM Client")
        response = httpx.get(f"http://{self._ip}:{self._port}/channels/")
        if response.is_error:
            # Unhealthy: poll more often and count the failure.
            self._delay = 5.0
            self._failures += 1
            if self._failures > 3:
                self.push_event(
                    EventMessage(
                        self.name, EventTypes.RESET_SXM, "bad status check"
                    )
                )
        else:
            # Healthy: back off, reset the counter and publish channels.
            self._delay = 30.0
            self._failures = 0
            self.push_event(
                EventMessage(self.name, EventTypes.UPDATE_CHANNELS, response.json())
            )
| [
"[email protected]"
] | |
# IPC-2011 woodworking (satisficing, STRIPS) benchmark: a single domain
# description plus its twenty (domain file, problem file) pairs, generated
# instead of spelled out one by one.
_WW_DIR = 'woodworking-sat11-strips'

domains = [
    {'description': 'Simulates the works in a woodworking workshop where there is some quantity of wood that has to be polished, coloured, etc. using different tools with different costs. Parameters of each problem are the parts to be done and the quantity (in % of necessary) of available wood (boards). The higher the number of parts and the boards the more difficult the problem is.',
     'ipc': '2011',
     'name': 'woodworking',
     'problems': [('%s/domain.pddl' % _WW_DIR,
                   '%s/p%02d.pddl' % (_WW_DIR, number))
                  for number in range(1, 21)]}
]
] | |
cfdc5c14013630e610a586b320bf708cda421dd0 | 871107e8ac9e7631057c9c9b02d3fd733e00fe2c | /Client/screens/introscreen.py | 602202fdfd03760e173b0f4593127a50fce4d43b | [] | no_license | jamesfowkes/Snackspace | 40993cfc26b23463939ebaa8fa37f666086be3c9 | d93633aaf25039b9a548b073e69d8f23c94b71e3 | refs/heads/master | 2018-12-30T10:56:48.786485 | 2013-10-24T20:49:04 | 2013-10-24T20:49:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,157 | py | """
introscreen.py
The first screen to be displayed when snackspace starts.
"""
from .displayconstants import Colours, Screens
from .screen import Screen
from .introscreen_gui import IntroScreenGUI
class IntroScreen(Screen, IntroScreenGUI):
    """Snackspace introduction screen: waits for a card swipe or a scan."""

    def __init__(self, width, height, manager, owner):
        Screen.__init__(self, manager, owner, Screens.INTROSCREEN)
        IntroScreenGUI.__init__(self, width, height, self)

    def _update_on_active(self):
        pass

    def on_rfid(self):
        """A card swipe moves straight on to the main screen."""
        if not self.active:
            return
        self.screen_manager.req(Screens.MAINSCREEN)

    def on_bad_rfid(self):
        """Invalid RFID swipes are ignored on this screen."""
        pass

    def on_gui_event(self, pos):
        """Touchscreen presses are ignored on this screen."""
        pass

    def on_key_event(self, key):
        """Keyboard input is ignored on this screen."""
        pass

    def on_scan(self, __product):
        """A product scan moves straight on to the main screen."""
        if not self.active:
            return
        self.screen_manager.req(Screens.MAINSCREEN)

    def on_bad_scan(self, __barcode):
        """Show a temporary error banner for an unrecognised barcode."""
        if not self.active:
            return
        self.set_banner_with_timeout("Unknown barcode: '%s'" % __barcode, 4, Colours.ERR, self._banner_timeout)
        self._request_redraw()

    def set_db_state(self, db_connected):
        """Reflect remote database availability in the intro text."""
        if db_connected:
            self.set_intro_text("Scan an item or swipe your card to start", Colours.FG)
        else:
            self.set_intro_text("ERROR: Cannot access Snackspace remote database", Colours.ERR)
        self._request_redraw()

    def _banner_timeout(self):
        """GUI callback: the banner expired, so hide it and repaint."""
        self.hide_banner()
        self._request_redraw()

    def _request_redraw(self):
        """Ask the screen manager to repaint this screen."""
        self.screen_manager.req(self.screen_id)
"[email protected]"
] | |
3dd8660eed53064c4439f1f5d6692774495f1ed8 | 4e139c024f09eb547304c2cb2d1399a334f66c92 | /wikigen/settings.py | 615f81170857207c997a2aa14e95fc6fc85ecc86 | [] | no_license | epochx/PEER | 04dd77fd638858fe285c9fcee3ad6a4ccd283e9a | de52c45d121b63dee28f72b68de2625c8ec2bb66 | refs/heads/master | 2023-02-27T15:29:09.086756 | 2021-02-01T07:54:29 | 2021-02-01T07:54:29 | 264,897,876 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 716 | py | #!/usr/bin/env python
# -*-coding: utf8 -*-
import os

# Directory that contains this settings module.
CODE_ROOT = os.path.dirname(os.path.realpath(__file__))
# Data and result locations all live under the user's home directory.
HOME = os.environ["HOME"]
DATA_PATH = os.path.join(HOME, "data", "PEER")
SPLITS_PATH = os.path.join(DATA_PATH, "splits")
EDITS_PATH = os.path.join(DATA_PATH, "edits")
RESULTS_PATH = os.path.join(HOME, "results", "PEER")

_DB_NAME = "runs.db"

# Run parameters that should not be recorded with an experiment.
PARAM_IGNORE_LIST = [
    "results_path",
    "overwrite",
    "force_dataset_reload",
    "verbose",
    "write_mode",
]

DATABASE_CONNECTION_STRING = "sqlite:///{}".format(
    os.path.join(RESULTS_PATH, _DB_NAME))

# One dataset name per .jsonl file in the edits folder; empty when the
# folder does not exist yet.
try:
    DATASET_NAMES = [
        entry.replace(".jsonl", "") for entry in os.listdir(EDITS_PATH)
    ]
except FileNotFoundError:
    DATASET_NAMES = []
| [
"[email protected]"
] | |
d6b5386e2ea175f5f51171bffa0c9efee0e4d949 | c9094a4ed256260bc026514a00f93f0b09a5d60c | /homeassistant/helpers/debounce.py | 23727c2a00fe774c1bdc55970457e11e4ddccf7a | [
"Apache-2.0"
] | permissive | turbokongen/home-assistant | 824bc4704906ec0057f3ebd6d92788e096431f56 | 4ab0151fb1cbefb31def23ba850e197da0a5027f | refs/heads/dev | 2023-03-12T05:49:44.508713 | 2021-02-17T14:06:16 | 2021-02-17T14:06:16 | 50,231,140 | 4 | 1 | Apache-2.0 | 2023-02-22T06:14:30 | 2016-01-23T08:55:09 | Python | UTF-8 | Python | false | false | 3,861 | py | """Debounce helper."""
import asyncio
from logging import Logger
from typing import Any, Awaitable, Callable, Optional
from homeassistant.core import HassJob, HomeAssistant, callback
class Debouncer:
    """Class to rate limit calls to a specific command.

    At most one execution of the wrapped coroutine runs per ``cooldown``
    window; calls arriving while a window is open are coalesced into a
    single trailing execution when the timer fires.
    """

    def __init__(
        self,
        hass: HomeAssistant,
        logger: Logger,
        *,
        cooldown: float,
        immediate: bool,
        function: Optional[Callable[..., Awaitable[Any]]] = None,
    ):
        """Initialize debounce.

        immediate: indicate if the function needs to be called right away and
        wait <cooldown> until executing next invocation.
        function: optional and can be instantiated later.
        """
        self.hass = hass
        self.logger = logger
        self._function = function
        self.cooldown = cooldown
        self.immediate = immediate
        # Handle of the pending cooldown timer, if one is scheduled.
        self._timer_task: Optional[asyncio.TimerHandle] = None
        # True when a coalesced call must run once the timer fires.
        self._execute_at_end_of_timer: bool = False
        # Held while the wrapped function is actually executing.
        self._execute_lock = asyncio.Lock()
        # Pre-built job wrapper; kept in sync with ``function`` by the setter.
        self._job: Optional[HassJob] = None if function is None else HassJob(function)

    @property
    def function(self) -> Optional[Callable[..., Awaitable[Any]]]:
        """Return the function being wrapped by the Debouncer."""
        return self._function

    @function.setter
    def function(self, function: Callable[..., Awaitable[Any]]) -> None:
        """Update the function being wrapped by the Debouncer."""
        self._function = function

        # Rebuild the job only when the target actually changed.
        if self._job is None or function != self._job.target:
            self._job = HassJob(function)

    async def async_call(self) -> None:
        """Call the function."""
        assert self._job is not None

        if self._timer_task:
            # A cooldown window is open: remember to run once it closes.
            if not self._execute_at_end_of_timer:
                self._execute_at_end_of_timer = True

            return

        # Locked means a call is in progress. Any call is good, so abort.
        if self._execute_lock.locked():
            return

        if not self.immediate:
            # Deferred mode: the first call only opens the window; the
            # execution happens when the timer finishes.
            self._execute_at_end_of_timer = True
            self._schedule_timer()
            return

        async with self._execute_lock:
            # Abort if timer got set while we're waiting for the lock.
            if self._timer_task:
                return

            task = self.hass.async_run_hass_job(self._job)
            if task:
                await task

            # Open the cooldown window so follow-up calls get coalesced.
            self._schedule_timer()

    async def _handle_timer_finish(self) -> None:
        """Handle a finished timer.

        Runs the coalesced call (if one was requested during the window)
        and opens the next cooldown window afterwards.
        """
        assert self._job is not None

        self._timer_task = None

        if not self._execute_at_end_of_timer:
            return

        self._execute_at_end_of_timer = False

        # Locked means a call is in progress. Any call is good, so abort.
        if self._execute_lock.locked():
            return

        async with self._execute_lock:
            # Abort if timer got set while we're waiting for the lock.
            if self._timer_task:
                return  # type: ignore

            try:
                task = self.hass.async_run_hass_job(self._job)
                if task:
                    await task
            except Exception:  # pylint: disable=broad-except
                # The timer path has no awaiting caller to surface errors
                # to, so log instead of letting the exception vanish.
                self.logger.exception("Unexpected exception from %s", self.function)

            self._schedule_timer()

    @callback
    def async_cancel(self) -> None:
        """Cancel any scheduled call."""
        if self._timer_task:
            self._timer_task.cancel()
            self._timer_task = None

        self._execute_at_end_of_timer = False

    @callback
    def _schedule_timer(self) -> None:
        """Schedule a timer that ends the current cooldown window."""
        self._timer_task = self.hass.loop.call_later(
            self.cooldown,
            lambda: self.hass.async_create_task(self._handle_timer_finish()),
        )
| [
"[email protected]"
] | |
def squares(n):
    """Lazily yield the squares of 1..n (empty for n < 1)."""
    yield from (i * i for i in range(1, n + 1))


print(list(squares(5)))
"[email protected]"
] | |
81608d1980eaa0bcd10242f77e99c0a5aad22b73 | f9d564f1aa83eca45872dab7fbaa26dd48210d08 | /huaweicloud-sdk-apm/huaweicloudsdkapm/v1/model/delete_app_request.py | 7b641893dbfb446555ef0b94333f7954ceaf7ae2 | [
"Apache-2.0"
] | permissive | huaweicloud/huaweicloud-sdk-python-v3 | cde6d849ce5b1de05ac5ebfd6153f27803837d84 | f69344c1dadb79067746ddf9bfde4bddc18d5ecf | refs/heads/master | 2023-09-01T19:29:43.013318 | 2023-08-31T08:28:59 | 2023-08-31T08:28:59 | 262,207,814 | 103 | 44 | NOASSERTION | 2023-06-22T14:50:48 | 2020-05-08T02:28:43 | Python | UTF-8 | Python | false | false | 4,007 | py | # coding: utf-8
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class DeleteAppRequest:
    """Request model for the APM delete-app operation.

    Attributes:
        openapi_types (dict): attribute name -> attribute type.
        attribute_map (dict): attribute name -> JSON key on the wire.
    """

    sensitive_list = []

    openapi_types = {
        'application_id': 'int',
        'x_business_id': 'int'
    }

    attribute_map = {
        'application_id': 'application_id',
        'x_business_id': 'x-business-id'
    }

    def __init__(self, application_id=None, x_business_id=None):
        """DeleteAppRequest

        The model defined in huaweicloud sdk

        :param application_id: Component id.
        :type application_id: int
        :param x_business_id: Application id, used for authorization.
        :type x_business_id: int
        """
        self._application_id = None
        self._x_business_id = None
        self.discriminator = None
        # Route constructor arguments through the property setters.
        for field, value in (('application_id', application_id),
                             ('x_business_id', x_business_id)):
            setattr(self, field, value)

    @property
    def application_id(self):
        """Component id carried by this DeleteAppRequest.

        :rtype: int
        """
        return self._application_id

    @application_id.setter
    def application_id(self, application_id):
        """Set the component id of this DeleteAppRequest.

        :type application_id: int
        """
        self._application_id = application_id

    @property
    def x_business_id(self):
        """Application id (auth) carried by this DeleteAppRequest.

        :rtype: int
        """
        return self._x_business_id

    @x_business_id.setter
    def x_business_id(self, x_business_id):
        """Set the application id (auth) of this DeleteAppRequest.

        :type x_business_id: int
        """
        self._x_business_id = x_business_id

    def to_dict(self):
        """Returns the model properties as a dict"""
        def _convert(item):
            # Nested SDK models know how to serialize themselves.
            return item.to_dict() if hasattr(item, "to_dict") else item

        result = {}
        for attr in self.openapi_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [_convert(element) for element in value]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {key: _convert(val) for key, val in value.items()}
            else:
                # Mask attributes flagged as sensitive.
                result[attr] = "****" if attr in self.sensitive_list else value
        return result

    def to_str(self):
        """Returns the string representation of the model"""
        import simplejson as json
        if six.PY2:
            import sys
            reload(sys)
            sys.setdefaultencoding("utf-8")
        return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)

    def __repr__(self):
        """For `print`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        return isinstance(other, DeleteAppRequest) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| [
"[email protected]"
] | |
9ed0fdfefdf7006316db3f03a97b40008588cfa4 | 2966b05a4b8239a3ab95acf26423e706582e4b42 | /kvirt/kvm/__init__.py | 6daebf1c19ebe570816a6f49688f4d84bfdb1e97 | [] | no_license | goffinet/kcli | 80ea47c3b928ee1c7d85a7c4519f82c6c8144cee | 69b225137bf543fabcebd7478802fb9d725a82ab | refs/heads/master | 2021-01-19T19:18:42.803401 | 2017-03-02T01:29:39 | 2017-03-02T01:29:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 61,596 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
interact with a local/remote libvirt daemon
"""
from kvirt import defaults
from iptools import IpRange
# from jinja2 import Environment
from kvirt import common
from netaddr import IPAddress, IPNetwork
from libvirt import open as libvirtopen
import os
import string
import time
import xml.etree.ElementTree as ET
# NOTE(review): KB is defined as 1024 * 1024 (a mebibyte) and MB therefore
# as a gibibyte — the names do not match the values. Confirm intended
# before relying on these constants by name.
KB = 1024 * 1024
MB = 1024 * KB
# Guest OS identifiers that a requested guest id can map to.
guestrhel532 = "rhel_5"
guestrhel564 = "rhel_5x64"
guestrhel632 = "rhel_6"
guestrhel664 = "rhel_6x64"
guestrhel764 = "rhel_7x64"
guestother = "other"
guestotherlinux = "other_linux"
guestwindowsxp = "windows_xp"
guestwindows7 = "windows_7"
guestwindows764 = "windows_7x64"
guestwindows2003 = "windows_2003"
guestwindows200364 = "windows_2003x64"
guestwindows2008 = "windows_2008"
guestwindows200864 = "windows_2008x64"
class Kvirt:
def __init__(self, host='127.0.0.1', port=None, user='root', protocol='ssh', url=None):
if url is None:
if host == '127.0.0.1' or host == 'localhost':
url = "qemu:///system"
elif port:
url = "qemu+%s://%s@%s:%s/system?socket=/var/run/libvirt/libvirt-sock" % (protocol, user, host, port)
elif protocol == 'ssh':
url = "qemu+%s://%s@%s/system?socket=/var/run/libvirt/libvirt-sock" % (protocol, user, host)
else:
url = "qemu:///system"
try:
self.conn = libvirtopen(url)
except Exception:
self.conn = None
self.host = host
self.user = user
self.port = port
self.protocol = protocol
if self.protocol == 'ssh' and port is None:
self.port = '22'
def close(self):
conn = self.conn
conn.close()
self.conn = None
def exists(self, name):
conn = self.conn
for vm in conn.listAllDomains():
if vm.name() == name:
return True
return False
def net_exists(self, name):
conn = self.conn
try:
conn.networkLookupByName(name)
return True
except:
return False
def disk_exists(self, pool, name):
conn = self.conn
try:
storage = conn.storagePoolLookupByName(pool)
storage.refresh()
for stor in sorted(storage.listVolumes()):
if stor == name:
return True
except:
return False
    def create(self, name, virttype='kvm', title='', description='kvirt', numcpus=2, memory=512, guestid='guestrhel764', pool='default', template=None, disks=[{'size': 10}], disksize=10, diskthin=True, diskinterface='virtio', nets=['default'], iso=None, vnc=False, cloudinit=True, reserveip=False, reservedns=False, start=True, keys=None, cmds=None, ips=None, netmasks=None, gateway=None, nested=True, dns=None, domain=None, tunnel=False, files=[]):
        """Define a new libvirt domain called *name* and optionally start it.

        Hand-builds the volume and domain XML from the requested disks,
        networks, iso, display and serial settings, creates the volumes,
        defines the domain, optionally generates a cloudinit iso and
        reserves ip/dns entries, then starts the machine when *start* is
        True. Returns {'result': 'success'} or a
        {'result': 'failure', 'reason': ...} dict when validation of
        pools, templates or networks fails.

        NOTE(review): ``disks``, ``nets`` and ``files`` are mutable
        default arguments (shared between calls), and ``nets`` entries
        are mutated in place when ips are injected — confirm callers
        expect this. ``guestid`` and ``netmasks`` are accepted but not
        referenced in this method body.
        """
        # Per-disk settings fall back to these method-level defaults.
        default_diskinterface = diskinterface
        default_diskthin = diskthin
        default_disksize = disksize
        default_pool = pool
        conn = self.conn
        # Resolve the default storage pool and extract its type and path.
        try:
            default_storagepool = conn.storagePoolLookupByName(default_pool)
        except:
            return {'result': 'failure', 'reason': "Pool %s not found" % default_pool}
        default_poolxml = default_storagepool.XMLDesc(0)
        root = ET.fromstring(default_poolxml)
        default_pooltype = root.getiterator('pool')[0].get('type')
        default_poolpath = None
        for element in root.getiterator('path'):
            default_poolpath = element.text
            break
        if vnc:
            display = 'vnc'
        else:
            display = 'spice'
        # Index every existing volume by name and by full path so templates
        # and isos can be resolved either way.
        volumes = {}
        volumespaths = {}
        for p in conn.listStoragePools():
            poo = conn.storagePoolLookupByName(p)
            poo.refresh(0)
            for vol in poo.listAllVolumes():
                volumes[vol.name()] = {'pool': poo, 'object': vol}
                volumespaths[vol.path()] = {'pool': poo, 'object': vol}
        # Known attachment targets: libvirt networks and host bridges.
        networks = []
        bridges = []
        for net in conn.listNetworks():
            networks.append(net)
        for net in conn.listInterfaces():
            if net != 'lo':
                bridges.append(net)
        machine = 'pc'
        sysinfo = "<smbios mode='sysinfo'/>"
        # Build one <disk> device element plus one volume-creation XML per
        # requested disk; each entry may be None, an int (size) or a dict.
        disksxml = ''
        volsxml = {}
        for index, disk in enumerate(disks):
            if disk is None:
                disksize = default_disksize
                diskthin = default_diskthin
                diskinterface = default_diskinterface
                diskpool = default_pool
                diskpooltype = default_pooltype
                diskpoolpath = default_poolpath
            elif isinstance(disk, int):
                disksize = disk
                diskthin = default_diskthin
                diskinterface = default_diskinterface
                diskpool = default_pool
                diskpooltype = default_pooltype
                diskpoolpath = default_poolpath
            elif isinstance(disk, dict):
                disksize = disk.get('size', default_disksize)
                diskthin = disk.get('thin', default_diskthin)
                diskinterface = disk.get('interface', default_diskinterface)
                diskpool = disk.get('pool', default_pool)
                # NOTE(review): diskwwn is only assigned in this dict branch;
                # an int/None first disk entry hits an UnboundLocalError at
                # the wwn check below — confirm.
                diskwwn = disk.get('wwn')
                try:
                    storagediskpool = conn.storagePoolLookupByName(diskpool)
                except:
                    return {'result': 'failure', 'reason': "Pool %s not found" % diskpool}
                diskpoolxml = storagediskpool.XMLDesc(0)
                root = ET.fromstring(diskpoolxml)
                diskpooltype = root.getiterator('pool')[0].get('type')
                diskpoolpath = None
                for element in root.getiterator('path'):
                    diskpoolpath = element.text
                    break
            else:
                return {'result': 'failure', 'reason': "Invalid disk entry"}
            # vda/vdb/... for virtio, hda/hdb/... for ide.
            letter = chr(index + ord('a'))
            diskdev, diskbus = 'vd%s' % letter, 'virtio'
            if diskinterface != 'virtio':
                diskdev, diskbus = 'hd%s' % letter, 'ide'
            diskformat = 'qcow2'
            if not diskthin:
                diskformat = 'raw'
            storagename = "%s_%d.img" % (name, index + 1)
            diskpath = "%s/%s" % (diskpoolpath, storagename)
            # Only the first disk may be backed by a template volume.
            if template is not None and index == 0:
                try:
                    default_storagepool.refresh(0)
                    if '/' in template:
                        backingvolume = volumespaths[template]['object']
                    else:
                        backingvolume = volumes[template]['object']
                    backingxml = backingvolume.XMLDesc(0)
                    root = ET.fromstring(backingxml)
                except:
                    return {'result': 'failure', 'reason': "Invalid template %s" % template}
                backing = backingvolume.path()
                # lvm-backed templates need a logical pool and vice versa.
                if '/dev' in backing and diskpooltype == 'dir':
                    return {'result': 'failure', 'reason': "lvm template can not be used with a dir pool.Leaving..."}
                if '/dev' not in backing and diskpooltype == 'logical':
                    return {'result': 'failure', 'reason': "file template can not be used with a lvm pool.Leaving..."}
                backingxml = """<backingStore type='file' index='1'>
                <format type='qcow2'/>
                <source file='%s'/>
                <backingStore/>
                </backingStore>""" % backing
            else:
                backing = None
                backingxml = '<backingStore/>'
            volxml = self._xmlvolume(path=diskpath, size=disksize, pooltype=diskpooltype, backing=backing, diskformat=diskformat)
            if diskpool in volsxml:
                volsxml[diskpool].append(volxml)
            else:
                volsxml[diskpool] = [volxml]
            if diskpooltype == 'logical':
                diskformat = 'raw'
            # A wwn is only emitted for ide disks.
            if diskwwn is not None and diskbus == 'ide':
                diskwwn = '0x%016x' % diskwwn
                diskwwn = "<wwn>%s</wwn>" % diskwwn
            else:
                diskwwn = ''
            disksxml = """%s<disk type='file' device='disk'>
                    <driver name='qemu' type='%s'/>
                    <source file='%s'/>
                    %s
                    <target dev='%s' bus='%s'/>
                    %s
                    </disk>""" % (disksxml, diskformat, diskpath, backingxml, diskdev, diskbus, diskwwn)
        # Build one <interface> element per requested network. The first
        # nic's ip (when known) is advertised through smbios as 'version'.
        # NOTE(review): netname is only assigned for str entries or dicts
        # containing 'name'; other shapes raise NameError below — confirm.
        netxml = ''
        version = ''
        for index, net in enumerate(nets):
            macxml = ''
            if isinstance(net, str):
                netname = net
            elif isinstance(net, dict) and 'name' in net:
                netname = net['name']
                ip = None
                if ips and len(ips) > index and ips[index] is not None:
                    ip = ips[index]
                    nets[index]['ip'] = ip
                elif 'ip' in nets[index]:
                    ip = nets[index]['ip']
                if 'mac' in nets[index]:
                    mac = nets[index]['mac']
                    macxml = "<mac address='%s'/>" % mac
                if index == 0 and ip is not None:
                    version = "<entry name='version'>%s</entry>" % ip
            if netname in bridges:
                sourcenet = 'bridge'
            elif netname in networks:
                sourcenet = 'network'
            else:
                return {'result': 'failure', 'reason': "Invalid network %s" % netname}
            netxml = """%s
                     <interface type='%s'>
                     %s
                     <source %s='%s'/>
                     <model type='virtio'/>
                     </interface>""" % (netxml, sourcenet, macxml, sourcenet, netname)
        version = """<sysinfo type='smbios'>
                    <system>
                    %s
                    <entry name='product'>%s</entry>
                    </system>
                    </sysinfo>""" % (version, title)
        # Pick the cdrom source: a generated cloudinit iso, nothing, or an
        # existing iso volume resolved by name or absolute path.
        if iso is None:
            if cloudinit:
                iso = "%s/%s.iso" % (default_poolpath, name)
            else:
                iso = ''
        else:
            try:
                if os.path.isabs(iso):
                    shortiso = os.path.basename(iso)
                else:
                    shortiso = iso
                isovolume = volumes[shortiso]['object']
                iso = isovolume.path()
                # iso = "%s/%s" % (default_poolpath, iso)
                # iso = "%s/%s" % (isopath, iso)
            except:
                return {'result': 'failure', 'reason': "Invalid iso %s" % iso}
        isoxml = """<disk type='file' device='cdrom'>
                <driver name='qemu' type='raw'/>
                <source file='%s'/>
                <target dev='hdc' bus='ide'/>
                <readonly/>
                </disk>""" % (iso)
        # When tunnelling, the graphical display only listens on loopback.
        if tunnel:
            listen = '127.0.0.1'
        else:
            listen = '0.0.0.0'
        displayxml = """<input type='tablet' bus='usb'/>
                    <input type='mouse' bus='ps2'/>
                    <graphics type='%s' port='-1' autoport='yes' listen='%s'>
                    <listen type='address' address='%s'/>
                    </graphics>
                    <memballoon model='virtio'/>""" % (display, listen, listen)
        # Expose vmx so the guest can itself run kvm (nested virtualization).
        if nested and virttype == 'kvm':
            nestedxml = """<cpu match='exact'>
                  <model>Westmere</model>
                   <feature policy='require' name='vmx'/>
                  </cpu>"""
        else:
            nestedxml = ""
        # Local hosts get a pty console; remote hosts get a telnet serial
        # socket bound to a free local port.
        if self.host in ['localhost', '127.0.0.1']:
            serialxml = """<serial type='pty'>
                       <target port='0'/>
                       </serial>
                       <console type='pty'>
                       <target type='serial' port='0'/>
                       </console>"""
        else:
            serialxml = """ <serial type="tcp">
                 <source mode="bind" host="127.0.0.1" service="%s"/>
                 <protocol type="telnet"/>
                 <target port="0"/>
                 </serial>""" % common.get_free_port()
        # Assemble the final domain definition.
        vmxml = """<domain type='%s'>
                  <name>%s</name>
                  <description>%s</description>
                  %s
                  <memory unit='MiB'>%d</memory>
                  <vcpu>%d</vcpu>
                  <os>
                  <type arch='x86_64' machine='%s'>hvm</type>
                  <boot dev='hd'/>
                  <boot dev='cdrom'/>
                  <bootmenu enable='yes'/>
                  %s
                  </os>
                  <features>
                  <acpi/>
                  <apic/>
                  <pae/>
                  </features>
                  <clock offset='utc'/>
                  <on_poweroff>destroy</on_poweroff>
                  <on_reboot>restart</on_reboot>
                  <on_crash>restart</on_crash>
                  <devices>
                  %s
                  %s
                  %s
                  %s
                  %s
                  </devices>
                  %s
                  </domain>""" % (virttype, name, description, version, memory, numcpus, machine, sysinfo, disksxml, netxml, isoxml, displayxml, serialxml, nestedxml)
        # Create all requested volumes, pool by pool.
        for pool in volsxml:
            storagepool = conn.storagePoolLookupByName(pool)
            storagepool.refresh(0)
            for volxml in volsxml[pool]:
                storagepool.createXML(volxml, 0)
        conn.defineXML(vmxml)
        vm = conn.lookupByName(name)
        vm.setAutostart(1)
        # Generate and upload the cloudinit seed iso.
        if cloudinit:
            common.cloudinit(name=name, keys=keys, cmds=cmds, nets=nets, gateway=gateway, dns=dns, domain=domain, reserveip=reserveip, files=files)
            self._uploadimage(name, pool=default_storagepool)
        # Reserve a dhcp lease for every mac of the freshly defined domain.
        if reserveip:
            xml = vm.XMLDesc(0)
            vmxml = ET.fromstring(xml)
            macs = []
            for element in vmxml.getiterator('interface'):
                mac = element.find('mac').get('address')
                macs.append(mac)
            self._reserve_ip(name, nets, macs)
        if start:
            vm.create()
        if reservedns:
            self._reserve_dns(name, nets, domain)
        return {'result': 'success'}
def start(self, name):
conn = self.conn
status = {0: 'down', 1: 'up'}
try:
vm = conn.lookupByName(name)
vm = conn.lookupByName(name)
if status[vm.isActive()] == "up":
return {'result': 'success'}
else:
vm.create()
return {'result': 'success'}
except:
return {'result': 'failure', 'reason': "VM %s not found" % name}
def stop(self, name):
conn = self.conn
status = {0: 'down', 1: 'up'}
try:
vm = conn.lookupByName(name)
if status[vm.isActive()] == "down":
return {'result': 'success'}
else:
vm.destroy()
return {'result': 'success'}
except:
return {'result': 'failure', 'reason': "VM %s not found" % name}
def restart(self, name):
conn = self.conn
status = {0: 'down', 1: 'up'}
vm = conn.lookupByName(name)
if status[vm.isActive()] == "down":
return {'result': 'success'}
else:
vm.restart()
return {'result': 'success'}
    def report(self):
        """Print a human-readable summary of the hypervisor.

        Covers host name/CPU/memory, every storage pool (type, path,
        used/available space in GB), bridged interfaces and routed
        libvirt networks (CIDR and DHCP status). Output goes to stdout;
        nothing is returned.
        """
        conn = self.conn
        hostname = conn.getHostname()
        # getCPUMap() -> (number of cpus, cpumap, online); we only need the count
        cpus = conn.getCPUMap()[0]
        # getInfo()[1] is total host memory in MB
        memory = conn.getInfo()[1]
        print("Host:%s Cpu:%s Memory:%sMB\n" % (hostname, cpus, memory))
        for pool in conn.listStoragePools():
            poolname = pool
            pool = conn.storagePoolLookupByName(pool)
            poolxml = pool.XMLDesc(0)
            root = ET.fromstring(poolxml)
            pooltype = root.getiterator('pool')[0].get('type')
            # dir pools publish a <path>, device-backed pools a <device path=...>
            if pooltype == 'dir':
                poolpath = root.getiterator('path')[0].text
            else:
                poolpath = root.getiterator('device')[0].get('path')
            s = pool.info()
            # info() -> [state, capacity, allocation, available], all in bytes
            used = "%.2f" % (float(s[2]) / 1024 / 1024 / 1024)
            available = "%.2f" % (float(s[3]) / 1024 / 1024 / 1024)
            # Type,Status, Total space in Gb, Available space in Gb
            # round-trip through the formatted string to get 2-decimal floats
            used = float(used)
            available = float(available)
            print("Storage:%s Type:%s Path:%s Used space:%sGB Available space:%sGB" % (poolname, pooltype, poolpath, used, available))
            # NOTE(review): bare "print" is a Python 2 statement emitting a blank
            # line; under Python 3 it is a no-op expression — confirm interpreter
            print
        for interface in conn.listAllInterfaces():
            interfacename = interface.name()
            if interfacename == 'lo':
                continue
            print("Network:%s Type:bridged" % (interfacename))
        for network in conn.listAllNetworks():
            networkname = network.name()
            netxml = network.XMLDesc(0)
            cidr = 'N/A'
            root = ET.fromstring(netxml)
            ip = root.getiterator('ip')
            if ip:
                attributes = ip[0].attrib
                firstip = attributes.get('address')
                netmask = attributes.get('netmask')
                if netmask is None:
                    # some networks define a prefix length instead of a netmask
                    netmask = attributes.get('prefix')
                try:
                    ip = IPNetwork('%s/%s' % (firstip, netmask))
                    cidr = ip.cidr
                except:
                    cidr = "N/A"
            dhcp = root.getiterator('dhcp')
            if dhcp:
                dhcp = True
            else:
                dhcp = False
            print("Network:%s Type:routed Cidr:%s Dhcp:%s" % (networkname, cidr, dhcp))
def status(self, name):
conn = self.conn
status = {0: 'down', 1: 'up'}
try:
vm = conn.lookupByName(name)
except:
return None
return status[vm.isActive()]
    def list(self):
        """Return every domain as [name, state, ip, source, description, title].

        The ip slot is filled from DHCP leases when the VM runs, and
        overridden by the SMBIOS 'version' entry when one is present
        (kcli stores the reserved ip there). 'source' is the basename of
        the first disk's backing file; 'title' is the SMBIOS 'product'
        entry (used as profile name).
        """
        vms = []
        leases = {}
        conn = self.conn
        # mac -> ip map built from every network's current DHCP leases
        for network in conn.listAllNetworks():
            for lease in network.DHCPLeases():
                ip = lease['ipaddr']
                mac = lease['mac']
                leases[mac] = ip
        status = {0: 'down', 1: 'up'}
        for vm in conn.listAllDomains(0):
            xml = vm.XMLDesc(0)
            root = ET.fromstring(xml)
            description = root.getiterator('description')
            if description:
                description = description[0].text
            else:
                description = ''
            name = vm.name()
            state = status[vm.isActive()]
            ips = []
            title = ''
            for element in root.getiterator('interface'):
                mac = element.find('mac').get('address')
                if vm.isActive():
                    if mac in leases:
                        ips.append(leases[mac])
            if ips:
                # keep the lease of the last nic when several match
                ip = ips[-1]
            else:
                ip = ''
            for entry in root.getiterator('entry'):
                attributes = entry.attrib
                # SMBIOS entries: 'version' holds the reserved ip, 'product' the profile
                if attributes['name'] == 'version':
                    ip = entry.text
                if attributes['name'] == 'product':
                    title = entry.text
            source = ''
            for element in root.getiterator('backingStore'):
                s = element.find('source')
                if s is not None:
                    source = os.path.basename(s.get('file'))
                    break
            vms.append([name, state, ip, source, description, title])
        return vms
    def console(self, name, tunnel=False):
        """Open a graphical (vnc/spice) console to *name* with remote-viewer.

        With tunnel=True an ssh port-forward to the hypervisor is set up
        first and the viewer connects through 127.0.0.1. Prints "VM down"
        and returns when the domain is not running.
        """
        conn = self.conn
        vm = conn.lookupByName(name)
        if not vm.isActive():
            print("VM down")
            return
        else:
            xml = vm.XMLDesc(0)
            root = ET.fromstring(xml)
            # pick up protocol/port from the (last) <graphics> element
            for element in root.getiterator('graphics'):
                attributes = element.attrib
                if attributes['listen'] == '127.0.0.1' or tunnel:
                    host = '127.0.0.1'
                else:
                    host = self.host
                protocol = attributes['type']
                port = attributes['port']
            if tunnel:
                # background ssh forward kept alive just long enough for the viewer
                consolecommand = "ssh -f -p %s -L %s:127.0.0.1:%s %s@%s sleep 10" % (self.port, port, port, self.user, self.host)
                os.popen(consolecommand)
            url = "%s://%s:%s" % (protocol, host, port)
            os.popen("remote-viewer %s &" % url)
    def serialconsole(self, name):
        """Attach to the serial console of the running domain *name*.

        Locally this shells out to `virsh console`; on a remote
        hypervisor it requires the ssh protocol and connects through
        `ssh ... nc` to the tcp serial port the VM was created with.
        """
        conn = self.conn
        vm = conn.lookupByName(name)
        if not vm.isActive():
            print("VM down")
            return
        else:
            xml = vm.XMLDesc(0)
            root = ET.fromstring(xml)
            serial = root.getiterator('serial')
            if not serial:
                print("No serial Console found. Leaving...")
                return
            elif self.host in ['localhost', '127.0.0.1']:
                os.system('virsh console %s' % name)
            else:
                for element in serial:
                    # tcp-backed consoles carry the bound port in source/@service
                    serialport = element.find('source').get('service')
                    if serialport:
                        if self.protocol != 'ssh':
                            print("Remote serial Console requires using ssh . Leaving...")
                            return
                        else:
                            serialcommand = "ssh -p %s %s@%s nc 127.0.0.1 %s" % (self.port, self.user, self.host, serialport)
                            os.system(serialcommand)
    def info(self, name):
        """Print a detailed report of domain *name* to stdout.

        Covers state, autostart, description/profile (SMBIOS 'product'),
        cpus, memory, each nic (mac, network, DHCP-leased ip), the
        reserved ip stored in the SMBIOS 'version' entry, and each disk
        (device, size, format, path). Returns nothing; prints an error
        when the VM does not exist.
        """
        # ips = []
        leases = {}
        starts = {0: 'no', 1: 'yes'}
        conn = self.conn
        # mac -> ip map from the DHCP leases of every network
        for network in conn.listAllNetworks():
            for lease in network.DHCPLeases():
                ip = lease['ipaddr']
                mac = lease['mac']
                leases[mac] = ip
        try:
            vm = conn.lookupByName(name)
            xml = vm.XMLDesc(0)
            root = ET.fromstring(xml)
        except:
            print("VM %s not found" % name)
            return
        state = 'down'
        autostart = starts[vm.autostart()]
        memory = root.getiterator('memory')[0]
        unit = memory.attrib['unit']
        memory = memory.text
        if unit == 'KiB':
            # normalize to MB for display
            memory = float(memory) / 1024
            memory = int(memory)
        numcpus = root.getiterator('vcpu')[0]
        numcpus = numcpus.text
        if vm.isActive():
            state = 'up'
        print("name: %s" % name)
        print("status: %s" % state)
        print("autostart: %s" % autostart)
        description = root.getiterator('description')
        if description:
            description = description[0].text
        else:
            description = ''
        title = None
        for entry in root.getiterator('entry'):
            attributes = entry.attrib
            if attributes['name'] == 'product':
                title = entry.text
        print("description: %s" % description)
        if title is not None:
            print("profile: %s" % title)
        print("cpus: %s" % numcpus)
        print("memory: %sMB" % memory)
        nicnumber = 0
        for element in root.getiterator('interface'):
            networktype = element.get('type')
            device = "eth%s" % nicnumber
            mac = element.find('mac').get('address')
            if networktype == 'bridge':
                bridge = element.find('source').get('bridge')
                print("net interfaces: %s mac: %s net: %s type: bridge" % (device, mac, bridge))
            else:
                network = element.find('source').get('network')
                print("net interfaces:%s mac: %s net: %s type: routed" % (device, mac, network))
                # NOTE(review): this lookup result is never used — confirm intent
                network = conn.networkLookupByName(network)
            if vm.isActive():
                if mac in leases:
                    # ips.append(leases[mac])
                    print("ip: %s" % leases[mac])
            nicnumber = nicnumber + 1
        for entry in root.getiterator('entry'):
            attributes = entry.attrib
            if attributes['name'] == 'version':
                # kcli stores a reserved ip in the SMBIOS 'version' entry
                ip = entry.text
                print("ip: %s" % ip)
                break
        for element in root.getiterator('disk'):
            disktype = element.get('device')
            if disktype == 'cdrom':
                continue
            device = element.find('target').get('dev')
            diskformat = 'file'
            drivertype = element.find('driver').get('type')
            path = element.find('source').get('file')
            volume = conn.storageVolLookupByPath(path)
            # info()[1] is the volume capacity in bytes
            disksize = int(float(volume.info()[1]) / 1024 / 1024 / 1024)
            print("diskname: %s disksize: %sGB diskformat: %s type: %s path: %s" % (device, disksize, diskformat, drivertype, path))
    def ip(self, name):
        """Return the DHCP-leased ip of the first nic of *name*, or None.

        None is returned when the VM does not exist, is not running, or
        its first nic's mac has no current lease.
        """
        leases = {}
        conn = self.conn
        # mac -> ip map from every network's DHCP leases
        for network in conn.listAllNetworks():
            for lease in network.DHCPLeases():
                ip = lease['ipaddr']
                mac = lease['mac']
                leases[mac] = ip
        try:
            vm = conn.lookupByName(name)
            xml = vm.XMLDesc(0)
            root = ET.fromstring(xml)
        except:
            return None
        # only the first interface is considered
        nic = root.getiterator('interface')[0]
        mac = nic.find('mac').get('address')
        if vm.isActive() and mac in leases:
            return leases[mac]
        else:
            return None
    def volumes(self, iso=False):
        """Return full paths of volumes across all pools.

        With iso=True only *.iso volumes are returned; otherwise qcow2
        volumes plus any volume whose name matches a known default
        template (defaults.TEMPLATES) are returned.
        """
        isos = []
        templates = []
        default_templates = [os.path.basename(t) for t in defaults.TEMPLATES.values()]
        conn = self.conn
        for storage in conn.listStoragePools():
            storage = conn.storagePoolLookupByName(storage)
            # refresh so recently added files are visible
            storage.refresh(0)
            storagexml = storage.XMLDesc(0)
            root = ET.fromstring(storagexml)
            for element in root.getiterator('path'):
                storagepath = element.text
                break
            for volume in storage.listVolumes():
                if volume.endswith('iso'):
                    isos.append("%s/%s" % (storagepath, volume))
                elif volume.endswith('qcow2') or volume in default_templates:
                    templates.append("%s/%s" % (storagepath, volume))
        if iso:
            return isos
        else:
            return templates
    def delete(self, name):
        """Delete domain *name* and clean up everything it owns.

        Destroys the VM if running, undefines it, removes its disks
        (cloudinit iso and any volume named "<name>_*"), clears its
        DHCP host reservations and DNS entries from routed networks,
        and drops its ip from the local ssh known_hosts.
        """
        conn = self.conn
        try:
            vm = conn.lookupByName(name)
        except:
            # silently ignore unknown VMs
            return
        # grab the leased ip before the VM goes away, for known_hosts cleanup
        ip = self.ip(name)
        status = {0: 'down', 1: 'up'}
        vmxml = vm.XMLDesc(0)
        root = ET.fromstring(vmxml)
        disks = []
        for element in root.getiterator('disk'):
            source = element.find('source')
            if source is not None:
                imagefile = element.find('source').get('file')
                # only delete files that clearly belong to this VM
                if imagefile.endswith("%s.iso" % name) or "%s_" % name in imagefile:
                    disks.append(imagefile)
                else:
                    continue
        if status[vm.isActive()] != "down":
            vm.destroy()
        vm.undefine()
        for storage in conn.listStoragePools():
            deleted = False
            storage = conn.storagePoolLookupByName(storage)
            storage.refresh(0)
            for stor in storage.listVolumes():
                for disk in disks:
                    if stor in disk:
                        try:
                            volume = storage.storageVolLookupByName(stor)
                        except:
                            continue
                        volume.delete(0)
                        deleted = True
            if deleted:
                storage.refresh(0)
        for element in root.getiterator('interface'):
            mac = element.find('mac').get('address')
            networktype = element.get('type')
            if networktype != 'bridge':
                network = element.find('source').get('network')
                network = conn.networkLookupByName(network)
                netxml = network.XMLDesc(0)
                root = ET.fromstring(netxml)
                # drop the DHCP host reservation matching this nic's mac
                # (update command 2 = delete, section 4 = ip-dhcp-host)
                for host in root.getiterator('host'):
                    hostmac = host.get('mac')
                    ip = host.get('ip')
                    hostname = host.get('name')
                    if hostmac == mac:
                        hostentry = "<host mac='%s' name='%s' ip='%s'/>" % (mac, hostname, ip)
                        network.update(2, 4, 0, hostentry, 1)
                # drop the DNS host entry pointing at this VM
                # (section 10 = dns-host)
                for host in root.getiterator('host'):
                    ip = host.get('ip')
                    hostname = host.find('hostname')
                    if hostname is not None and hostname.text == name:
                        hostentry = '<host ip="%s"><hostname>%s</hostname></host>' % (ip, name)
                        network.update(2, 10, 0, hostentry, 1)
        if ip is not None:
            os.system("ssh-keygen -q -R %s >/dev/null 2>&1" % ip)
def _xmldisk(self, diskpath, diskdev, diskbus='virtio', diskformat='qcow2', shareable=False):
if shareable:
sharexml = '<shareable/>'
else:
sharexml = ''
diskxml = """<disk type='file' device='disk'>
<driver name='qemu' type='%s' cache='none'/>
<source file='%s'/>
<target bus='%s' dev='%s'/>
%s
</disk>""" % (diskformat, diskpath, diskbus, diskdev, sharexml)
return diskxml
    def _xmlvolume(self, path, size, pooltype='file', backing=None, diskformat='qcow2'):
        """Render the storage-volume XML for *path*.

        size is given in MB and converted to bytes via the module MB
        constant. pooltype 'block' produces a raw block volume template;
        otherwise a file volume, optionally with a <backingStore> when
        *backing* names a base image.
        """
        size = int(size) * MB
        if int(size) == 0:
            # NOTE(review): a 0 size falls back to 500*1024 bytes (~0.5MB);
            # presumably meant as a minimal placeholder — confirm
            size = 500 * 1024
        name = os.path.basename(path)
        if pooltype == 'block':
            volume = """<volume type='block'>
                <name>%s</name>
                <capacity unit="bytes">%d</capacity>
                <target>
                <path>%s</path>
                <compat>1.1</compat>
                </target>
                </volume>""" % (name, size, path)
            return volume
        if backing is not None:
            backingstore = """
            <backingStore>
            <path>%s</path>
            <format type='%s'/>
            </backingStore>""" % (backing, diskformat)
        else:
            # explicit empty element so the template stays well-formed
            backingstore = "<backingStore/>"
        volume = """
        <volume type='file'>
        <name>%s</name>
        <capacity unit="bytes">%d</capacity>
        <target>
        <path>%s</path>
        <format type='%s'/>
        <permissions>
        <mode>0644</mode>
        </permissions>
        <compat>1.1</compat>
        </target>
        %s
        </volume>""" % (name, size, path, diskformat, backingstore)
        return volume
def clone(self, old, new, full=False, start=False):
conn = self.conn
oldvm = conn.lookupByName(old)
oldxml = oldvm.XMLDesc(0)
tree = ET.fromstring(oldxml)
uuid = tree.getiterator('uuid')[0]
tree.remove(uuid)
for vmname in tree.getiterator('name'):
vmname.text = new
firstdisk = True
for disk in tree.getiterator('disk'):
if firstdisk or full:
source = disk.find('source')
oldpath = source.get('file')
backingstore = disk.find('backingStore')
backing = None
for b in backingstore.getiterator():
backingstoresource = b.find('source')
if backingstoresource is not None:
backing = backingstoresource.get('file')
newpath = oldpath.replace(old, new)
source.set('file', newpath)
oldvolume = conn.storageVolLookupByPath(oldpath)
oldinfo = oldvolume.info()
oldvolumesize = (float(oldinfo[1]) / 1024 / 1024 / 1024)
newvolumexml = self._xmlvolume(newpath, oldvolumesize, backing)
pool = oldvolume.storagePoolLookupByVolume()
pool.createXMLFrom(newvolumexml, oldvolume, 0)
firstdisk = False
else:
devices = tree.getiterator('devices')[0]
devices.remove(disk)
for interface in tree.getiterator('interface'):
mac = interface.find('mac')
interface.remove(mac)
if self.host not in ['127.0.0.1', 'localhost']:
for serial in tree.getiterator('serial'):
source = serial.find('source')
source.set('service', str(common.get_free_port()))
newxml = ET.tostring(tree)
conn.defineXML(newxml)
vm = conn.lookupByName(new)
if start:
vm.setAutostart(1)
vm.create()
    def _reserve_ip(self, name, nets, macs):
        """Pin requested ips as DHCP host reservations.

        For each net dict carrying both 'ip' and 'name', adds a
        <host mac name ip> DHCP reservation on that libvirt network,
        using the mac libvirt assigned to the matching nic (macs is
        index-aligned with nets). Entries outside the network's subnet
        or on networks without DHCP are skipped.
        """
        conn = self.conn
        for index, net in enumerate(nets):
            if not isinstance(net, dict):
                # plain string entries carry no ip request
                continue
            ip = net.get('ip')
            network = net.get('name')
            mac = macs[index]
            if ip is None or network is None:
                continue
            network = conn.networkLookupByName(network)
            oldnetxml = network.XMLDesc()
            root = ET.fromstring(oldnetxml)
            ipentry = root.getiterator('ip')
            if ipentry:
                attributes = ipentry[0].attrib
                firstip = attributes.get('address')
                netmask = attributes.get('netmask')
                netip = IPNetwork('%s/%s' % (firstip, netmask))
            # NOTE(review): if the network has no <ip> element, netip below is
            # unbound and the membership test raises — confirm this can't happen
            dhcp = root.getiterator('dhcp')
            if not dhcp:
                continue
            if not IPAddress(ip) in netip:
                continue
            # command 4 = add-last, section 4 = ip-dhcp-host, flags 1 = live
            network.update(4, 4, 0, '<host mac="%s" name="%s" ip="%s" />' % (mac, name, ip), 1)
    def _reserve_dns(self, name, nets, domain):
        """Register a DNS host entry for *name* on its first network.

        Uses the net's requested ip when given, otherwise polls the DHCP
        lease for a while. Ensures the network XML has a <dns> section,
        then adds <host ip><hostname> entries (both short and fqdn when
        *domain* is set).
        """
        conn = self.conn
        net = nets[0]
        ip = None
        if isinstance(net, dict):
            ip = net.get('ip')
            network = net.get('name')
        else:
            network = net
        if ip is None:
            # wait for a DHCP lease to show up
            # NOTE(review): counter steps by 10 while sleeping 5s and stops at
            # 80, i.e. up to 8 polls / ~40s total — confirm intended timeout
            counter = 0
            while counter != 80:
                ip = self.ip(name)
                if ip is None:
                    time.sleep(5)
                    print("Waiting 5 seconds to grab ip and create DNS record...")
                    counter += 10
                else:
                    break
        if ip is None:
            print("Couldn't assign DNS")
            return
        network = conn.networkLookupByName(network)
        oldnetxml = network.XMLDesc()
        root = ET.fromstring(oldnetxml)
        dns = root.getiterator('dns')
        if not dns:
            # networks created without a <dns> section need one before
            # dns-host updates are accepted
            base = root.getiterator('network')[0]
            dns = ET.Element("dns")
            base.append(dns)
            newxml = ET.tostring(root)
            conn.networkDefineXML(newxml)
        if domain is not None:
            # If there is a domain, add it to the dns too :)
            # command 4 = add-last, section 10 = dns-host, flags 1 = live
            network.update(4, 10, 0, '<host ip="%s"><hostname>%s</hostname><hostname>%s.%s</hostname></host>' % (ip, name, name, domain), 1)
        else:
            network.update(4, 10, 0, '<host ip="%s"><hostname>%s</hostname></host>' % (ip, name), 1)
def handler(self, stream, data, file_):
return file_.read(data)
    def _uploadimage(self, name, pool='default', origin='/tmp', suffix='.iso'):
        """Upload origin/<name><suffix> into *pool* as a raw volume.

        Creates an empty raw volume in the pool's path, then streams the
        local file into it through a libvirt stream with self.handler as
        the read callback.
        """
        # NOTE(review): the default pool value is the string 'default' but the
        # body calls pool.XMLDesc()/createXML(), i.e. expects a libvirt pool
        # object — callers apparently always pass the object; confirm
        name = "%s%s" % (name, suffix)
        conn = self.conn
        poolxml = pool.XMLDesc(0)
        root = ET.fromstring(poolxml)
        for element in root.getiterator('path'):
            poolpath = element.text
            break
        imagepath = "%s/%s" % (poolpath, name)
        # size=0 lets _xmlvolume pick its minimal capacity; content is streamed in
        imagexml = self._xmlvolume(path=imagepath, size=0, diskformat='raw')
        pool.createXML(imagexml, 0)
        imagevolume = conn.storageVolLookupByPath(imagepath)
        stream = conn.newStream(0)
        imagevolume.upload(stream, 0, 0)
        with open("%s/%s" % (origin, name)) as ori:
            stream.sendAll(self.handler, ori)
            stream.finish()
    def update_ip(self, name, ip):
        """Persist *ip* into the SMBIOS 'version' entry of domain *name*.

        kcli abuses the sysinfo 'version' field to store a reserved ip
        so list()/info() can report it. Creates the smbios/sysinfo/system
        XML scaffolding if missing, then redefines the domain; a running
        VM only picks the change up after the next reboot.
        """
        conn = self.conn
        vm = conn.lookupByName(name)
        xml = vm.XMLDesc(0)
        root = ET.fromstring(xml)
        # NOTE(review): lookupByName raises on a missing domain, so this check
        # can never trigger — dead guard, confirm before relying on it
        if not vm:
            print("VM %s not found" % name)
        if vm.isActive() == 1:
            print("Machine up. Change will only appear upon next reboot")
        osentry = root.getiterator('os')[0]
        # <os><smbios mode='sysinfo'/> makes libvirt expose <sysinfo> to the guest
        smbios = osentry.find('smbios')
        if smbios is None:
            newsmbios = ET.Element("smbios", mode="sysinfo")
            osentry.append(newsmbios)
        sysinfo = root.getiterator('sysinfo')
        system = root.getiterator('system')
        if not sysinfo:
            sysinfo = ET.Element("sysinfo", type="smbios")
            root.append(sysinfo)
        sysinfo = root.getiterator('sysinfo')[0]
        if not system:
            system = ET.Element("system")
            sysinfo.append(system)
        system = root.getiterator('system')[0]
        versionfound = False
        for entry in root.getiterator('entry'):
            attributes = entry.attrib
            if attributes['name'] == 'version':
                entry.text = ip
                versionfound = True
        if not versionfound:
            version = ET.Element("entry", name="version")
            version.text = ip
            system.append(version)
        newxml = ET.tostring(root)
        conn.defineXML(newxml)
    def update_memory(self, name, memory):
        """Set domain *name*'s memory to *memory* MB and redefine it.

        Updates both <memory> and <currentMemory> (libvirt units are
        KiB, hence the *1024). Prints an error and returns when the VM
        does not exist; a running VM picks the change up on next boot.
        """
        conn = self.conn
        # MB -> KiB, as a string for the XML text node
        memory = str(int(memory) * 1024)
        try:
            vm = conn.lookupByName(name)
            xml = vm.XMLDesc(0)
            root = ET.fromstring(xml)
        except:
            print("VM %s not found" % name)
            return
        memorynode = root.getiterator('memory')[0]
        memorynode.text = memory
        currentmemory = root.getiterator('currentMemory')[0]
        currentmemory.text = memory
        newxml = ET.tostring(root)
        conn.defineXML(newxml)
    def update_cpu(self, name, numcpus):
        """Set domain *name*'s vcpu count and redefine it.

        numcpus becomes the <vcpu> text node directly (callers pass it
        as a string or int). Prints an error when the VM does not exist.
        """
        conn = self.conn
        try:
            vm = conn.lookupByName(name)
            xml = vm.XMLDesc(0)
            root = ET.fromstring(xml)
        except:
            print("VM %s not found" % name)
            return
        cpunode = root.getiterator('vcpu')[0]
        cpunode.text = numcpus
        newxml = ET.tostring(root)
        conn.defineXML(newxml)
def update_start(self, name, start=True):
conn = self.conn
try:
vm = conn.lookupByName(name)
except:
print("VM %s not found" % name)
return {'result': 'failure', 'reason': "VM %s not found" % name}
if start:
vm.setAutostart(1)
else:
vm.setAutostart(0)
return {'result': 'success'}
    def create_disk(self, name, size, pool=None, thin=True, template=None):
        """Create a standalone volume *name* of *size* GB in *pool*.

        thin=False forces raw format (as do logical pools); template may
        be a volume name or path used as backing store. Returns the full
        disk path, or None (with a printed message) on bad input.
        """
        conn = self.conn
        diskformat = 'qcow2'
        if size < 1:
            print("Incorrect size.Leaving...")
            return
        if not thin:
            diskformat = 'raw'
        if pool is not None:
            pool = conn.storagePoolLookupByName(pool)
            poolxml = pool.XMLDesc(0)
            poolroot = ET.fromstring(poolxml)
            pooltype = poolroot.getiterator('pool')[0].get('type')
            for element in poolroot.getiterator('path'):
                poolpath = element.text
                break
        else:
            print("Pool not found. Leaving....")
            return
        if template is not None:
            # resolve a template given by name to its full path
            volumes = {}
            for p in conn.listStoragePools():
                poo = conn.storagePoolLookupByName(p)
                for vol in poo.listAllVolumes():
                    volumes[vol.name()] = vol.path()
            if template not in volumes and template not in volumes.values():
                print("Invalid template %s.Leaving..." % template)
            if template in volumes:
                template = volumes[template]
        pool.refresh(0)
        diskpath = "%s/%s" % (poolpath, name)
        if pooltype == 'logical':
            # lvm volumes cannot be qcow2
            diskformat = 'raw'
        volxml = self._xmlvolume(path=diskpath, size=size, pooltype=pooltype,
                                 diskformat=diskformat, backing=template)
        pool.createXML(volxml, 0)
        return diskpath
# def add_disk(self, name, size, pool=None, thin=True, template=None, shareable=False):
# conn = self.conn
# diskformat = 'qcow2'
# diskbus = 'virtio'
# if size < 1:
# print("Incorrect size.Leaving...")
# return
# if not thin:
# diskformat = 'raw'
# try:
# vm = conn.lookupByName(name)
# xml = vm.XMLDesc(0)
# root = ET.fromstring(xml)
# except:
# print("VM %s not found" % name)
# return
# currentdisk = 0
# for element in root.getiterator('disk'):
# disktype = element.get('device')
# if disktype == 'cdrom':
# continue
# currentdisk = currentdisk + 1
# diskindex = currentdisk + 1
# diskdev = "vd%s" % string.ascii_lowercase[currentdisk]
# if pool is not None:
# pool = conn.storagePoolLookupByName(pool)
# poolxml = pool.XMLDesc(0)
# poolroot = ET.fromstring(poolxml)
# pooltype = poolroot.getiterator('pool')[0].get('type')
# for element in poolroot.getiterator('path'):
# poolpath = element.text
# break
# else:
# print("Pool not found. Leaving....")
# return
# if template is not None:
# volumes = {}
# for p in conn.listStoragePools():
# poo = conn.storagePoolLookupByName(p)
# for vol in poo.listAllVolumes():
# volumes[vol.name()] = vol.path()
# if template not in volumes and template not in volumes.values():
# print("Invalid template %s.Leaving..." % template)
# if template in volumes:
# template = volumes[template]
# pool.refresh(0)
# storagename = "%s_%d.img" % (name, diskindex)
# diskpath = "%s/%s" % (poolpath, storagename)
# volxml = self._xmlvolume(path=diskpath, size=size, pooltype=pooltype,
# diskformat=diskformat, backing=template)
# if pooltype == 'logical':
# diskformat = 'raw'
# diskxml = self._xmldisk(diskpath=diskpath, diskdev=diskdev, diskbus=diskbus, diskformat=diskformat, shareable=shareable)
# pool.createXML(volxml, 0)
# vm.attachDevice(diskxml)
# vm = conn.lookupByName(name)
# vmxml = vm.XMLDesc(0)
# conn.defineXML(vmxml)
    def add_disk(self, name, size, pool=None, thin=True, template=None, shareable=False, existing=None):
        """Attach a disk to domain *name*.

        Creates a new "<name>_<index>.img" volume via create_disk unless
        *existing* names a volume path to reuse. The device name is
        derived from the current disk count (vda, vdb, ...), skipping
        cdroms. The definition is re-saved so the attach persists.
        """
        conn = self.conn
        diskformat = 'qcow2'
        diskbus = 'virtio'
        if size < 1:
            print("Incorrect size.Leaving...")
            return
        if not thin:
            diskformat = 'raw'
        try:
            vm = conn.lookupByName(name)
            xml = vm.XMLDesc(0)
            root = ET.fromstring(xml)
        except:
            print("VM %s not found" % name)
            return
        # count existing (non-cdrom) disks to pick the next device letter
        currentdisk = 0
        for element in root.getiterator('disk'):
            disktype = element.get('device')
            if disktype == 'cdrom':
                continue
            currentdisk = currentdisk + 1
        diskindex = currentdisk + 1
        diskdev = "vd%s" % string.ascii_lowercase[currentdisk]
        if existing is None:
            storagename = "%s_%d.img" % (name, diskindex)
            diskpath = self.create_disk(name=storagename, size=size, pool=pool, thin=thin, template=template)
        else:
            diskpath = existing
        diskxml = self._xmldisk(diskpath=diskpath, diskdev=diskdev, diskbus=diskbus, diskformat=diskformat, shareable=shareable)
        vm.attachDevice(diskxml)
        # re-read and redefine so the hot-attached disk survives restarts
        vm = conn.lookupByName(name)
        vmxml = vm.XMLDesc(0)
        conn.defineXML(vmxml)
    def delete_disk(self, name, diskname):
        """Detach disk *diskname* (volume name or path) from VM *name* and delete its volume.

        Walks the domain's disks, skipping cdroms, and on the first match
        detaches the device, deletes the backing volume and redefines the
        domain. Prints a message when no disk matches.
        """
        conn = self.conn
        try:
            vm = conn.lookupByName(name)
            xml = vm.XMLDesc(0)
            root = ET.fromstring(xml)
        except:
            print("VM %s not found" % name)
            return
        for element in root.getiterator('disk'):
            disktype = element.get('device')
            diskdev = element.find('target').get('dev')
            diskbus = element.find('target').get('bus')
            diskformat = element.find('driver').get('type')
            if disktype == 'cdrom':
                continue
            diskpath = element.find('source').get('file')
            volume = self.conn.storageVolLookupByPath(diskpath)
            # accept either the volume's short name or its full path
            if volume.name() == diskname or volume.path() == diskname:
                diskxml = self._xmldisk(diskpath=diskpath, diskdev=diskdev, diskbus=diskbus, diskformat=diskformat)
                vm.detachDevice(diskxml)
                volume.delete(0)
                # redefine so the detach persists
                vm = conn.lookupByName(name)
                vmxml = vm.XMLDesc(0)
                conn.defineXML(vmxml)
                return
        print("Disk %s not found in %s" % (diskname, name))
def list_disks(self):
volumes = {}
for p in self.conn.listStoragePools():
poo = self.conn.storagePoolLookupByName(p)
for volume in poo.listAllVolumes():
volumes[volume.name()] = {'pool': poo.name(), 'path': volume.path()}
return volumes
    def add_nic(self, name, network):
        """Attach a virtio nic on *network* to domain *name*.

        *network* may be a host bridge or a routed libvirt network; the
        generated <interface> uses the matching type/source. The domain
        is redefined afterwards so the nic persists.
        """
        conn = self.conn
        # map every known network name to its interface type
        networks = {}
        for interface in conn.listAllInterfaces():
            networks[interface.name()] = 'bridge'
        for net in conn.listAllNetworks():
            networks[net.name()] = 'network'
        try:
            vm = conn.lookupByName(name)
        except:
            print("VM %s not found" % name)
            return
        if network not in networks:
            print("Network %s not found" % network)
            return
        else:
            networktype = networks[network]
            # conveniently, the source attribute name matches the type
            source = "<source %s='%s'/>" % (networktype, network)
        nicxml = """<interface type='%s'>
                    %s
                    <model type='virtio'/>
                    </interface>""" % (networktype, source)
        vm.attachDevice(nicxml)
        # redefine so the hot-attached nic survives restarts
        vm = conn.lookupByName(name)
        vmxml = vm.XMLDesc(0)
        conn.defineXML(vmxml)
def delete_nic(self, name, interface):
conn = self.conn
networks = {}
nicnumber = 0
for n in conn.listAllInterfaces():
networks[n.name()] = 'bridge'
for n in conn.listAllNetworks():
networks[n.name()] = 'network'
try:
vm = conn.lookupByName(name)
xml = vm.XMLDesc(0)
root = ET.fromstring(xml)
except:
print("VM %s not found" % name)
return
for element in root.getiterator('interface'):
device = "eth%s" % nicnumber
if device == interface:
mac = element.find('mac').get('address')
networktype = element.get('type')
if networktype == 'bridge':
network = element.find('source').get('bridge')
source = "<source %s='%s'/>" % (networktype, network)
else:
network = element.find('source').get('network')
source = "<source %s='%s'/>" % (networktype, network)
break
else:
nicnumber += 1
nicxml = """<interface type='%s'>
<mac address='%s'/>
%s
<model type='virtio'/>
</interface>""" % (networktype, mac, source)
print nicxml
vm.detachDevice(nicxml)
vm = conn.lookupByName(name)
vmxml = vm.XMLDesc(0)
conn.defineXML(vmxml)
    def _ssh_credentials(self, name):
        """Return (user, ip) for ssh-ing into VM *name*.

        The default cloud-image user is guessed from the VM's template
        name (centos, ubuntu releases, fedora, rhel, debian, arch,
        cirros), falling back to root. Returns ('', '') when the VM is
        missing or down; ip may be '' when no lease is known.
        """
        ubuntus = ['utopic', 'vivid', 'wily', 'xenial', 'yakkety']
        user = 'root'
        conn = self.conn
        try:
            vm = conn.lookupByName(name)
        except:
            print("VM %s not found" % name)
            return '', ''
        if vm.isActive() != 1:
            print("Machine down. Cannot ssh...")
            return '', ''
        # reuse list()'s [name, state, ip, source, description, title] rows
        vm = [v for v in self.list() if v[0] == name][0]
        template = vm[3]
        if template != '':
            if 'centos' in template.lower():
                user = 'centos'
            elif 'cirros' in template.lower():
                user = 'cirros'
            elif [x for x in ubuntus if x in template.lower()]:
                user = 'ubuntu'
            elif 'fedora' in template.lower():
                user = 'fedora'
            elif 'rhel' in template.lower():
                user = 'cloud-user'
            elif 'debian' in template.lower():
                user = 'debian'
            elif 'arch' in template.lower():
                user = 'arch'
        ip = vm[2]
        if ip == '':
            print("No ip found. Cannot ssh...")
        return user, ip
    def ssh(self, name, local=None, remote=None, tunnel=False):
        """Open an interactive ssh session to VM *name*.

        local/remote add -L/-R port forwards; tunnel=True proxies
        through the hypervisor with ProxyCommand when it is remote.
        Credentials come from _ssh_credentials; aborts silently when no
        ip is known.
        """
        user, ip = self._ssh_credentials(name)
        if ip == '':
            return
        else:
            sshcommand = "%s@%s" % (user, ip)
            if self.host not in ['localhost', '127.0.0.1'] and tunnel:
                # jump through the hypervisor to reach the VM's private ip
                sshcommand = "-o ProxyCommand='ssh -p %s -W %%h:%%p %s@%s' %s" % (self.port, self.user, self.host, sshcommand)
            if local is not None:
                sshcommand = "-L %s %s" % (local, sshcommand)
            if remote is not None:
                sshcommand = "-R %s %s" % (remote, sshcommand)
            sshcommand = "ssh %s" % sshcommand
            os.system(sshcommand)
    def scp(self, name, source=None, destination=None, tunnel=False, download=False, recursive=False):
        """Copy files to (or, with download=True, from) VM *name* via scp.

        tunnel=True proxies through the hypervisor with ProxyCommand
        when it is remote; recursive adds -r. Credentials come from
        _ssh_credentials; prints and aborts when no ip is known.
        """
        user, ip = self._ssh_credentials(name)
        if ip == '':
            print("No ip found. Cannot scp...")
        else:
            if self.host not in ['localhost', '127.0.0.1'] and tunnel:
                # jump through the hypervisor to reach the VM's private ip
                arguments = "-o ProxyCommand='ssh -p %s -W %%h:%%p %s@%s'" % (self.port, self.user, self.host)
            else:
                arguments = ''
            scpcommand = 'scp'
            if recursive:
                scpcommand = "%s -r" % scpcommand
            if download:
                scpcommand = "%s %s %s@%s:%s %s" % (scpcommand, arguments, user, ip, source, destination)
            else:
                scpcommand = "%s %s %s %s@%s:%s" % (scpcommand, arguments, source, user, ip, destination)
            os.system(scpcommand)
def create_pool(self, name, poolpath, pooltype='dir', user='qemu'):
conn = self.conn
for pool in conn.listStoragePools():
if pool == name:
print("Pool %s already there.Leaving..." % name)
return
if pooltype == 'dir':
if self.host == 'localhost' or self.host == '127.0.0.1':
if not os.path.exists(poolpath):
os.makedirs(poolpath)
elif self.protocol == 'ssh':
cmd1 = 'ssh -p %s %s@%s "test -d %s || mkdir %s"' % (self.port, self.user, self.host, poolpath, poolpath)
cmd2 = 'ssh %s@%s "chown %s %s"' % (self.user, self.host, user, poolpath)
os.system(cmd1)
os.system(cmd2)
else:
print("Make sur %s directory exists on hypervisor" % name)
poolxml = """<pool type='dir'>
<name>%s</name>
<source>
</source>
<target>
<path>%s</path>
</target>
</pool>""" % (name, poolpath)
elif pooltype == 'logical':
poolxml = """<pool type='logical'>
<name>%s</name>
<source>
<device path='%s'/>
<name>%s</name>
<format type='lvm2'/>
</source>
<target>
<path>/dev/%s</path>
</target>
</pool>""" % (name, poolpath, name, name)
else:
print("Invalid pool type %s.Leaving..." % pooltype)
return
pool = conn.storagePoolDefineXML(poolxml, 0)
pool.setAutostart(True)
if pooltype == 'logical':
pool.build()
pool.create()
    def add_image(self, image, pool):
        """Download template *image* (a url) into storage pool *pool*.

        Uses wget into the pool's directory, locally or over ssh, then
        refreshes the pool. Fails for unknown pools, lvm pools (upload
        not implemented) and already-present templates. Returns a
        kcli-style result dict.
        """
        poolname = pool
        shortimage = os.path.basename(image)
        conn = self.conn
        volumes = []
        try:
            pool = conn.storagePoolLookupByName(pool)
            for vol in pool.listAllVolumes():
                volumes.append(vol.name())
        except:
            return {'result': 'failure', 'reason': "Pool %s not found" % poolname}
        poolxml = pool.XMLDesc(0)
        root = ET.fromstring(poolxml)
        pooltype = root.getiterator('pool')[0].get('type')
        if pooltype == 'dir':
            poolpath = root.getiterator('path')[0].text
        else:
            poolpath = root.getiterator('device')[0].get('path')
            return {'result': 'failure', 'reason': "Upload to a lvm pool not implemented not found"}
        if shortimage in volumes:
            return {'result': 'failure', 'reason': "Template %s already exists in pool %s" % (shortimage, poolname)}
        if self.host == 'localhost' or self.host == '127.0.0.1':
            cmd = 'wget -P %s %s' % (poolpath, image)
        elif self.protocol == 'ssh':
            cmd = 'ssh -p %s %s@%s "wget -P %s %s"' % (self.port, self.user, self.host, poolpath, image)
        # NOTE(review): on a remote host with a non-ssh protocol, cmd is
        # unbound here and os.system raises NameError — confirm unreachable
        os.system(cmd)
        pool.refresh()
        # self._uploadimage(shortimage, pool=pool, suffix='')
        return {'result': 'success'}
    def create_network(self, name, cidr, dhcp=True, nat=True):
        """Define, autostart and start a routed libvirt network.

        The gateway is the first usable address of *cidr*; with dhcp the
        range spans the second usable address to the penultimate one.
        nat adds a <forward mode='nat'> section. The network's dns
        domain is set to its name. Returns a kcli-style result dict.
        """
        conn = self.conn
        networks = self.list_networks()
        cidrs = [network['cidr'] for network in networks.values()]
        if name in networks:
            return {'result': 'failure', 'reason': "Network %s already exists" % name}
        try:
            range = IpRange(cidr)
        except TypeError:
            return {'result': 'failure', 'reason': "Invalid Cidr %s" % cidr}
        if IPNetwork(cidr) in cidrs:
            return {'result': 'failure', 'reason': "Cidr %s already exists" % cidr}
        netmask = IPNetwork(cidr).netmask
        # range[0] is the network address; [1] becomes the gateway
        gateway = range[1]
        if dhcp:
            start = range[2]
            # [-1] is the broadcast address, so stop at [-2]
            end = range[-2]
            dhcpxml = """<dhcp>
                    <range start='%s' end='%s'/>
                    </dhcp>""" % (start, end)
        else:
            dhcpxml = ''
        if nat:
            natxml = "<forward mode='nat'><nat><port start='1024' end='65535'/></nat></forward>"
        else:
            natxml = ''
        networkxml = """<network><name>%s</name>
                    %s
                    <domain name='%s'/>
                    <ip address='%s' netmask='%s'>
                    %s
                    </ip>
                    </network>""" % (name, natxml, name, gateway, netmask, dhcpxml)
        new_net = conn.networkDefineXML(networkxml)
        new_net.setAutostart(True)
        new_net.create()
        return {'result': 'success'}
def delete_network(self, name=None):
conn = self.conn
try:
network = conn.networkLookupByName(name)
except:
return {'result': 'failure', 'reason': "Network %s not found" % name}
machines = self.network_ports(name)
if machines:
machines = ','.join(machines)
return {'result': 'failure', 'reason': "Network %s is being used by %s" % (name, machines)}
if network.isActive():
network.destroy()
network.undefine()
return {'result': 'success'}
def list_pools(self):
pools = []
conn = self.conn
for pool in conn.listStoragePools():
pools.append(pool)
return pools
    def list_networks(self):
        """Return {name: {'cidr', 'dhcp', 'type', 'mode'}} for all networks.

        Covers routed libvirt networks (cidr from their <ip> element,
        dhcp flag, forward mode or 'isolated') and host bridges (type
        'bridged', cidr from the interface's ip/prefix when available).
        """
        networks = {}
        conn = self.conn
        for network in conn.listAllNetworks():
            networkname = network.name()
            netxml = network.XMLDesc(0)
            cidr = 'N/A'
            root = ET.fromstring(netxml)
            ip = root.getiterator('ip')
            if ip:
                attributes = ip[0].attrib
                firstip = attributes.get('address')
                netmask = attributes.get('netmask')
                ip = IPNetwork('%s/%s' % (firstip, netmask))
                cidr = ip.cidr
            dhcp = root.getiterator('dhcp')
            if dhcp:
                dhcp = True
            else:
                dhcp = False
            forward = root.getiterator('forward')
            if forward:
                attributes = forward[0].attrib
                mode = attributes.get('mode')
            else:
                # no <forward> section means the network is isolated
                mode = 'isolated'
            networks[networkname] = {'cidr': cidr, 'dhcp': dhcp, 'type': 'routed', 'mode': mode}
        for interface in conn.listAllInterfaces():
            interfacename = interface.name()
            if interfacename == 'lo':
                continue
            netxml = interface.XMLDesc(0)
            root = ET.fromstring(netxml)
            ip = root.getiterator('ip')
            if ip:
                attributes = ip[0].attrib
                ip = attributes.get('address')
                # host interfaces carry a prefix length instead of a netmask
                prefix = attributes.get('prefix')
                ip = IPNetwork('%s/%s' % (ip, prefix))
                cidr = ip.cidr
            else:
                cidr = 'N/A'
            networks[interfacename] = {'cidr': cidr, 'dhcp': 'N/A', 'type': 'bridged', 'mode': 'N/A'}
        return networks
def delete_pool(self, name, full=False):
conn = self.conn
try:
pool = conn.storagePoolLookupByName(name)
except:
print("Pool %s not found. Leaving..." % name)
return
if full:
for vol in pool.listAllVolumes():
vol.delete(0)
if pool.isActive():
pool.destroy()
pool.undefine()
def bootstrap(self, pool=None, poolpath=None, pooltype='dir', nets={}, image=None):
conn = self.conn
volumes = {}
try:
poolname = pool
pool = conn.storagePoolLookupByName(pool)
for vol in pool.listAllVolumes():
volumes[vol.name()] = {'object': vol}
except:
if poolpath is not None:
print("Pool %s not found...Creating it" % pool)
self.create_pool(name=pool, poolpath=poolpath, pooltype=pooltype)
if image is not None and os.path.basename(image) not in volumes:
self.add_image(image, poolname)
networks = []
for net in conn.listNetworks():
networks.append(net)
for net in nets:
if net not in networks:
print("Network %s not found...Creating it" % net)
cidr = nets[net].get('cidr')
dhcp = bool(nets[net].get('dchp', True))
self.create_network(name=net, cidr=cidr, dhcp=dhcp)
def network_ports(self, name):
conn = self.conn
machines = []
for vm in conn.listAllDomains(0):
xml = vm.XMLDesc(0)
root = ET.fromstring(xml)
for element in root.getiterator('interface'):
networktype = element.get('type')
if networktype == 'bridge':
network = element.find('source').get('bridge')
else:
network = element.find('source').get('network')
if network == name:
machines.append(vm.name())
return machines
def vm_ports(self, name):
conn = self.conn
networks = []
try:
vm = conn.lookupByName(name)
except:
print("VM %s not found" % name)
return
xml = vm.XMLDesc(0)
root = ET.fromstring(xml)
for element in root.getiterator('interface'):
networktype = element.get('type')
if networktype == 'bridge':
network = element.find('source').get('bridge')
else:
network = element.find('source').get('network')
networks.append(network)
return networks
def _get_bridge(self, name):
conn = self.conn
bridges = [interface.name() for interface in conn.listAllInterfaces()]
if name in bridges:
return name
try:
net = self.conn.networkLookupByName(name)
except:
return None
netxml = net.XMLDesc(0)
root = ET.fromstring(netxml)
bridge = root.getiterator('bridge')
if bridge:
attributes = bridge[0].attrib
bridge = attributes.get('name')
return bridge
| [
"[email protected]"
] | |
3fed61110663973cd1928a59a2125cf764eff374 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03437/s906498746.py | 3c6648d03f39f52d51a4df527cdc8f918756b654 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 92 | py | m, n = map(int, input().split())
if m >= n and m % n == 0:
print(-1)
else:
print(m)
| [
"[email protected]"
] | |
cbd694d67c5985c58ecb43d81a9b857bc29e0727 | 1afa1b1929d1cd463cd9970174dd58ce2ca6eb1e | /configs/mobilenet_v3/lraspp_m-v3s-d8_scratch_512x1024_320k_cityscapes.py | a5507b4beaae675a1e5075a8fbd154ca5b5265c5 | [
"Apache-2.0"
] | permissive | CAU-HE/CMCDNet | 2328594bf4b883384c691099c72e119b65909121 | 31e660f81f3b625916a4c4d60cd606dcc8717f81 | refs/heads/main | 2023-08-08T17:21:57.199728 | 2023-07-28T07:34:40 | 2023-07-28T07:34:40 | 589,927,845 | 12 | 1 | null | null | null | null | UTF-8 | Python | false | false | 738 | py | _base_ = './lraspp_m-v3-d8_scratch_512x1024_320k_cityscapes.py'
# LR-ASPP segmentation model with a MobileNetV3-*small* backbone, overriding
# settings inherited from the base config (fused into the line above).
norm_cfg = dict(type='SyncBN', eps=0.001, requires_grad=True)  # synchronized BatchNorm
model = dict(
    type='EncoderDecoder',
    backbone=dict(
        type='MobileNetV3',
        arch='small',
        out_indices=(0, 1, 12),  # backbone stages forwarded to the decode head
        norm_cfg=norm_cfg),
    decode_head=dict(
        type='LRASPPHead',
        in_channels=(16, 16, 576),  # channel widths of the three selected stages
        in_index=(0, 1, 2),
        channels=128,
        input_transform='multiple_select',
        dropout_ratio=0.1,
        num_classes=19,  # presumably Cityscapes classes -- TODO confirm against dataset config
        norm_cfg=norm_cfg,
        act_cfg=dict(type='ReLU'),
        align_corners=False,
        loss_decode=dict(
            type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0)))
| [
"[email protected]"
] | |
df348e14d755b39f5502493836abc26b73a94792 | 8bbeb7b5721a9dbf40caa47a96e6961ceabb0128 | /python3/324.Wiggle Sort II(摆动排序 II).py | d276668f063a9cc860cd18310872f77ccd38876f | [
"MIT"
] | permissive | lishulongVI/leetcode | bb5b75642f69dfaec0c2ee3e06369c715125b1ba | 6731e128be0fd3c0bdfe885c1a409ac54b929597 | refs/heads/master | 2020-03-23T22:17:40.335970 | 2018-07-23T14:46:06 | 2018-07-23T14:46:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,414 | py | """
<p>Given an unsorted array <code>nums</code>, reorder it such that <code>nums[0] < nums[1] > nums[2] < nums[3]...</code>.</p>
<p><b>Example 1:</b></p>
<pre>
<strong>Input: </strong><code>nums = [1, 5, 1, 1, 6, 4]</code>
<strong>Output: </strong>One possible answer is <code>[1, 4, 1, 5, 1, 6]</code>.</pre>
<p><b>Example 2:</b></p>
<pre>
<strong>Input: </strong><code>nums = [1, 3, 2, 2, 3, 1]</code>
<strong>Output:</strong> One possible answer is <code>[2, 3, 1, 3, 1, 2]</code>.</pre>
<p><b>Note:</b><br />
You may assume all input has valid answer.</p>
<p><b>Follow Up:</b><br />
Can you do it in O(n) time and/or in-place with O(1) extra space?</p>
<p>给定一个无序的数组 <code>nums</code>,将它重新排列成 <code>nums[0] < nums[1] > nums[2] < nums[3]...</code> 的顺序。</p>
<p><strong>示例 1:</strong></p>
<pre><strong>输入: </strong><code>nums = [1, 5, 1, 1, 6, 4]</code>
<strong>输出: </strong>一个可能的答案是 <code>[1, 4, 1, 5, 1, 6]</code></pre>
<p><strong>示例 2:</strong></p>
<pre><strong>输入: </strong><code>nums = [1, 3, 2, 2, 3, 1]</code>
<strong>输出:</strong> 一个可能的答案是 <code>[2, 3, 1, 3, 1, 2]</code></pre>
<p><strong>说明:</strong><br>
你可以假设所有输入都会得到有效的结果。</p>
<p><strong>进阶:</strong><br>
你能用 O(n) 时间复杂度和 / 或原地 O(1) 额外空间来实现吗?</p>
<p>给定一个无序的数组 <code>nums</code>,将它重新排列成 <code>nums[0] < nums[1] > nums[2] < nums[3]...</code> 的顺序。</p>
<p><strong>示例 1:</strong></p>
<pre><strong>输入: </strong><code>nums = [1, 5, 1, 1, 6, 4]</code>
<strong>输出: </strong>一个可能的答案是 <code>[1, 4, 1, 5, 1, 6]</code></pre>
<p><strong>示例 2:</strong></p>
<pre><strong>输入: </strong><code>nums = [1, 3, 2, 2, 3, 1]</code>
<strong>输出:</strong> 一个可能的答案是 <code>[2, 3, 1, 3, 1, 2]</code></pre>
<p><strong>说明:</strong><br>
你可以假设所有输入都会得到有效的结果。</p>
<p><strong>进阶:</strong><br>
你能用 O(n) 时间复杂度和 / 或原地 O(1) 额外空间来实现吗?</p>
"""
class Solution:
    def wiggleSort(self, nums):
        """
        :type nums: List[int]
        :rtype: void Do not return anything, modify nums in-place instead.

        Reorder nums in place so that nums[0] < nums[1] > nums[2] < nums[3]...
        (the original method was an empty stub).

        Approach: sort, split into a smaller half and a larger half, then
        interleave the two halves back-to-front; reversing the halves keeps
        equal elements as far apart as possible so duplicates never end up
        adjacent. O(n log n) time, O(n) extra space.
        """
        nums.sort()
        half = (len(nums) + 1) // 2
        # even slots get the smaller half (reversed), odd slots the larger half
        nums[::2], nums[1::2] = nums[:half][::-1], nums[half:][::-1]
"[email protected]"
] | |
74780552abdcb29cbb2ff55c2bfafc323a3c67a6 | 0e478f3d8b6c323c093455428c9094c45de13bac | /src/OTLMOW/PostenMapping/Model/Post060371615.py | f8c41c27b0114312043dc81cf23f0e87d9548eb3 | [
"MIT"
] | permissive | davidvlaminck/OTLMOW | c6eae90b2cab8a741271002cde454427ca8b75ba | 48f8c357c475da1d2a1bc7820556843d4b37838d | refs/heads/main | 2023-01-12T05:08:40.442734 | 2023-01-10T15:26:39 | 2023-01-10T15:26:39 | 432,681,113 | 3 | 1 | MIT | 2022-06-20T20:36:00 | 2021-11-28T10:28:24 | Python | UTF-8 | Python | false | false | 4,410 | py | # coding=utf-8
from OTLMOW.PostenMapping.StandaardPost import StandaardPost
from OTLMOW.PostenMapping.StandaardPostMapping import StandaardPostMapping
# Generated with PostenCreator. To modify: extend, do not edit
class Post060371615(StandaardPost):
    """Standard tender item 0603.71615: sawn natural-stone tiles, usage class 6,
    150 x 150 mm, 50 mm thick, priced per m2.

    Maps the meetstaat post onto OTL ``BestratingVanNatuursteentegel``
    attributes via a list of ``StandaardPostMapping`` entries. NOTE(review):
    per the file header this class is generated by PostenCreator ("extend,
    do not edit"), so the mapping data below must stay exactly as generated.
    """
    def __init__(self):
        super().__init__(
            nummer='0603.71615',
            beschrijving='Gezaagde natuursteentegels, gebruiksklasse 6 volgens 6-3.8, 150 x 150, 50 mm',
            meetstaateenheid='M2',
            mappings=[StandaardPostMapping(
                typeURI='https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#BestratingVanNatuursteentegel',
                attribuutURI='https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#BestratingVanNatuursteentegel.afwerking',
                dotnotation='afwerking',
                defaultWaarde='gezaagd',
                range='',
                usagenote='',
                isMeetstaatAttr=0,
                isAltijdInTeVullen=0,
                isBasisMapping=1,
                mappingStatus='gemapt 2.0',
                mappingOpmerking='',
                standaardpostnummer='0603.71615')
            , StandaardPostMapping(
                typeURI='https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#BestratingVanNatuursteentegel',
                attribuutURI='https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#BestratingVanNatuursteentegel.afmetingVanBestratingselementLxB',
                dotnotation='afmetingVanBestratingselementLxB',
                defaultWaarde='150-x-150',
                range='',
                usagenote='',
                isMeetstaatAttr=0,
                isAltijdInTeVullen=0,
                isBasisMapping=1,
                mappingStatus='gemapt 2.0',
                mappingOpmerking='',
                standaardpostnummer='0603.71615')
            , StandaardPostMapping(
                typeURI='https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#BestratingVanNatuursteentegel',
                attribuutURI='https://wegenenverkeer.data.vlaanderen.be/ns/abstracten#Laag.laagRol',
                dotnotation='laagRol',
                defaultWaarde='straatlaag',
                range='',
                usagenote='',
                isMeetstaatAttr=0,
                isAltijdInTeVullen=0,
                isBasisMapping=1,
                mappingStatus='gemapt 2.0',
                mappingOpmerking='',
                standaardpostnummer='0603.71615')
            , StandaardPostMapping(
                typeURI='https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#BestratingVanNatuursteentegel',
                attribuutURI='https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#BestratingVanNatuursteentegel.gebruiksklasse',
                dotnotation='gebruiksklasse',
                defaultWaarde='6',
                range='',
                usagenote='',
                isMeetstaatAttr=0,
                isAltijdInTeVullen=0,
                isBasisMapping=1,
                mappingStatus='gemapt 2.0',
                mappingOpmerking='',
                standaardpostnummer='0603.71615')
            , StandaardPostMapping(
                typeURI='https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#BestratingVanNatuursteentegel',
                attribuutURI='https://wegenenverkeer.data.vlaanderen.be/ns/abstracten#LaagDikte.dikte',
                dotnotation='dikte',
                defaultWaarde='5',
                range='',
                usagenote='cm^^cdt:ucumunit',
                isMeetstaatAttr=0,
                isAltijdInTeVullen=0,
                isBasisMapping=1,
                mappingStatus='gemapt 2.0',
                mappingOpmerking='',
                standaardpostnummer='0603.71615')
            , StandaardPostMapping(
                typeURI='https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#BestratingVanNatuursteentegel',
                attribuutURI='https://wegenenverkeer.data.vlaanderen.be/ns/abstracten#Laag.oppervlakte',
                dotnotation='oppervlakte',
                defaultWaarde='',
                range='',
                usagenote='m2^^cdt:ucumunit',
                isMeetstaatAttr=1,
                isAltijdInTeVullen=1,
                isBasisMapping=1,
                mappingStatus='gemapt 2.0',
                mappingOpmerking='',
                standaardpostnummer='0603.71615')])
| [
"[email protected]"
] | |
cfb9b132d864866cf06c629af73bf710465ed333 | 96fed5ba4bb561750b8c990581e7aa928b1b2124 | /backend/django_app/apps/endpoints/migrations/0001_initial.py | 1538207dcd6c5358dbfe93597c905858663a9034 | [] | no_license | Inoxevious/fin_paradise_app | 99458bb18ea6a62482521c5adef5c44e61e96e38 | 22eb0c9bb92e03f7302588200ad3520ad7554694 | refs/heads/main | 2023-03-12T21:00:41.858629 | 2021-03-02T07:52:28 | 2021-03-02T07:52:28 | 338,951,278 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,018 | py | # Generated by Django 3.0.8 on 2021-02-13 13:32
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the ``endpoints`` app.

    Creates the ML-serving tables: Endpoint, MLAlgorithm (+ status history),
    MLRequest, ABTest, and three score tables (Retention/Behavioural/
    Application) that link loans, clients, officers and organizations from
    the ``users`` app. NOTE(review): this file is auto-generated by Django
    (see the "Generated by Django 3.0.8" header); change the schema through
    a new migration rather than editing this one.
    """
    initial = True
    dependencies = [
        ('users', '0006_auto_20210213_1301'),
    ]
    operations = [
        migrations.CreateModel(
            name='Endpoint',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=128)),
                ('owner', models.CharField(max_length=128)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
            ],
        ),
        migrations.CreateModel(
            name='MLAlgorithm',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=128)),
                ('description', models.CharField(max_length=1000)),
                ('code', models.CharField(max_length=50000)),
                ('version', models.CharField(max_length=128)),
                ('owner', models.CharField(max_length=128)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('parent_endpoint', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='endpoints.Endpoint')),
            ],
        ),
        migrations.CreateModel(
            name='RetentionScores',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('income_probability', models.FloatField(blank=True, null=True)),
                ('income_color', models.CharField(blank=True, max_length=70, null=True)),
                ('income_text', models.CharField(blank=True, max_length=255, null=True)),
                ('retention_probability', models.FloatField(blank=True, null=True)),
                ('retention_color', models.CharField(blank=True, max_length=70, null=True)),
                ('retention_classification', models.CharField(blank=True, max_length=255, null=True)),
                ('retention_recommendation_process', models.CharField(blank=True, max_length=255, null=True)),
                ('retention_num', models.CharField(blank=True, max_length=255, null=True)),
                ('retention_closure_date', models.CharField(blank=True, max_length=255, null=True)),
                ('retention_client_clv', models.CharField(blank=True, max_length=255, null=True)),
                ('credit_amount', models.FloatField(blank=True, null=True)),
                ('created_by', models.CharField(max_length=128)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('last_update_at', models.DateTimeField(auto_now_add=True)),
                ('client', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='rs_client', to='users.Clients', verbose_name='client')),
                ('company', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='rs_company', to='users.Organization', verbose_name='company')),
                ('loan_id', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='rs_loan', to='users.Loan', verbose_name='Loan')),
                ('officer', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='rs_officer', to='users.LoanOfficer', verbose_name='officer')),
            ],
        ),
        migrations.CreateModel(
            name='MLRequest',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('input_data', models.CharField(max_length=10000)),
                ('full_response', models.CharField(max_length=10000)),
                ('response', models.CharField(max_length=10000)),
                ('feedback', models.CharField(blank=True, max_length=10000, null=True)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('parent_mlalgorithm', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='endpoints.MLAlgorithm')),
            ],
        ),
        migrations.CreateModel(
            name='MLAlgorithmStatus',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('status', models.CharField(max_length=128)),
                ('active', models.BooleanField()),
                ('created_by', models.CharField(max_length=128)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('parent_mlalgorithm', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='status', to='endpoints.MLAlgorithm')),
            ],
        ),
        migrations.CreateModel(
            name='BehaviouralScores',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('income_probability', models.FloatField(blank=True, null=True)),
                ('income_color', models.CharField(blank=True, max_length=70, null=True)),
                ('income_text', models.CharField(blank=True, max_length=255, null=True)),
                ('behavioral_probability', models.FloatField(blank=True, null=True)),
                ('behavioral_color', models.CharField(blank=True, max_length=70, null=True)),
                ('behavioral_text', models.CharField(blank=True, max_length=255, null=True)),
                ('behavioral_time_to_default', models.CharField(blank=True, max_length=255, null=True)),
                ('behavioral_contact_channel', models.CharField(blank=True, max_length=255, null=True)),
                ('behavioral_contact_schedule', models.CharField(blank=True, max_length=255, null=True)),
                ('behavioral_message', models.CharField(blank=True, max_length=255, null=True)),
                ('credit_amount', models.FloatField(blank=True, null=True)),
                ('created_by', models.CharField(max_length=128)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('last_update_at', models.DateTimeField(auto_now_add=True)),
                ('client', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='bs_client', to='users.Clients', verbose_name='client')),
                ('company', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='bs_company', to='users.Organization', verbose_name='company')),
                ('loan_id', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='bs_loan', to='users.Loan', verbose_name='Loan')),
                ('officer', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='bs_officer', to='users.LoanOfficer', verbose_name='officer')),
            ],
        ),
        migrations.CreateModel(
            name='ApplicationScores',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('income_probability', models.FloatField(blank=True, null=True)),
                ('income_color', models.CharField(blank=True, max_length=70, null=True)),
                ('income_text', models.CharField(blank=True, max_length=255, null=True)),
                ('application_probability', models.FloatField(blank=True, null=True)),
                ('application_color', models.CharField(blank=True, max_length=70, null=True)),
                ('application_text', models.CharField(blank=True, max_length=255, null=True)),
                ('credit_amount', models.FloatField(blank=True, null=True)),
                ('created_by', models.CharField(max_length=128)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('last_update_at', models.DateTimeField(auto_now_add=True)),
                ('client', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='as_client', to='users.Clients', verbose_name='client')),
                ('company', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='as_company', to='users.Organization', verbose_name='company')),
                ('loan_id', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='as_loan', to='users.Loan', verbose_name='Loan')),
                ('officer', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='as_officer', to='users.LoanOfficer', verbose_name='officer')),
            ],
        ),
        migrations.CreateModel(
            name='ABTest',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=10000)),
                ('created_by', models.CharField(max_length=128)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('ended_at', models.DateTimeField(blank=True, null=True)),
                ('summary', models.CharField(blank=True, max_length=10000, null=True)),
                ('parent_mlalgorithm_1', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='parent_mlalgorithm_1', to='endpoints.MLAlgorithm')),
                ('parent_mlalgorithm_2', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='parent_mlalgorithm_2', to='endpoints.MLAlgorithm')),
            ],
        ),
    ]
| [
"[email protected]"
] |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.